Commit 672c0703 authored by Patrik Huber

Updated a lot of code to snake_case

parent b2e0b3b8
@@ -78,9 +78,9 @@ vector<Vec2f> readPtsLandmarks(std::string filename)
if (line == "}") { // end of the file
break;
}
- std::stringstream lineStream(line);
+ std::stringstream line_stream(line);
Vec2f landmark(0.0f, 0.0f);
- if (!(lineStream >> landmark[0] >> landmark[1])) {
+ if (!(line_stream >> landmark[0] >> landmark[1])) {
throw std::runtime_error(string("Landmark format error while parsing the line: " + line));
}
// From the iBug website:
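For reference, the .pts files parsed above follow the iBug annotation format: a short header, then one "x y" pair per line inside curly braces. A minimal sketch (the coordinates below are made up for illustration):

```
version: 1
n_points: 68
{
380.5 270.1
384.2 316.8
...
755.8 272.0
}
```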
@@ -140,8 +140,8 @@ int main(int argc, char *argv[])
// Load the image, landmarks, LandmarkMapper and the Morphable Model:
Mat image = cv::imread(imagefile.string());
auto landmarks = readPtsLandmarks(landmarksfile.string());
- morphablemodel::MorphableModel morphableModel = morphablemodel::loadScmModel(modelfile, isomapfile);
+ morphablemodel::MorphableModel morphable_model = morphablemodel::loadScmModel(modelfile, isomapfile);
- core::LandmarkMapper landmarkMapper = mappingsfile.empty() ? core::LandmarkMapper() : core::LandmarkMapper(mappingsfile);
+ core::LandmarkMapper landmark_mapper = mappingsfile.empty() ? core::LandmarkMapper() : core::LandmarkMapper(mappingsfile);
// Draw the loaded landmarks:
Mat outimg = image.clone();
@@ -153,48 +153,48 @@ int main(int argc, char *argv[])
std::transform(begin(landmarks), end(landmarks), begin(landmarks), [&image](const Vec2f& lm) { return render::screenToClipSpace(lm, image.cols, image.rows); });
// These will be the final 2D and 3D points used for the fitting:
- vector<Vec4f> modelPoints; ///< the points in the 3D shape model
+ vector<Vec4f> model_points; ///< the points in the 3D shape model
- vector<int> vertexIndices; ///< their vertex indices
+ vector<int> vertex_indices; ///< their vertex indices
- vector<Vec2f> imagePoints; ///< the corresponding 2D landmark points
+ vector<Vec2f> image_points; ///< the corresponding 2D landmark points
// Sub-select all the landmarks which we have a mapping for (i.e. that are defined in the 3DMM):
- int ibugId = 1;
+ int ibug_id = 1;
for (int i = 0; i < landmarks.size(); ++i) {
try {
- int vertexIdx = boost::lexical_cast<int>(landmarkMapper.convert(std::to_string(ibugId)));
+ int vertex_idx = boost::lexical_cast<int>(landmark_mapper.convert(std::to_string(ibug_id)));
- Vec4f vertex = morphableModel.getShapeModel().getMeanAtPoint(vertexIdx);
+ Vec4f vertex = morphable_model.get_shape_model().get_mean_at_point(vertex_idx);
- modelPoints.emplace_back(vertex);
+ model_points.emplace_back(vertex);
- vertexIndices.emplace_back(vertexIdx);
+ vertex_indices.emplace_back(vertex_idx);
- imagePoints.emplace_back(landmarks[i]);
+ image_points.emplace_back(landmarks[i]);
}
catch (const std::out_of_range&) {
// just continue if the point isn't defined in the mapping
}
- ++ibugId;
+ ++ibug_id;
}
// Estimate the camera from the 2D - 3D point correspondences
- Mat affineCam = fitting::estimate_affine_camera(imagePoints, modelPoints);
+ Mat affine_cam = fitting::estimate_affine_camera(image_points, model_points);
// Draw the mean-face landmarks projected using the estimated camera:
- for (auto&& vertex : modelPoints) {
+ for (auto&& vertex : model_points) {
- Vec2f screenPoint = fitting::project_affine(vertex, affineCam, image.cols, image.rows);
+ Vec2f screen_point = fitting::project_affine(vertex, affine_cam, image.cols, image.rows);
- cv::circle(outimg, cv::Point2f(screenPoint), 5, { 0.0f, 255.0f, 0.0f });
+ cv::circle(outimg, cv::Point2f(screen_point), 5, { 0.0f, 255.0f, 0.0f });
}
// Estimate the shape coefficients by fitting the shape to the landmarks:
float lambda = 5.0f; ///< the regularisation parameter
- vector<float> fittedCoeffs = fitting::fit_shape_to_landmarks_linear(morphableModel, affineCam, imagePoints, vertexIndices, lambda);
+ vector<float> fitted_coeffs = fitting::fit_shape_to_landmarks_linear(morphable_model, affine_cam, image_points, vertex_indices, lambda);
// Obtain the full mesh and draw it using the estimated camera:
- render::Mesh mesh = morphableModel.drawSample(fittedCoeffs, vector<float>());
+ render::Mesh mesh = morphable_model.draw_sample(fitted_coeffs, vector<float>());
render::write_obj(mesh, "out.obj"); // save the mesh as obj
// Draw the projected points again, this time using the fitted model shape:
- for (auto&& idx : vertexIndices) {
+ for (auto&& idx : vertex_indices) {
- Vec4f modelPoint(mesh.vertices[idx][0], mesh.vertices[idx][1], mesh.vertices[idx][2], mesh.vertices[idx][3]);
+ Vec4f model_point(mesh.vertices[idx][0], mesh.vertices[idx][1], mesh.vertices[idx][2], mesh.vertices[idx][3]);
- Vec2f screenPoint = fitting::project_affine(modelPoint, affineCam, image.cols, image.rows);
+ Vec2f screen_point = fitting::project_affine(model_point, affine_cam, image.cols, image.rows);
- cv::circle(outimg, cv::Point2f(screenPoint), 3, { 0.0f, 0.0f, 255.0f });
+ cv::circle(outimg, cv::Point2f(screen_point), 3, { 0.0f, 0.0f, 255.0f });
}
// Save the output image:
...
@@ -61,18 +61,18 @@ public:
{
using std::string;
using boost::property_tree::ptree;
- ptree configTree;
+ ptree configtree;
try {
- boost::property_tree::info_parser::read_info(filename.string(), configTree);
+ boost::property_tree::info_parser::read_info(filename.string(), configtree);
}
catch (const boost::property_tree::ptree_error& error) {
throw std::runtime_error(string("LandmarkMapper: Error reading landmark-mappings file: ") + error.what());
}
try {
- ptree ptLandmarkMappings = configTree.get_child("landmarkMappings");
+ ptree pt_landmark_mappings = configtree.get_child("landmarkMappings");
- for (auto&& mapping : ptLandmarkMappings) {
+ for (auto&& mapping : pt_landmark_mappings) {
- landmarkMappings.insert(make_pair(mapping.first, mapping.second.get_value<string>()));
+ landmark_mappings.insert(make_pair(mapping.first, mapping.second.get_value<string>()));
}
}
catch (const boost::property_tree::ptree_error& error) {
@@ -91,14 +91,14 @@ public:
* @throws out_of_range exception if there is no mapping
* for the given landmarkName.
*/
- std::string convert(std::string landmarkName) const
+ std::string convert(std::string landmark_name) const
{
- if (landmarkMappings.empty()) {
+ if (landmark_mappings.empty()) {
// perform identity mapping, i.e. return the input
- return landmarkName;
+ return landmark_name;
}
else {
- return landmarkMappings.at(landmarkName); // throws an out_of_range exception if landmarkName does not match the key of any element in the map
+ return landmark_mappings.at(landmark_name); // throws an out_of_range exception if landmarkName does not match the key of any element in the map
}
};
@@ -109,11 +109,11 @@ public:
*/
auto size() const
{
- return landmarkMappings.size();
+ return landmark_mappings.size();
};
private:
- std::map<std::string, std::string> landmarkMappings; ///< Mapping from one landmark name to a name in a different format.
+ std::map<std::string, std::string> landmark_mappings; ///< Mapping from one landmark name to a name in a different format.
};
} /* namespace core */
...
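The mappings file loaded by this constructor is a Boost property_tree INFO file whose landmarkMappings node holds key-value pairs, mapping a landmark name in one convention (here the iBug point number) to a name in another (here a vertex index of the Morphable Model). A hedged sketch of such a file; the vertex numbers are purely illustrative:

```
landmarkMappings
{
    ; <ibug landmark id>  <model vertex id>  (values below are made-up examples)
    31 999
    37 1234
    46 2345
}
```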
@@ -51,7 +51,7 @@ cv::Mat estimate_affine_camera(std::vector<cv::Vec2f> image_points, std::vector<
assert(image_points.size() == model_points.size());
assert(image_points.size() >= 4); // Number of correspondence points given needs to be equal to or larger than 4
- const int numCorrespondences = static_cast<int>(image_points.size());
+ const int num_correspondences = static_cast<int>(image_points.size());
Mat matImagePoints; // will be numCorrespondences x 2, CV_32FC1
Mat matModelPoints; // will be numCorrespondences x 3, CV_32FC1
@@ -125,10 +125,10 @@ cv::Mat estimate_affine_camera(std::vector<cv::Vec2f> image_points, std::vector<
// We are solving the system $A_8 * p_8 = b$
// The solution is obtained by the pseudo-inverse of A_8:
// $p_8 = A_8^+ * b$
- Mat A_8 = Mat::zeros(numCorrespondences * 2, 8, CV_32FC1);
+ Mat A_8 = Mat::zeros(num_correspondences * 2, 8, CV_32FC1);
//Mat p_8(); // p_8 is 8 x 1. We are solving for it.
- Mat b(numCorrespondences * 2, 1, CV_32FC1);
+ Mat b(num_correspondences * 2, 1, CV_32FC1);
- for (int i = 0; i < numCorrespondences; ++i) {
+ for (int i = 0; i < num_correspondences; ++i) {
A_8.at<float>(2 * i, 0) = matModelPoints.at<float>(i, 0); // could maybe made faster by assigning the whole row/col-range if possible?
A_8.at<float>(2 * i, 1) = matModelPoints.at<float>(i, 1);
A_8.at<float>(2 * i, 2) = matModelPoints.at<float>(i, 2);
...
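To spell out the system referenced in the comments above: each 2D-3D correspondence $(\mathbf{x}_i, \mathbf{X}_i)$ contributes two rows to $A_8$ and $b$, and the eight unknowns of the affine camera are recovered with the pseudo-inverse. A sketch of the standard affine formulation (any point normalisation/denormalisation the full routine performs around this step is omitted here):

$$
\begin{bmatrix} X_i & Y_i & Z_i & 1 & 0 & 0 & 0 & 0 \\ 0 & 0 & 0 & 0 & X_i & Y_i & Z_i & 1 \end{bmatrix} \mathbf{p}_8 = \begin{bmatrix} x_i \\ y_i \end{bmatrix},
\qquad
\mathbf{p}_8 = A_8^{+}\, b,
\qquad
P_{\text{affine}} = \begin{bmatrix} p_1 & p_2 & p_3 & p_4 \\ p_5 & p_6 & p_7 & p_8 \\ 0 & 0 & 0 & 1 \end{bmatrix}.
$$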
@@ -61,14 +61,14 @@ inline std::vector<float> fit_shape_to_landmarks_linear(morphablemodel::Morphabl
using cv::Mat;
assert(landmarks.size() == vertex_ids.size());
- int num_coeffs_to_fit = num_coefficients_to_fit.get_value_or(morphable_model.getShapeModel().getNumberOfPrincipalComponents());
+ int num_coeffs_to_fit = num_coefficients_to_fit.get_value_or(morphable_model.get_shape_model().get_num_principal_components());
// $\hat{V} \in R^{3N\times m-1}$, subselect the rows of the eigenvector matrix $V$ associated with the $N$ feature points
// And we insert a row of zeros after every third row, resulting in matrix $\hat{V}_h \in R^{4N\times m-1}$:
Mat V_hat_h = Mat::zeros(4 * landmarks.size(), num_coeffs_to_fit, CV_32FC1);
int row_index = 0;
for (int i = 0; i < landmarks.size(); ++i) {
- Mat basis_rows = morphable_model.getShapeModel().getNormalisedPcaBasis(vertex_ids[i]); // In the paper, the not-normalised basis might be used? I'm not sure, check it. It's even a mess in the paper. PH 26.5.2014: I think the normalised basis is fine/better.
+ Mat basis_rows = morphable_model.get_shape_model().get_normalised_pca_basis(vertex_ids[i]); // In the paper, the not-normalised basis might be used? I'm not sure, check it. It's even a mess in the paper. PH 26.5.2014: I think the normalised basis is fine/better.
//basisRows.copyTo(V_hat_h.rowRange(rowIndex, rowIndex + 3));
basis_rows.colRange(0, num_coeffs_to_fit).copyTo(V_hat_h.rowRange(row_index, row_index + 3));
row_index += 4; // replace 3 rows and skip the 4th one, it has all zeros
@@ -101,7 +101,7 @@ inline std::vector<float> fit_shape_to_landmarks_linear(morphablemodel::Morphabl
// The mean, with an added homogeneous coordinate (x_1, y_1, z_1, 1, x_2, ...)^t
Mat v_bar = Mat::ones(4 * landmarks.size(), 1, CV_32FC1);
for (int i = 0; i < landmarks.size(); ++i) {
- cv::Vec4f model_mean = morphable_model.getShapeModel().getMeanAtPoint(vertex_ids[i]);
+ cv::Vec4f model_mean = morphable_model.get_shape_model().get_mean_at_point(vertex_ids[i]);
v_bar.at<float>(4 * i, 0) = model_mean[0];
v_bar.at<float>((4 * i) + 1, 0) = model_mean[1];
v_bar.at<float>((4 * i) + 2, 0) = model_mean[2];
...
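The matrices assembled here ($\hat{V}_h$, $\bar{v}$, the estimated affine camera and the stacked 2D landmarks $y$) define a regularised linear least-squares problem in the shape coefficients $\alpha$, roughly of the form

$$
\hat{\alpha} = \arg\min_{\alpha}\; \bigl\| \mathrm{proj}\bigl(\hat{V}_h\,\alpha + \bar{v}\bigr) - y \bigr\|^2 + \lambda\,\|\alpha\|^2 ,
$$

where proj applies the affine camera to each homogeneous model point and $\lambda$ is the regularisation parameter passed by the caller. Since the projection is linear in $\alpha$, this is ridge regression with a closed-form solution of the form $\hat{\alpha} = (A^\top A + \lambda I)^{-1} A^\top b$ for suitable $A$ and $b$. This is a sketch of the idea only; the actual implementation may additionally weight landmarks or rescale by the basis normalisation.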
@@ -33,7 +33,7 @@
* Forward declaration of an internal function
*/
namespace eos { namespace morphablemodel { namespace detail {
- eos::render::Mesh sampleToMesh(cv::Mat shape, cv::Mat color, std::vector<std::array<int, 3>> tvi, std::vector<std::array<int, 3>> tci, std::vector<cv::Vec2f> textureCoordinates = std::vector<cv::Vec2f>());
+ eos::render::Mesh sample_to_mesh(cv::Mat shape, cv::Mat color, std::vector<std::array<int, 3>> tvi, std::vector<std::array<int, 3>> tci, std::vector<cv::Vec2f> textureCoordinates = std::vector<cv::Vec2f>());
} } }
namespace eos {
@@ -58,7 +58,7 @@ public:
* @param[in] colorModel A PCA model over the color (albedo).
* @param[in] textureCoordinates Optional texture coordinates for every vertex.
*/
- MorphableModel(PcaModel shapeModel, PcaModel colorModel, std::vector<cv::Vec2f> textureCoordinates = std::vector<cv::Vec2f>()) : shapeModel(shapeModel), colorModel(colorModel), textureCoordinates(textureCoordinates)
+ MorphableModel(PcaModel shape_model, PcaModel color_model, std::vector<cv::Vec2f> texture_coordinates = std::vector<cv::Vec2f>()) : shape_model(shape_model), color_model(color_model), texture_coordinates(texture_coordinates)
{
};
@@ -68,9 +68,9 @@ public:
*
* @return The shape model.
*/
- PcaModel getShapeModel() const
+ PcaModel get_shape_model() const
{
- return shapeModel;
+ return shape_model;
};
/**
@@ -78,9 +78,9 @@ public:
*
* @return The color model.
*/
- PcaModel getColorModel() const
+ PcaModel get_color_model() const
{
- return colorModel;
+ return color_model;
};
/**
@@ -88,19 +88,19 @@ public:
*
* @return An mesh instance of the mean of the Morphable Model.
*/
- render::Mesh getMean() const
+ render::Mesh get_mean() const
{
- assert(shapeModel.getDataDimension() == colorModel.getDataDimension()); // The number of vertices (= model.getDataDimension() / 3) has to be equal for both models.
+ assert(shape_model.get_data_dimension() == color_model.get_data_dimension()); // The number of vertices (= model.getDataDimension() / 3) has to be equal for both models.
- cv::Mat shape = shapeModel.getMean();
+ cv::Mat shape = shape_model.get_mean();
- cv::Mat color = colorModel.getMean();
+ cv::Mat color = color_model.get_mean();
render::Mesh mesh;
- if (hasTextureCoordinates()) {
+ if (has_texture_coordinates()) {
- mesh = detail::sampleToMesh(shape, color, shapeModel.getTriangleList(), colorModel.getTriangleList(), textureCoordinates);
+ mesh = detail::sample_to_mesh(shape, color, shape_model.get_triangle_list(), color_model.get_triangle_list(), texture_coordinates);
}
else {
- mesh = detail::sampleToMesh(shape, color, shapeModel.getTriangleList(), colorModel.getTriangleList());
+ mesh = detail::sample_to_mesh(shape, color, shape_model.get_triangle_list(), color_model.get_triangle_list());
}
return mesh;
};
@@ -114,19 +114,19 @@ public:
* @param[in] colorSigma The color model standard deviation.
* @return A random sample from the model.
*/
- render::Mesh drawSample(float shapeSigma = 1.0f, float colorSigma = 1.0f)
+ render::Mesh draw_sample(float shapeSigma = 1.0f, float colorSigma = 1.0f)
{
- assert(shapeModel.getDataDimension() == colorModel.getDataDimension()); // The number of vertices (= model.getDataDimension() / 3) has to be equal for both models.
+ assert(shape_model.get_data_dimension() == color_model.get_data_dimension()); // The number of vertices (= model.getDataDimension() / 3) has to be equal for both models.
- cv::Mat shapeSample = shapeModel.drawSample(shapeSigma);
+ cv::Mat shapeSample = shape_model.draw_sample(shapeSigma);
- cv::Mat colorSample = colorModel.drawSample(colorSigma);
+ cv::Mat colorSample = color_model.draw_sample(colorSigma);
render::Mesh mesh;
- if (hasTextureCoordinates()) {
+ if (has_texture_coordinates()) {
- mesh = detail::sampleToMesh(shapeSample, colorSample, shapeModel.getTriangleList(), colorModel.getTriangleList(), textureCoordinates);
+ mesh = detail::sample_to_mesh(shapeSample, colorSample, shape_model.get_triangle_list(), color_model.get_triangle_list(), texture_coordinates);
}
else {
- mesh = detail::sampleToMesh(shapeSample, colorSample, shapeModel.getTriangleList(), colorModel.getTriangleList());
+ mesh = detail::sample_to_mesh(shapeSample, colorSample, shape_model.get_triangle_list(), color_model.get_triangle_list());
}
return mesh;
};
@@ -142,40 +142,40 @@ public:
* @param[in] colorCoefficients The PCA coefficients used to generate the shape sample.
* @return A model instance with given coefficients.
*/
- render::Mesh drawSample(std::vector<float> shapeCoefficients, std::vector<float> colorCoefficients)
+ render::Mesh draw_sample(std::vector<float> shapeCoefficients, std::vector<float> colorCoefficients)
{
- assert(shapeModel.getDataDimension() == colorModel.getDataDimension()); // The number of vertices (= model.getDataDimension() / 3) has to be equal for both models.
+ assert(shape_model.get_data_dimension() == color_model.get_data_dimension()); // The number of vertices (= model.getDataDimension() / 3) has to be equal for both models.
cv::Mat shapeSample;
cv::Mat colorSample;
if (shapeCoefficients.empty()) {
- shapeSample = shapeModel.getMean();
+ shapeSample = shape_model.get_mean();
}
else {
- shapeSample = shapeModel.drawSample(shapeCoefficients);
+ shapeSample = shape_model.draw_sample(shapeCoefficients);
}
if (colorCoefficients.empty()) {
- colorSample = colorModel.getMean();
+ colorSample = color_model.get_mean();
}
else {
- colorSample = colorModel.drawSample(colorCoefficients);
+ colorSample = color_model.draw_sample(colorCoefficients);
}
render::Mesh mesh;
- if (hasTextureCoordinates()) {
+ if (has_texture_coordinates()) {
- mesh = detail::sampleToMesh(shapeSample, colorSample, shapeModel.getTriangleList(), colorModel.getTriangleList(), textureCoordinates);
+ mesh = detail::sample_to_mesh(shapeSample, colorSample, shape_model.get_triangle_list(), color_model.get_triangle_list(), texture_coordinates);
}
else {
- mesh = detail::sampleToMesh(shapeSample, colorSample, shapeModel.getTriangleList(), colorModel.getTriangleList());
+ mesh = detail::sample_to_mesh(shapeSample, colorSample, shape_model.get_triangle_list(), color_model.get_triangle_list());
}
return mesh;
};
private:
- PcaModel shapeModel; ///< A PCA model of the shape
+ PcaModel shape_model; ///< A PCA model of the shape
- PcaModel colorModel; ///< A PCA model of vertex color information
+ PcaModel color_model; ///< A PCA model of vertex color information
- std::vector<cv::Vec2f> textureCoordinates; ///< uv-coordinates for every vertex
+ std::vector<cv::Vec2f> texture_coordinates; ///< uv-coordinates for every vertex
/**
* Returns whether the model has texture mapping coordinates, i.e.
@@ -183,8 +183,8 @@ private:
*
* @return True if the model contains texture mapping coordinates.
*/
- bool hasTextureCoordinates() const {
+ bool has_texture_coordinates() const {
- return textureCoordinates.size() > 0 ? true : false;
+ return texture_coordinates.size() > 0 ? true : false;
};
};
@@ -202,7 +202,7 @@ private:
* @param[in] textureCoordinates Optional texture coordinates for each vertex.
* @return A mesh created from given parameters.
*/
- eos::render::Mesh sampleToMesh(cv::Mat shape, cv::Mat color, std::vector<std::array<int, 3>> tvi, std::vector<std::array<int, 3>> tci, std::vector<cv::Vec2f> textureCoordinates /* = std::vector<cv::Vec2f>() */)
+ eos::render::Mesh sample_to_mesh(cv::Mat shape, cv::Mat color, std::vector<std::array<int, 3>> tvi, std::vector<std::array<int, 3>> tci, std::vector<cv::Vec2f> textureCoordinates /* = std::vector<cv::Vec2f>() */)
{
assert(shape.rows == color.rows); // The number of vertices (= model.getDataDimension() / 3) has to be equal for both models.
...
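A small usage sketch pulling the renamed MorphableModel API together (the include paths and file names are assumptions for illustration; loadScmModel and write_obj are used just as in the example program above):

```cpp
// Sketch only: header locations and file names are assumptions, not taken from this commit.
#include "eos/morphablemodel/MorphableModel.hpp"
#include "eos/morphablemodel/io/cvssp.hpp" // assumed location of loadScmModel
#include "eos/render/Mesh.hpp"

#include <vector>

int main()
{
	using namespace eos;

	// Load a Morphable Model from an .scm file plus an optional isomap (placeholder paths):
	morphablemodel::MorphableModel model = morphablemodel::loadScmModel("share/model.scm", "share/isomap.txt");

	// The mean face as a mesh:
	render::Mesh mean_mesh = model.get_mean();

	// A sample with the first two shape coefficients set and the mean colour:
	render::Mesh sample_mesh = model.draw_sample(std::vector<float>{1.0f, -0.5f}, std::vector<float>());

	render::write_obj(mean_mesh, "mean.obj");
	render::write_obj(sample_mesh, "sample.obj");
	return 0;
}
```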
@@ -35,8 +35,8 @@ namespace eos {
/**
* Forward declarations of free functions
*/
- cv::Mat normalisePcaBasis(cv::Mat unnormalisedBasis, cv::Mat eigenvalues);
+ cv::Mat normalise_pca_basis(cv::Mat unnormalisedBasis, cv::Mat eigenvalues);
- cv::Mat unnormalisePcaBasis(cv::Mat normalisedBasis, cv::Mat eigenvalues);
+ cv::Mat unnormalise_pca_basis(cv::Mat normalisedBasis, cv::Mat eigenvalues);
/**
* This class represents a PCA-model that consists of:
@@ -60,15 +60,15 @@ public:
* be arranged.
*
* @param[in] mean The mean used to build the PCA model.
- * @param[in] pcaBasis The PCA basis (eigenvectors), normalised (multiplied by the eigenvalues).
+ * @param[in] pca_basis The PCA basis (eigenvectors), normalised (multiplied by the eigenvalues).
* @param[in] eigenvalues The eigenvalues used to build the PCA model.
- * @param[in] triangleList An index list of how to assemble the mesh.
+ * @param[in] triangle_list An index list of how to assemble the mesh.
*/
- PcaModel(cv::Mat mean, cv::Mat pcaBasis, cv::Mat eigenvalues, std::vector<std::array<int, 3>> triangleList) : mean(mean), normalisedPcaBasis(pcaBasis), eigenvalues(eigenvalues), triangleList(triangleList)
+ PcaModel(cv::Mat mean, cv::Mat pca_basis, cv::Mat eigenvalues, std::vector<std::array<int, 3>> triangle_list) : mean(mean), normalised_pca_basis(pca_basis), eigenvalues(eigenvalues), triangle_list(triangle_list)
{
const auto seed = std::random_device()();
engine.seed(seed);
- unnormalisedPcaBasis = unnormalisePcaBasis(normalisedPcaBasis, eigenvalues);
+ unnormalised_pca_basis = unnormalise_pca_basis(normalised_pca_basis, eigenvalues);
};
/**
@@ -76,10 +76,10 @@ public:
*
* @return The number of principal components in the model.
*/
- int getNumberOfPrincipalComponents() const
+ int get_num_principal_components() const
{
// Note: we could assert(normalisedPcaBasis.cols==unnormalisedPcaBasis.cols)
- return normalisedPcaBasis.cols;
+ return normalised_pca_basis.cols;
};
/**
@@ -90,10 +90,10 @@ public:
*
* @return The dimension of the data.
*/
- int getDataDimension() const
+ int get_data_dimension() const
{
// Note: we could assert(normalisedPcaBasis.rows==unnormalisedPcaBasis.rows)
- return normalisedPcaBasis.rows;
+ return normalised_pca_basis.rows;
};
/**
@@ -101,9 +101,9 @@ public:
*
* @return The list of triangles to build a mesh.
*/
- std::vector<std::array<int, 3>> getTriangleList() const
+ std::vector<std::array<int, 3>> get_triangle_list() const
{
- return triangleList;
+ return triangle_list;
};
/**
@@ -111,7 +111,7 @@ public:
*
* @return The mean of the model.
*/
- cv::Mat getMean() const
+ cv::Mat get_mean() const
{
return mean;
};
@@ -122,13 +122,13 @@ public:
* @param[in] vertexIndex A vertex id.
* @return A homogeneous vector containing the values at the given vertex id.
*/
- cv::Vec4f getMeanAtPoint(int vertexIndex) const
+ cv::Vec4f get_mean_at_point(int vertex_index) const
{
- vertexIndex *= 3;
+ vertex_index *= 3;
- if (vertexIndex >= mean.rows) {
+ if (vertex_index >= mean.rows) {
throw std::out_of_range("The given vertex id is larger than the dimension of the mean.");
}
- return cv::Vec4f(mean.at<float>(vertexIndex), mean.at<float>(vertexIndex + 1), mean.at<float>(vertexIndex + 2), 1.0f);
+ return cv::Vec4f(mean.at<float>(vertex_index), mean.at<float>(vertex_index + 1), mean.at<float>(vertex_index + 2), 1.0f);
};
/**
@@ -138,17 +138,17 @@ public:
* @param[in] sigma The standard deviation.
* @return A random sample from the model.
*/
- cv::Mat drawSample(float sigma = 1.0f)
+ cv::Mat draw_sample(float sigma = 1.0f)
{
std::normal_distribution<float> distribution(0.0f, sigma); // this constructor takes the stddev
- std::vector<float> alphas(getNumberOfPrincipalComponents());
+ std::vector<float> alphas(get_num_principal_components());
for (auto&& a : alphas) {
a = distribution(engine);
}
- return drawSample(alphas);
+ return draw_sample(alphas);
};
/**
@@ -159,17 +159,17 @@ public:
* @param[in] coefficients The PCA coefficients used to generate the sample.
* @return A model instance with given coefficients.
*/
- cv::Mat drawSample(std::vector<float> coefficients)
+ cv::Mat draw_sample(std::vector<float> coefficients)
{
// Fill the rest with zeros if not all coefficients are given:
- if (coefficients.size() < getNumberOfPrincipalComponents()) {
+ if (coefficients.size() < get_num_principal_components()) {
- coefficients.resize(getNumberOfPrincipalComponents());
+ coefficients.resize(get_num_principal_components());
}
cv::Mat alphas(coefficients);
- cv::Mat modelSample = mean + normalisedPcaBasis * alphas;
+ cv::Mat model_sample = mean + normalised_pca_basis * alphas;
- return modelSample;
+ return model_sample;
};
/**
@@ -183,9 +183,9 @@ public:
*
* @return Returns the normalised PCA basis matrix.
*/
- cv::Mat getNormalisedPcaBasis() const
+ cv::Mat get_normalised_pca_basis() const
{
- return normalisedPcaBasis.clone();
+ return normalised_pca_basis.clone();
};
/**
@@ -196,10 +196,10 @@ public:
* @param[in] vertexId A vertex index. Make sure it is valid.
* @return A Mat that points to the rows in the original basis.
*/
- cv::Mat getNormalisedPcaBasis(int vertexId) const
+ cv::Mat get_normalised_pca_basis(int vertex_id) const
{
- vertexId *= 3; // the basis is stored in the format [x y z x y z ...]
+ vertex_id *= 3; // the basis is stored in the format [x y z x y z ...]
- return normalisedPcaBasis.rowRange(vertexId, vertexId + 3);
+ return normalised_pca_basis.rowRange(vertex_id, vertex_id + 3);
};
/**
@@ -212,9 +212,9 @@ public:
*
* @return Returns the unnormalised PCA basis matrix.
*/
- cv::Mat getUnnormalisedPcaBasis() const
+ cv::Mat get_unnormalised_pca_basis() const
{
- return unnormalisedPcaBasis.clone();
+ return unnormalised_pca_basis.clone();
};
/**
@@ -224,10 +224,10 @@ public:
* @param[in] vertexId A vertex index. Make sure it is valid.
* @return A Mat that points to the rows in the original basis.
*/
- cv::Mat getUnnormalisedPcaBasis(int vertexId) const
+ cv::Mat get_unnormalised_pca_basis(int vertex_id) const
{
- vertexId *= 3; // the basis is stored in the format [x y z x y z ...]
+ vertex_id *= 3; // the basis is stored in the format [x y z x y z ...]
- return unnormalisedPcaBasis.rowRange(vertexId, vertexId + 3);
+ return unnormalised_pca_basis.rowRange(vertex_id, vertex_id + 3);
};
/**
@@ -236,7 +236,7 @@ public:
* @param[in] index The index of the eigenvalue to return.
* @return The eigenvalue.
*/
- float getEigenvalue(int index) const
+ float get_eigenvalue(int index) const
{
return eigenvalues.at<float>(index);
};
@@ -245,11 +245,11 @@ private:
std::mt19937 engine; ///< Random number engine used to draw random coefficients.
cv::Mat mean; ///< A 3m x 1 col-vector (xyzxyz...)', where m is the number of model-vertices.
- cv::Mat normalisedPcaBasis; ///< The normalised PCA basis matrix. m x n (rows x cols) = numShapeDims x numShapePcaCoeffs, (=eigenvector matrix V). Each column is an eigenvector.
+ cv::Mat normalised_pca_basis; ///< The normalised PCA basis matrix. m x n (rows x cols) = numShapeDims x numShapePcaCoeffs, (=eigenvector matrix V). Each column is an eigenvector.
- cv::Mat unnormalisedPcaBasis; ///< The unnormalised PCA basis matrix. m x n (rows x cols) = numShapeDims x numShapePcaCoeffs, (=eigenvector matrix V). Each column is an eigenvector.
+ cv::Mat unnormalised_pca_basis; ///< The unnormalised PCA basis matrix. m x n (rows x cols) = numShapeDims x numShapePcaCoeffs, (=eigenvector matrix V). Each column is an eigenvector.
cv::Mat eigenvalues; ///< A col-vector of the eigenvalues (variances in the PCA space).
- std::vector<std::array<int, 3>> triangleList; ///< List of triangles that make up the mesh of the model.
+ std::vector<std::array<int, 3>> triangle_list; ///< List of triangles that make up the mesh of the model.
};
@@ -263,7 +263,7 @@ private:
* @param[in] eigenvalues A row or column vector of eigenvalues.
* @return The normalised PCA basis matrix.
*/
- inline cv::Mat normalisePcaBasis(cv::Mat unnormalisedBasis, cv::Mat eigenvalues)
+ inline cv::Mat normalise_pca_basis(cv::Mat unnormalisedBasis, cv::Mat eigenvalues)
{
using cv::Mat;
Mat normalisedPcaBasis(unnormalisedBasis.size(), unnormalisedBasis.type()); // empty matrix with the same dimensions
@@ -290,7 +290,7 @@ inline cv::Mat normalisePcaBasis(cv::Mat unnormalisedBasis, cv::Mat eigenvalues)
* @param[in] eigenvalues A row or column vector of eigenvalues.
* @return The unnormalised PCA basis matrix.
*/
- inline cv::Mat unnormalisePcaBasis(cv::Mat normalisedBasis, cv::Mat eigenvalues)
+ inline cv::Mat unnormalise_pca_basis(cv::Mat normalisedBasis, cv::Mat eigenvalues)
{
using cv::Mat;
Mat unnormalisedBasis(normalisedBasis.size(), normalisedBasis.type()); // empty matrix with the same dimensions
...
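A note on the normalise_pca_basis / unnormalise_pca_basis pair declared above: writing the unit-norm eigenvectors as columns $\mathbf{v}_i$ with eigenvalues $\lambda_i$, the normalised basis rescales each column so that unit coefficients correspond to one standard deviation along that component (a sketch, assuming the common $\sqrt{\lambda_i}$ convention):

$$
V_{\text{norm}} = \bigl[\, \sqrt{\lambda_1}\,\mathbf{v}_1 \;\; \cdots \;\; \sqrt{\lambda_n}\,\mathbf{v}_n \,\bigr],
\qquad
\mathbf{s} = \bar{\mathbf{v}} + V_{\text{norm}}\,\alpha = \bar{\mathbf{v}} + \sum_i \alpha_i \sqrt{\lambda_i}\,\mathbf{v}_i ,
$$

which is why draw_sample(sigma) can simply draw each $\alpha_i$ from $\mathcal{N}(0, \sigma^2)$ and compute mean + normalised_pca_basis * alphas, as in the code above.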
@@ -158,7 +158,7 @@ MorphableModel loadScmModel(boost::filesystem::path modelFilename, boost::filesy
}
// We read the unnormalised basis from the file. Now let's normalise it and store the normalised basis separately.
- Mat normalisedPcaBasisShape = normalisePcaBasis(unnormalisedPcaBasisShape, eigenvaluesShape);
+ Mat normalisedPcaBasisShape = normalise_pca_basis(unnormalisedPcaBasisShape, eigenvaluesShape);
PcaModel shapeModel(meanShape, normalisedPcaBasisShape, eigenvaluesShape, triangleList);
// Reading the color model
@@ -207,7 +207,7 @@ MorphableModel loadScmModel(boost::filesystem::path modelFilename, boost::filesy
}
// We read the unnormalised basis from the file. Now let's normalise it and store the normalised basis separately.
- Mat normalisedPcaBasisColor = normalisePcaBasis(unnormalisedPcaBasisColor, eigenvaluesColor);
+ Mat normalisedPcaBasisColor = normalise_pca_basis(unnormalisedPcaBasisColor, eigenvaluesColor);
PcaModel colorModel(meanColor, normalisedPcaBasisColor, eigenvaluesColor, triangleList);
modelFile.close();
@@ -216,7 +216,7 @@ MorphableModel loadScmModel(boost::filesystem::path modelFilename, boost::filesy
std::vector<cv::Vec2f> texCoords;
if (!isomapFile.empty()) {
texCoords = loadIsomap(isomapFile);
- if (shapeModel.getDataDimension() / 3.0f != texCoords.size()) {
+ if (shapeModel.get_data_dimension() / 3.0f != texCoords.size()) {
std::string errorMessage("Error, wrong number of texture coordinates. Don't have the same number of texcoords than the shape model has vertices.");
std::cout << errorMessage << std::endl;
throw std::runtime_error(errorMessage);
...