Commit 949c8db1 authored by Richard Torenvliet

First version testing the multi-frame chain with bufferedVideoIterator

parent 6690296f
...@@ -84,6 +84,29 @@ namespace eos { ...@@ -84,6 +84,29 @@ namespace eos {
return landmarks; return landmarks;
} }
/**
* Read strings from a given file and return them as a vector of strings.
*
* @param filename
* @return
*/
/**
 * Read lines from a given file and return them as a vector of strings.
 *
 * @param filename Path of the file to read (e.g. an annotation list file).
 * @return One vector entry per line, in file order; empty vector for an empty file.
 * @throws std::runtime_error if the file cannot be opened.
 */
std::vector<std::string> file_to_string_vector(std::string filename) {
    std::ifstream file(filename);
    if (!file.is_open()) {
        // filename is already a std::string, so plain concatenation suffices.
        throw std::runtime_error("Could not open annotation list file: " + filename);
    }

    std::vector<std::string> output;
    std::string line;
    // Removed the leftover debug print of every line; a loader utility
    // should not write to stdout.
    while (std::getline(file, line)) {
        output.push_back(line);
    }

    return output;
}
/** /**
* Helper function, gathers matching model_points with given landmarks and LandmarkMapper. * Helper function, gathers matching model_points with given landmarks and LandmarkMapper.
* *
...@@ -115,22 +138,32 @@ namespace eos { ...@@ -115,22 +138,32 @@ namespace eos {
} }
/** /**
* Load annotations, return all annotations as image points (vectors of Vec2f). * Load annotations, return all annotations as a vector of LandmarkCollection (vectors of Vec2f).
* Supports either a given template, any cv::Vec2f / Eigen::Vector2f will work.
*
* Returns the annotations as a list of files, useful for later use if read_from_file flag is set to true.
* This flag will read the filepaths from the given annotation file, probably depends on the command line args.
* *
* @tparam vec2f
* @param annotations * @param annotations
* @param mappingsfile * @param mapper
* @throws std::runtime_error in case of faulty annotation file * @param morphable_model
* @return std::vector<std::vector<cv::Vec2f>> image_points in a vector of OpenCV float pairs (Vec2f). * @param read_from_file
* @return
*/ */
template<typename vec2f, typename vec3f> template<typename vec2f>
std::vector<core::LandmarkCollection<vec2f>> load_annotations(std::vector <std::string> annotations, fs::path mappingsfile) { std::pair <std::vector<core::LandmarkCollection<vec2f>>, std::vector<std::string>>
//std::vector <std::vector<vec2f>> image_points; // the corresponding 2D landmark points of all annotation files. load_annotations(
eos::core::LandmarkMapper landmark_mapper = eos::core::LandmarkMapper(mappingsfile); std::vector<std::string>annotations,
core::LandmarkMapper mapper, morphablemodel::MorphableModel morphable_model, bool read_from_file = false) {
std::vector<core::LandmarkCollection<vec2f>> landmark_collection_list; std::vector<core::LandmarkCollection<vec2f>> landmark_collection_list;
// These will be the final 2D and 3D points used for the fitting: // load file names from one file.
std::vector <vec3f> model_points; // the points in the 3D shape model std::vector<std::string> annotation_files;
std::vector<int> vertex_indices; // their vertex indices
if (read_from_file) {
annotations = file_to_string_vector(annotations[0]);
}
for (int i = 0; i < annotations.size(); i++) { for (int i = 0; i < annotations.size(); i++) {
eos::core::LandmarkCollection <vec2f> landmarks; eos::core::LandmarkCollection <vec2f> landmarks;
...@@ -142,13 +175,17 @@ namespace eos { ...@@ -142,13 +175,17 @@ namespace eos {
throw std::runtime_error("Error reading the landmarks: " + annotations[i]); throw std::runtime_error("Error reading the landmarks: " + annotations[i]);
} }
core::LandmarkCollection<vec2f> image_points;
// Sub-select all the landmarks which we have a mapping for (i.e. that are defined in the 3DMM): // Sub-select all the landmarks which we have a mapping for (i.e. that are defined in the 3DMM):
for (int j = 0; j < landmarks.size(); j++) { for (int i = 0; i < landmarks.size(); ++i) {
landmark_collection_list.emplace_back(landmarks); image_points.emplace_back(landmarks[i]);
} }
landmark_collection_list.push_back(image_points);
} }
return landmark_collection_list; return std::make_pair(landmark_collection_list, annotations);
} }
} }
} }
......
...@@ -464,6 +464,7 @@ inline std::pair<std::vector<core::Mesh>, std::vector<fitting::RenderingParamete ...@@ -464,6 +464,7 @@ inline std::pair<std::vector<core::Mesh>, std::vector<fitting::RenderingParamete
std::vector<float>& pca_shape_coefficients, std::vector<float>& pca_shape_coefficients,
std::vector<std::vector<float>>& blendshape_coefficients, std::vector<std::vector<float>>& blendshape_coefficients,
std::vector<std::vector<cv::Vec2f>>& fitted_image_points) { std::vector<std::vector<cv::Vec2f>>& fitted_image_points) {
assert(blendshapes.size() > 0); assert(blendshapes.size() > 0);
assert(num_iterations > 0); // Can we allow 0, for only the initial pose-fit? assert(num_iterations > 0); // Can we allow 0, for only the initial pose-fit?
assert(pca_shape_coefficients.size() <= morphable_model.get_shape_model().get_num_principal_components()); assert(pca_shape_coefficients.size() <= morphable_model.get_shape_model().get_num_principal_components());
...@@ -475,7 +476,6 @@ inline std::pair<std::vector<core::Mesh>, std::vector<fitting::RenderingParamete ...@@ -475,7 +476,6 @@ inline std::pair<std::vector<core::Mesh>, std::vector<fitting::RenderingParamete
using Eigen::VectorXf; using Eigen::VectorXf;
using Eigen::MatrixXf; using Eigen::MatrixXf;
if (!num_shape_coefficients_to_fit) { if (!num_shape_coefficients_to_fit) {
num_shape_coefficients_to_fit = morphable_model.get_shape_model().get_num_principal_components(); num_shape_coefficients_to_fit = morphable_model.get_shape_model().get_num_principal_components();
} }
...@@ -691,6 +691,26 @@ inline std::pair<std::vector<core::Mesh>, std::vector<fitting::RenderingParamete ...@@ -691,6 +691,26 @@ inline std::pair<std::vector<core::Mesh>, std::vector<fitting::RenderingParamete
); );
}; };
/**
 * @brief Multi-image shape and pose fitting (version 2) — NOT YET IMPLEMENTED.
 *
 * NOTE(review): the original body was empty, so control flowed off the end of a
 * non-void function — undefined behaviour in C++. Until the implementation
 * lands, return an empty result pair explicitly.
 *
 * @param[in] morphable_model The 3D Morphable Model used for fitting.
 * @param[in] blendshapes Expression blendshapes — presumably added on top of the PCA shape; confirm when implementing.
 * @param[in] landmarks One 2D landmark collection per image.
 * @param[in] landmark_mapper Maps landmark identifiers to model vertex indices.
 * @param[in] image_width Width of the input images, in pixels.
 * @param[in] image_height Height of the input images, in pixels.
 * @param[in] num_images Number of images being fitted jointly.
 * @param[in] edge_topology Edge topology of the model, for contour fitting.
 * @param[in] contour_landmarks 2D image contour landmark definitions.
 * @param[in] model_contour Model contour vertex definitions.
 * @param[in] num_iterations Number of fitting iterations to run.
 * @param[in] num_shape_coefficients_to_fit Number of PCA shape coefficients to fit, or none for all.
 * @param[in] lambda Shape-fitting regularisation weight.
 * @param[in] initial_rendering_params Optional initial pose estimate.
 * @param[in,out] pca_shape_coefficients Estimated PCA shape coefficients.
 * @param[in,out] blendshape_coefficients Per-image blendshape coefficients.
 * @param[out] fitted_image_points Per-image 2D points used in the final iteration.
 * @return Currently always an empty pair of meshes and rendering parameters.
 */
inline std::pair<std::vector<core::Mesh>, std::vector<fitting::RenderingParameters>> fit_shape_and_pose_multi_2(
    const morphablemodel::MorphableModel& morphable_model,
    const std::vector<morphablemodel::Blendshape>& blendshapes,
    const std::vector<core::LandmarkCollection<cv::Vec2f>>& landmarks,
    const core::LandmarkMapper& landmark_mapper,
    int image_width,
    int image_height,
    int num_images,
    const morphablemodel::EdgeTopology& edge_topology,
    const fitting::ContourLandmarks& contour_landmarks,
    const fitting::ModelContour& model_contour,
    int num_iterations,
    boost::optional<int> num_shape_coefficients_to_fit,
    float lambda,
    boost::optional<fitting::RenderingParameters> initial_rendering_params,
    std::vector<float>& pca_shape_coefficients,
    std::vector<std::vector<float>>& blendshape_coefficients,
    std::vector<std::vector<cv::Vec2f>>& fitted_image_points) {
    // TODO: implement the multi-frame fitting chain (see fit_shape_and_pose_multi).
    return {};
}
} /* namespace fitting */ } /* namespace fitting */
} /* namespace eos */ } /* namespace eos */
......
...@@ -43,7 +43,7 @@ using namespace std; ...@@ -43,7 +43,7 @@ using namespace std;
namespace eos { namespace eos {
namespace video { namespace video {
// TODO: maybe move video iterator here.. or remove file. // TODO: maybe move video iterator here.. or remove file.
} }
} }
......
...@@ -58,9 +58,10 @@ public: ...@@ -58,9 +58,10 @@ public:
* @param frame * @param frame
* @param score * @param score
*/ */
Keyframe(float score, cv::Mat frame) { Keyframe(float score, cv::Mat frame, int frame_number) {
this->score = score; this->score = score;
this->frame = frame; this->frame = frame;
this->frame_number = frame_number;
} }
/** /**
...@@ -78,6 +79,7 @@ public: ...@@ -78,6 +79,7 @@ public:
float score = 0.0f; float score = 0.0f;
cv::Mat frame; cv::Mat frame;
fitting::FittingResult fitting_result; fitting::FittingResult fitting_result;
int frame_number;
}; };
/** /**
...@@ -319,19 +321,18 @@ public: ...@@ -319,19 +321,18 @@ public:
BufferedVideoIterator(std::string videoFilePath, int max_frames = 5, int min_frames = 4) { BufferedVideoIterator(std::string videoFilePath, int max_frames = 5, int min_frames = 4) {
std::ifstream file(videoFilePath); std::ifstream file(videoFilePath);
std::cout << "video file path: " << videoFilePath << std::endl;
if (!file.is_open()) { if (!file.is_open()) {
throw std::runtime_error("Error opening given file: " + videoFilePath); throw std::runtime_error("Error opening given file: " + videoFilePath);
} }
cv::VideoCapture cap(videoFilePath); // open video file cv::VideoCapture tmp_cap(videoFilePath); // open video file
if (!cap.isOpened()) { // check if we succeeded if (!tmp_cap.isOpened()) { // check if we succeeded
throw std::runtime_error("Could not play video"); throw std::runtime_error("Could not play video");
} }
width = static_cast<int>(cap.get(CV_CAP_PROP_FRAME_HEIGHT)); this->cap = tmp_cap;
height = static_cast<int>(cap.get(CV_CAP_PROP_FRAME_HEIGHT));
this->max_frames = max_frames; this->max_frames = max_frames;
this->min_frames = min_frames; this->min_frames = min_frames;
} }
...@@ -350,15 +351,22 @@ public: ...@@ -350,15 +351,22 @@ public:
cv::Mat frame; cv::Mat frame;
cap >> frame; cap >> frame;
if (frame.empty()) {
frame_buffer = next();
return frame_buffer;
}
// Pop if we exceeded max_frames. // Pop if we exceeded max_frames.
if (n_frames > max_frames) { if (n_frames > max_frames) {
frame_buffer.pop_front(); frame_buffer.pop_front();
n_frames--;
} }
float lap_score = static_cast<float>(variance_of_laplacian(frame)); float lap_score = static_cast<float>(variance_of_laplacian(frame));
if (lap_score < laplacian_threshold) { if (lap_score < laplacian_threshold) {
frame_buffer.push_back(Keyframe(lap_score, frame)); total_frames_processed++;
frame_buffer.push_back(Keyframe(lap_score, frame, total_frames_processed));
} }
n_frames++; n_frames++;
...@@ -379,11 +387,13 @@ private: ...@@ -379,11 +387,13 @@ private:
std::deque<Keyframe> frame_buffer; std::deque<Keyframe> frame_buffer;
// TODO: make set-able // TODO: make set-able
long total_frames_processed = 1;
// total frames in buffer // total frames in buffer
long n_frames = 1; long n_frames = 1;
// min frames to load at the start. // min frames to load at the start.
long min_frames = 3; long min_frames = 3;
// keep max_frames into the buffer. // keep max_frames into the buffer.
long max_frames = 5; long max_frames = 5;
...@@ -394,6 +404,78 @@ private: ...@@ -394,6 +404,78 @@ private:
double laplacian_threshold = 10000000; double laplacian_threshold = 10000000;
}; };
/**
 * @brief Merges isomaps from a live video with a weighted averaging, based
 * on the view angle of each vertex to the camera.
 *
 * An optional merge_threshold can be specified upon construction. Pixels with
 * a view-angle above that threshold will be completely discarded. All pixels
 * below the threshold are merged with a weighting based on its vertex view-angle.
 * Assumes the isomaps to be 512x512.
 */
class WeightedIsomapAveraging
{
public:
	/**
	 * @brief Constructs a new object that will hold the current averaged isomap and
	 * be able to add frames from a live video and merge them on-the-fly.
	 *
	 * The threshold means: Each triangle with a view angle smaller than the given angle will be used to merge.
	 * The default threshold (90°) means all triangles, as long as they're a little bit visible, are merged.
	 *
	 * @param[in] merge_threshold View-angle merge threshold, in degrees, from 0 to 90.
	 */
	WeightedIsomapAveraging(float merge_threshold = 90.0f)
	{
		assert(merge_threshold >= 0.f && merge_threshold <= 90.f);

		visibility_counter = cv::Mat::zeros(512, 512, CV_32SC1);
		merged_isomap = cv::Mat::zeros(512, 512, CV_32FC4);

		// The isomap alpha channel encodes the view angle: map 0° to 255 and 90° to 0.
		float alpha_thresh = (-255.f / 90.f) * merge_threshold + 255.f;
		if (alpha_thresh < 0.f) // could maybe happen due to float inaccuracies / rounding?
			alpha_thresh = 0.0f;
		threshold = static_cast<unsigned char>(alpha_thresh);
	}

	/**
	 * @brief Merges the given new isomap with all previously processed isomaps.
	 *
	 * @param[in] isomap The new isomap to add.
	 * @return The merged isomap of all images processed so far, as 8UC4.
	 */
	cv::Mat add_and_merge(const cv::Mat& isomap)
	{
		// Incremental per-pixel average:
		// merged = (old_average * seen_count + new_value) / (seen_count + 1)
		for (int row = 0; row < isomap.rows; ++row)
		{
			for (int col = 0; col < isomap.cols; ++col)
			{
				const cv::Vec4b& src_px = isomap.at<cv::Vec4b>(row, col);
				if (src_px[3] <= threshold)
				{
					continue; // pixel not visible in this frame's extracted isomap
				}
				cv::Vec4f& dst_px = merged_isomap.at<cv::Vec4f>(row, col);
				const int seen = visibility_counter.at<int>(row, col);
				for (int ch = 0; ch < 3; ++ch)
				{
					dst_px[ch] = (dst_px[ch] * seen + src_px[ch]) / (seen + 1);
				}
				dst_px[3] = 255; // once seen visible, the pixel stays visible
				++visibility_counter.at<int>(row, col);
			}
		}

		cv::Mat merged_isomap_uchar;
		merged_isomap.convertTo(merged_isomap_uchar, CV_8UC4);
		return merged_isomap_uchar;
	}

private:
	cv::Mat visibility_counter; // per-pixel count of frames where the pixel was visible
	cv::Mat merged_isomap;      // running average, CV_32FC4
	unsigned char threshold;    // minimum alpha value for a pixel to be merged
};
} /* namespace video */ } /* namespace video */
} /* namespace eos */ } /* namespace eos */
......
This diff is collapsed.
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment