Commit 515b33c9 authored by Richard Torenvliet

Refactoring, WIP

parent 059f1949
Showing changed files with 11344 additions and 75 deletions
data/*
src/.cache/*
src/*/.cache
src/utils/*.c
src/utils/*.o
src/utils/*.so
src/utils/build/
src/reconstruction/*.c
*.o
*.so
src/reconstruction/build/
@@ -5,16 +5,16 @@ SITE_PACKAGES := $(VIRTUALENV)/lib/$(PYTHON)/site-packages
OPENCV:= $(SITE_PACKAGES)/cv.py $(SITE_PACKAGES)/cv2.so
TARGETS:= $(OPENCV) $(VIRTUALENV) data utils
TARGETS:= $(OPENCV) $(VIRTUALENV) data reconstruction
all: $(TARGETS)
include actions.mk
data: data/imm_face_db
utils: texture.so
reconstruction: texture.so
texture.so: src/utils/texture.pyx
(cd src/utils; python setup.py build_ext --inplace)
texture.so: src/reconstruction/texture.pyx
(cd src/reconstruction; python setup.py build_ext --inplace)
build: requirements.txt
......
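# A minimal setup.py sketch for the build_ext step above (an assumption for
# illustration; the actual src/reconstruction/setup.py is not shown in this
# diff). cythonize() compiles texture.pyx into the texture.so extension the
# Makefile targets expect; add numpy's include dir via an Extension if the
# .pyx file uses the numpy C API.
from distutils.core import setup
from Cython.Build import cythonize

setup(ext_modules=cythonize('texture.pyx'))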
@@ -5,8 +5,8 @@ import cv2
# local imports
import pca
import utils.texture as tx
from utils import utils
import reconstruction.texture as tx
logging.basicConfig(
level=logging.INFO,
......
@@ -6,7 +6,8 @@ import aam
import pca
import imm_points as imm
from utils import triangles as tri
from reconstruction import triangles as tri
def test_build_mean_aan():
imm_points = np.array([
......
@@ -11,7 +11,7 @@ import pca
import aam
import imm_points as imm
from utils import utils
from reconstruction import reconstruction
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(name)s: %(message)s')
@@ -177,7 +177,7 @@ def show_pca_model(args):
assert args.model_shape_file, '--model_shape_file needs to be provided to save the pca model'
assert args.model_texture_file, '--model_texture_file needs to be provided to save the pca model'
from utils.triangles import draw_shape, get_texture
from reconstruction.triangles import draw_shape, get_texture
Vt_shape, s, n_shape_components, mean_value_points, triangles = pca.load(args.model_shape_file)
Vt_texture, s_texture, n_texture_components, mean_values_texture, _ = pca.load(args.model_texture_file)
@@ -211,30 +211,33 @@ def show_reconstruction(args):
assert args.model_shape_file, '--model_shape_file needs to be provided to save the pca model'
assert args.model_texture_file, '--model_texture_file needs to be provided to save the pca model'
Vt_shape, s, n_shape_components, mean_value_points, triangles = pca.load(args.model_shape_file)
Vt_texture, s_texture, n_texture_components, mean_values_texture, _ = pca.load(args.model_texture_file)
# Vt_shape, s, n_shape_components, mean_value_points, triangles = pca.load(args.model_shape_file)
# Vt_texture, s_texture, n_texture_components, mean_values_texture, _ = pca.load(args.model_texture_file)
shape_model = pca.PcaModel(args.model_shape_file)
texture_model = pca.PcaModel(args.model_texture_file)
InputPoints = imm.IMMPoints(filename='data/imm_face_db/40-3m.asf')
input_image = InputPoints.get_image()
input_points = imm.IMMPoints(filename='data/imm_face_db/40-3m.asf')
input_image = input_points.get_image()
MeanPoints = imm.IMMPoints(points_list=mean_value_points)
MeanPoints.get_scaled_points(input_image.shape)
mean_points = imm.IMMPoints(points_list=shape_model.mean_values)
mean_points.get_scaled_points(input_image.shape)
while True:
utils.reconstruct_texture(
reconstruction.reconstruct_texture(
input_image, # src image
input_image, # dst image
Vt_texture, # Vt
InputPoints, # shape points input
MeanPoints, # shape points mean
mean_values_texture, # mean texture
triangles, # triangles
n_texture_components # learned n_texture_components
texture_model,
#Vt_texture, # Vt
input_points, # shape points input
mean_points, # shape points mean
#mean_values_texture, # mean texture
#triangles, # triangles
#n_texture_components # learned n_texture_components
)
dst = utils.get_texture(MeanPoints, mean_values_texture)
dst = reconstruction.get_texture(mean_points, texture_model.mean_values)
cv2.imshow('original', InputPoints.get_image())
cv2.imshow('original', input_points.get_image())
cv2.imshow('reconstructed', input_image)
cv2.imshow('main face', dst)
......
import numpy as np
class PcaModel:
"""Abstraction for a pca model"""
def __init__(self, model_file):
Vtm = np.load(model_file)
self.Vt = Vtm[0]
self.s = Vtm[1]
self.n_components = Vtm[2]
self.mean_values = Vtm[3][0]
self.triangles = Vtm[4]
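# A minimal usage sketch (illustrative; the model path below is hypothetical):
# PcaModel gives named access to the values that pca.load() returns as a
# positional five-element tuple, which is what the refactored
# show_reconstruction relies on.
shape_model = PcaModel('data/pca_shape_model.npy')
print(shape_model.n_components, shape_model.Vt.shape, shape_model.mean_values.shape)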
def pca(data, mean_values, variance_percentage=90):
"""
Perform Singular Value Decomposition
@@ -98,6 +109,11 @@ def load(filename):
return Vt, s, n_components, mean_values, triangles
#def load_model(filename):
# # load the stored model file
# return PcaModel(filename)
def flatten_feature_vectors(data, dim=0):
"""
Flattens the feature vectors inside a ndarray
......
File moved
import numpy as np
import cv2
from .texture import fill_triangle, fill_triangle_src_dst
import numpy as np
import pca
import aam
from .texture import fill_triangle_src_dst
def cartesian2barycentric(r1, r2, r3, r):
"""
@@ -64,8 +64,8 @@ def draw_shape(image, points, triangles, multiply=True):
for i, p in enumerate(points):
point_index = int(point_indices[i])
#cv2.putText(image, str(point_index), (p[0], p[1]),
# cv2.FONT_HERSHEY_SIMPLEX, .5, (100, 0, 255))
cv2.putText(image, str(point_index), (p[0], p[1]),
cv2.FONT_HERSHEY_SIMPLEX, .5, (100, 0, 255))
cv2.putText(image, str(i), (p[0], p[1]),
cv2.FONT_HERSHEY_SIMPLEX, .5, (100, 0, 255))
cv2.circle(image, tuple(p), 3, color=(0, 255, 100))
@@ -78,34 +78,45 @@ def get_texture(Points, flattened_texture):
return np.asarray(flattened_texture, np.uint8).reshape((h_slice, w_slice, 3))
def reconstruct_texture(src, dst, Vt, SrcPoints, DstPoints,
mean_texture, triangles, n_components):
def reconstruct_texture(src_image, dst_image, texture_model, src_points, dst_points):
"""
Reconstruct texture given the src and dst images
Args:
src_points(aam.AAMPoints)
dst_points(aam.AAMPoints)
"""
Vt = texture_model.Vt
triangles = texture_model.triangles
mean_texture = texture_model.mean_values
# n_components = texture_model.n_components
# S_mean format
h, w, c = src.shape
h, w, c = src_image.shape
input_texture = np.full((h, w, 3), fill_value=0, dtype=np.uint8)
points2d_src = SrcPoints.get_scaled_points(src.shape)
points2d_dst = DstPoints.get_scaled_points(dst.shape)
points2d_src = src_points.get_scaled_points(src_image.shape)
points2d_dst = dst_points.get_scaled_points(dst_image.shape)
aam.sample_from_triangles(
src,
src_image,
points2d_src,
points2d_dst,
triangles,
input_texture
)
offset_x, offset_y, w_slice, h_slice = DstPoints.get_bounding_box()
offset_x, offset_y, w_slice, h_slice = dst_points.get_bounding_box()
input_texture = input_texture[offset_y: offset_y + h_slice,
offset_x: offset_x + w_slice].flatten()
## Still in S_mean format
# Still in S_mean format
r_texture = pca.reconstruct(input_texture, Vt, mean_texture)
## Make an image from the float data
# Make an image from the float data
r_texture = np.asarray(r_texture, np.uint8).reshape((h_slice, w_slice, 3))
### subtract the offset
# subtract the offset
points2d_dst[:, 0] -= offset_x
points2d_dst[:, 1] -= offset_y
@@ -114,7 +125,7 @@ def reconstruct_texture(src, dst, Vt, SrcPoints, DstPoints,
dst_p1, dst_p2, dst_p3 = points2d_dst[tri]
fill_triangle_src_dst(
r_texture, dst,
r_texture, dst_image,
dst_p1[0], dst_p1[1],
dst_p2[0], dst_p2[1],
dst_p3[0], dst_p3[1],
......
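# A minimal sketch of the PCA reconstruction step used above (an assumption
# about pca.reconstruct, shown for clarity; the names here are illustrative):
# project the centered texture vector onto the component rows of Vt, then map
# the weights back and re-add the mean.
import numpy as np

def reconstruct_sketch(feature_vector, Vt, mean_values):
    weights = np.dot(Vt, feature_vector - mean_values)  # project onto components
    return mean_values + np.dot(Vt.T, weights)          # back-project to pixel space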
File moved
Source diff could not be displayed: it is too large.
File moved
@@ -4,6 +4,7 @@ import base64
from glob import glob
from tornado import websocket, web, ioloop, autoreload
from reconstruction import reconstruction
import imm_points as imm
@@ -38,14 +39,6 @@ class ImageWebSocketHandler(websocket.WebSocketHandler):
def open(self):
print("WebSocket opened")
#self.write_message(
# json.dumps({
# 'n_images': len(self.images),
# 'image': self.__get_base64_image(self.images[0])
# }
#))
#self.write_message(json.dumps({'n_images': len(self.images)}))
def __return_error(self, message):
self.write_message(json.dumps(
@@ -53,15 +46,19 @@ class ImageWebSocketHandler(websocket.WebSocketHandler):
))
def handle_return_reconstruction(self, message):
""" Return the reconstruction of the given image """
image_index = message['reconstruction_index']
filename = self.images[image_index]
input_points = self.asf[image_index]
image = self.__get_base64_image(filename)
self.write_message(json.dumps({'reconstructed': image}))
reconstructed = reconstruction.reconstruct_texture(image)
self.write_message(json.dumps({'reconstructed': reconstructed}))
def handle_return_image(self, message):
filename = message['filename']
#filename = self.images[image_index]
image = self.__get_base64_image(filename)
self.write_message(json.dumps({'image': image}))
......
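# A minimal sketch of the __get_base64_image helper the handlers above call
# (an assumption; its body is not shown in this diff): read the image file and
# return a base64 string for the JSON payload, matching the
# data:image/jpg;base64 <img> source used in the Ember template.
import base64

def get_base64_image(filename):
    with open(filename, 'rb') as f:
        return base64.b64encode(f.read()).decode('ascii')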
File deleted
@@ -3,8 +3,9 @@ import Ember from 'ember';
const { get, inject } = Ember;
export default Ember.Controller.extend({
title: 'title',
websockets: inject.service(),
title: 'title',
faces: null,
image: null,
@@ -34,10 +35,15 @@ export default Ember.Controller.extend({
socket.off('open', this.openHandler);
socket.off('message', this.messageHandler);
socket.off('close', this.closeHandler);
console.log('Websockets: Removed all handlers');
},
openHandler(event) {
console.log(`On open event has been called: ${event}`);
// get the reconstruction right after the socket opened
this.send('getReconstruction');
},
messageHandler(event) {
@@ -47,10 +53,6 @@ export default Ember.Controller.extend({
this.set('n_images', message.n_images);
}
//if (message.image) {
// this.set('image', message.image);
//}
if (message.reconstructed) {
this.set('reconstructed', message.reconstructed);
}
@@ -59,14 +61,11 @@ export default Ember.Controller.extend({
console.log(message.error);
}
//this.get('store').createRecord('face', {
// filename: 'Derp',
// shape: [1, 2, 3, 4, 5]
//});
this.set('loading', false);
},
getReconstruction: Ember.computed('faces', function() {
console.log(this.get('faces'));
getReconstruction: Ember.observer('image_index', function() {
this.send('getReconstruction');
}),
closeHandler(event) {
@@ -75,6 +74,8 @@ export default Ember.Controller.extend({
actions: {
getImage(faceModel) {
this.set('loading', true);
var filename = faceModel.get('filename');
const socket = this.get('socketRef');
@@ -84,6 +85,8 @@ export default Ember.Controller.extend({
},
getReconstruction() {
this.set('loading', true);
const socket = this.get('socketRef');
socket.send(
......
canvas { width: 100%; height: 100% }
.empty-image-container {
width: 100%;
height: 480px;
}
{{# if current_face_filename }}
<img src='{{current_face_filename}}' alt='missing original'>
{{else}}
<div class="table">
<div class="table-cell align-middle">
{{fa-icon "spinner" spin=true size='lg'}}
Loading..
</div>
</div>
{{/if}}
{{yield}}
@@ -7,7 +7,14 @@
</div>
<div class="col col-6">
{{#if reconstructed }}
{{#if loading }}
<div class="table">
<div class="table-cell align-middle">
{{fa-icon "spinner" spin=true size='lg'}}
Loading..
</div>
</div>
{{else if reconstructed }}
<img src='data:image/jpg;base64,{{reconstructed}}'
alt='missing image'>
{{/if}}
@@ -18,11 +25,3 @@
{{three-js-reconstruction update=(action 'updateComponentConnector')}}
</div>
</div>
<div class="clearfix">
<div class="mx-auto col-2">
<button class="btn btn-primary" {{action "getReconstruction"}}>
Show reconstruction
</button>
</div>
</div>
@@ -6,6 +6,7 @@
"ember-cli-test-loader": "0.2.2",
"ember-qunit-notifications": "0.1.0",
"urijs": "^1.18.1",
"basscss": "~8.0.1"
"basscss": "~8.0.1",
"font-awesome": "~4.5.0"
}
}
@@ -38,6 +38,7 @@
"ember-cli-uglify": "^1.2.0",
"ember-data": "^2.6.0",
"ember-export-application-global": "^1.0.5",
"ember-font-awesome": "2.1.1",
"ember-load-initializers": "^0.5.1",
"ember-resolver": "^2.0.3",
"ember-websockets": "4.0.1",
......