Commit 1578f00a authored by Richard Torenvliet

Implement logging system and support debug levels, closes #6

parent f718b904
@@ -63,14 +63,18 @@ graph_reconstruction:
 	--shape_type imm \
 	--n_components 6
+.PHONY: test
 test:
 	python -m py.test -f src/test/*_test.py
+.PHONY: server
 server:
 	(cd src/; python -m tornado.autoreload server.py)
+.PHONY: ember
 ember:
 	(cd viewer; ember server);
+.PHONY: ctags
 ctags:
 	ctags -R --python-kinds=-i src
""" """
.. module:: active_appearance_model .. module:: active_appearance_model
:platform: Unix, Windows :platform: Unix
:synopsis: Contains the aam data format abstraction layer :synopsis: Contains the aam data format abstraction layer
""" """
import logging
import numpy as np import numpy as np
from matplotlib.tri import Triangulation from matplotlib.tri import Triangulation
import cv2 import cv2
...@@ -13,13 +12,7 @@ import cv2 ...@@ -13,13 +12,7 @@ import cv2
# local imports # local imports
import pca import pca
import reconstruction.texture as tx import reconstruction.texture as tx
from settings import logger
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s %(levelname)s %(name)s: %(message)s'
)
logger = logging.getLogger(__name__)
class AAMPoints(): class AAMPoints():
......
 #!/usr/local/bin/python
 # python std
 import argparse
-import logging
 import importlib
 # installed packages
@@ -14,9 +13,7 @@ import aam
 from reconstruction import reconstruction
-logging.basicConfig(level=logging.INFO,
-                    format='%(asctime)s %(levelname)s %(name)s: %(message)s')
-logger = logging.getLogger(__name__)
+from settings import logger
 def add_parser_options():
@@ -104,7 +101,7 @@ def save_pca_model_texture(args):
     assert args.shape_type, '--shape_type the type of dataset, see datasets module'
     dataset_module = import_dataset_module(args.shape_type)
-    shape_model = pca.PcaModel(args.model_shape_file)
+    shape_model = pca.PCAModel(args.model_shape_file)
     mean_points = dataset_module.IMMPoints(points_list=shape_model.mean_values)
     textures = aam.build_texture_feature_vectors(
@@ -174,8 +171,8 @@ def generate_call_graph(args):
     graphviz = GraphvizOutput(output_file='filter_none.png')
     with PyCallGraph(output=graphviz):
-        shape_model = pca.PcaModel(args.model_shape_file)
-        texture_model = pca.PcaModel(args.model_texture_file)
+        shape_model = pca.PCAModel(args.model_shape_file)
+        texture_model = pca.PCAModel(args.model_texture_file)
         input_points = dataset_module.IMMPoints(filename='data/imm_face_db/40-3m.asf')
         input_image = input_points.get_image()
@@ -198,8 +195,8 @@ def show_reconstruction(args):
     dataset_module = import_dataset_module(args.shape_type)
-    shape_model = pca.PcaModel(args.model_shape_file)
-    texture_model = pca.PcaModel(args.model_texture_file)
+    shape_model = pca.PCAModel(args.model_shape_file)
+    texture_model = pca.PCAModel(args.model_texture_file)
     input_points = dataset_module.IMMPoints(
         filename='data/imm_face_db/40-3m.asf'
...
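A note on the import_dataset_module(args.shape_type) calls above: this commit only touches the logging setup around them, and the helper itself is not shown in the diff. The sketch below is an assumption about what it roughly does, based on the importlib import and the '--shape_type imm' flag in the Makefile; the 'datasets' package name is a guess, not part of the commit.

import importlib

def import_dataset_module(shape_type):
    # Hypothetical helper: map a shape type such as 'imm' to its dataset module.
    # The 'datasets' package name is assumed for illustration only.
    return importlib.import_module('datasets.{}'.format(shape_type))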
 import numpy as np
+from settings import logger
-class PcaModel:
+class PCAModel:
     """
     Abstraction for a pca model file. The pca model is stored in a numpy file
     using numpy.save. The following information is stored:
@@ -15,7 +17,7 @@ class PcaModel:
     Examples:
-        pca = PcaModel(path_to_numpy_model_file)
+        pca = PCAModel(path_to_numpy_model_file)
     """
     def __init__(self, filename=None):
         self.filename = filename
@@ -51,8 +53,7 @@ class PcaModel:
         assert hasattr(self, 'mean_values')
         assert hasattr(self, 'triangles')
-        saving = np.asarray(
-            [
+        saving = np.asarray([
             self.Vt,
             self.s,
             self.n_components,
@@ -65,23 +66,23 @@ class PcaModel:
     def load(self):
         """
-        Loads the numpy file, see PcaModel whichs uses this function to load
+        Loads the numpy file, see PCAModel which uses this function to load
         the PCA Model data.
         Returns:
             (tuple): Vt, s, n_components, mean_values and triangles
             Vt (numpy ndarray): Two dimensional array with dimensions
                 (n_features, n_features)
-            n_components: number of components needed to cover .90 percent of the
-            variance
-            mean_values (numpy ndarray): mean values of the features of the model,
-            this should have dimensions (n_featurs, )
+            n_components: number of components needed to cover .90 percent of
+                the variance
+            mean_values (numpy ndarray): mean values of the features of the
+                model, this should have dimensions (n_features, )
             triangles: a list of lists of indices that form the triangles in the
                 AAM list.
         Examples:
             We would advise not to use this function directly but to use the
-            PcaModel. See the :class:`PcaModel`
+            PCAModel. See the :class:`PCAModel`
         """
         pca_model = np.load(self.filename)
@@ -94,12 +95,29 @@ class PcaModel:
 def pca(data, mean_values, variance_percentage=90):
     """
-    Perform Singlar Value Decomposition
+    Perform Singular Value Decomposition, which we treat as a PCA analysis.
+    We also calculate how many components are needed to reach
+    `variance_percentage` (default 90 percent) of the variance.
+    Args:
+        data (ndarray): list of flattened feature vectors.
+        mean_values (ndarray): mean of all flattened feature vectors,
+            in the same order.
+    Kwargs:
+        variance_percentage (int): used to calculate how many components are
+            needed to keep the given percentage of the variance (default 90).
+            Note that the data is not altered; `n_components` is only returned
+            as extra information, so the caller knows how many components to
+            keep (or discard) while still retaining that percentage of the
+            variance.
     Returns:
-        U (ndarray): U matrix
-        s (ndarray): 1d singular values (diagonal in array form)
-        Vt (ndarray): Vt matrix
+        tuple of:
+            U (ndarray): U matrix
+            s (ndarray): 1d singular values in flattened form.
+            Vt (ndarray): Vt matrix
+            n_components (int): the number of components that (together) cover
+                `variance_percentage` of the variance.
     """
     # subtract mean
     zero_mean = data - mean_values
@@ -115,6 +133,7 @@ def pca(data, mean_values, variance_percentage=90):
         i += 1
     n_components = i
+    logger.debug('%s components form %s%% of the variance', n_components, variance_percentage)
     return U, s, Vt, n_components
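The debug line added above reports how many components are needed for the requested share of the variance. As a small, self-contained sketch of that bookkeeping (assuming the surrounding loop accumulates the singular values s until the threshold is reached; the exact variance measure is not visible in this hunk):

import numpy as np

def components_for_variance(s, variance_percentage=90):
    # Cumulative percentage contributed by each component, using the singular
    # values as the variance proxy (an assumption; s**2 is another common choice).
    explained = np.cumsum(s) / np.sum(s) * 100.0
    # Count how many components are needed before the threshold is reached.
    return int(np.searchsorted(explained, variance_percentage) + 1)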
@@ -124,13 +143,13 @@ def reconstruct(feature_vector, Vt, mean_values, n_components=None):
     Reconstruct with U, s, Vt
     Args:
-        U (numpy ndarray): One feature vector from the reduced SVD.
+        U (numpy ndarray): One feature vector from the SVD.
             U should have shape (n_features,), (i.e., one dimensional)
         s (numpy ndarray): The singular values as a one dimensional array
         Vt (numpy ndarray): Two dimensional array with dimensions
             (n_features, n_features)
-        mean_values (numpy ndarray): mean values of the features of the model,
-        this should have dimensions (n_features, )
+        mean_values (numpy ndarray): mean values of the features of the
+            model, this should have dimensions (n_features, )
     """
...
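A minimal usage sketch tying pca.py together, not taken from the commit: it assumes the PCAModel constructor loads the attributes listed in the docstrings above (Vt, s, n_components, mean_values, triangles) and that pca.reconstruct() accepts a single flattened feature vector. The model path is illustrative.

import numpy as np
import pca

model = pca.PCAModel('data/pca_shape_model.npy')   # illustrative path
# Reconstructing the mean itself is a cheap sanity check: the output should
# match mean_values (up to numerical noise).
feature_vector = np.asarray(model.mean_values)
approximation = pca.reconstruct(
    feature_vector,
    model.Vt,
    model.mean_values,
    n_components=model.n_components,   # roughly 90 percent of the variance
)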
+def reconstruct(model_shape_file, model_texture_file, image, asf_file):
+    Vt_shape, s, n_shape_components, mean_value_points, triangles = pca.load(model_shape_file)
+    Vt_texture, s_texture, n_texture_components, mean_values_texture, _ = pca.load(model_texture_file)
+    InputPoints = imm.IMMPoints(filename=asf_file)
+    input_image = InputPoints.get_image()
+    MeanPoints = imm.IMMPoints(points_list=mean_value_points)
+    MeanPoints.get_scaled_points(input_image.shape)
+    while True:
+        utils.reconstruct_texture(
+            input_image,           # src image
+            input_image,           # dst image
+            Vt_texture,            # Vt
+            InputPoints,           # shape points input
+            MeanPoints,            # shape points mean
+            mean_values_texture,   # mean texture
+            triangles,             # triangles
+            n_texture_components   # learned n_texture_components
+        )
+        dst = utils.get_texture(MeanPoints, mean_values_texture)
+        cv2.imshow('original', InputPoints.get_image())
+        cv2.imshow('reconstructed', input_image)
+        cv2.imshow('main face', dst)
+        k = cv2.waitKey(0) & 0xFF
+        if k == 27:
+            break
+    cv2.destroyAllWindows()
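The hunk above does not show the imports this helper relies on (cv2, pca, the IMM dataset module and the texture utils), so the sketch below is a hedged illustration of how its module might start and how the helper could be called. The import locations and file paths are assumptions based on the rest of this diff, not part of the commit.

import cv2

import pca
import imm      # assumed: the IMM dataset module, used the same way in the tests below
import utils    # assumed: provides reconstruct_texture and get_texture

reconstruct(
    model_shape_file='data/pca_shape_model.npy',       # illustrative paths
    model_texture_file='data/pca_texture_model.npy',
    image=None,                                        # unused by the helper as written
    asf_file='data/imm_face_db/40-3m.asf',
)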
 import json
 import os.path
 import base64
-from cStringIO import StringIO
 from glob import glob
 import cv2
@@ -31,8 +30,8 @@ class ImageWebSocketHandler(websocket.WebSocketHandler):
         model_texture_file = '{}/pca_texture_model.npy'.format(FILES_DIR)
         model_shape_file = '{}/pca_shape_model.npy'.format(FILES_DIR)
-        self.shape_model = pca.PcaModel(model_shape_file)
-        self.texture_model = pca.PcaModel(model_texture_file)
+        self.shape_model = pca.PCAModel(model_shape_file)
+        self.texture_model = pca.PCAModel(model_texture_file)
         websocket.WebSocketHandler.__init__(self, *args, **kwargs)
...
"""
.. module:: settings_module
:platform: Unix
:synopsis: This module contains global settings.
"""
import logging
import logging.config
import os
#logging.basicConfig(level=logging.INFO,
# format='%(asctime)s %(levelname)s %(name)s: %(message)s')
logging.config.dictConfig({
'version': 1,
'disable_existing_loggers': False, # this fixes the problem
'formatters': {
'standard': {
'format': '%(asctime)s %(levelname)s %(module)s: %(message)s'
},
},
'handlers': {
'default': {
'level': logging.INFO,
'formatter': 'standard',
'class': 'logging.StreamHandler',
},
'debug': {
'level': logging.DEBUG,
'formatter': 'standard',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'root': {
'handlers': ['default'],
'level': logging.INFO,
'propagate': True
},
'debug': {
'handlers': ['debug'],
'level': logging.DEBUG,
'propagate': False
}
}
})
logger = logging.getLogger('root')
#logger.setLevel(logging.DEBUG)
if os.environ.get('DEBUG', False):
logger = logging.getLogger('debug')
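The effect of the module above is that every other file in this commit imports the shared logger instead of calling logging.basicConfig itself. A small sketch of the intended pattern (the function name is made up for illustration):

from settings import logger

def build_shape_model():
    logger.info('building the shape model')    # always emitted by the default handler
    logger.debug('per-sample detail')          # only emitted when the debug logger is active

Setting the DEBUG environment variable before starting a process (for example DEBUG=1 python main.py) makes settings hand out the 'debug' logger instead of the 'root' one, which is how the commit's debug levels are switched on.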
@@ -48,8 +48,8 @@ def test_zero_mean_aan():
 def test_build_texture_feature_vectors():
-    shape_model = pca.PcaModel('data/test_data/pca_shape_model.npy')
-    texture_model = pca.PcaModel('data/test_data/pca_texture_model.npy')
+    shape_model = pca.PCAModel('data/test_data/pca_shape_model.npy')
+    texture_model = pca.PCAModel('data/test_data/pca_texture_model.npy')
     input_points = imm.IMMPoints(filename='data/imm_face_db/40-3m.asf')
     input_image = input_points.get_image()
...