Commit b9a4a734 authored by Taddeüs Kroes's avatar Taddeüs Kroes

StatRed ass3: Started work on part 3.

parent 443e0415
from pylab import argmin, argmax, tile, unique, argwhere, array, mean, \ from pylab import argmin, argmax, tile, unique, argwhere, array, mean, \
newaxis, dot, pi, e, matrix newaxis, dot, pi, e, matrix
from svm import svm_model, svm_problem, svm_parameter, LINEAR
class NNb: class NNb:
def __init__(self, X, c): def __init__(self, X, c):
...@@ -42,8 +43,8 @@ class MEC: ...@@ -42,8 +43,8 @@ class MEC:
self.estimate() self.estimate()
def estimate(self): def estimate(self):
"""Estimate the mean and covariance matrix each class in the learning """Estimate the mean and covariance matrix for each class in the
set.""" learning set."""
self.class_data = [] self.class_data = []
for c in self.classes: for c in self.classes:
indices = argwhere(array(map(lambda x: x if x == c else 0, indices = argwhere(array(map(lambda x: x if x == c else 0,
...@@ -56,16 +57,17 @@ class MEC: ...@@ -56,16 +57,17 @@ class MEC:
self.class_data.append((mu, S, coeff)) self.class_data.append((mu, S, coeff))
def classify(self, x): def classify(self, x):
"""Use the sum of all entries in the pdf matrix to determine the class
with the greatest probability."""
p = [coeff * e**(-.5 * dot(x - mu, dot(S.I, array([x - p = [coeff * e**(-.5 * dot(x - mu, dot(S.I, array([x -
mu]).T)).tolist()[0][0]) for mu, S, coeff in self.class_data] mu]).T)).tolist()[0][0]) for mu, S, coeff in self.class_data]
return self.classes[argmax([i.sum() for i in p])] return self.classes[argmax([i.sum() for i in p])]
class SVM:
    """Linear support vector machine classifier backed by libsvm."""

    def __init__(self, X, c):
        """Train a linear SVM on data matrix *X* (one column per sample)
        with the corresponding label vector *c*."""
        # libsvm expects one sample per row, hence the transpose of X.
        problem = svm_problem(c.tolist(), X.T)
        params = svm_parameter(kernel_type=LINEAR)
        self.model = svm_model(problem, params)

    def classify(self, x):
        """Return the class label predicted by the trained model for the
        feature vector *x*."""
        return self.model.predict(x)
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment