From a4a1e5835d6c1f024dc8555884cca3da0c0a7f19 Mon Sep 17 00:00:00 2001
From: Fabian Pedregosa <fabian.pedregosa@inria.fr>
Date: Tue, 5 Jan 2010 15:10:56 +0000
Subject: [PATCH] Version 0.5, add custom confidence ellipsoids David
 Cournapeau <david@ar.media.kyoto-u.ac.jp> | 2006-08-07 18:46:57 +0900 (Mon,
 07 Aug 2006)

From: cdavid <cdavid@cb17146a-f446-4be1-a4f7-bd7c5bb65646>

git-svn-id: https://scikit-learn.svn.sourceforge.net/svnroot/scikit-learn/trunk@74 22fbfee3-77ab-4535-9bad-27d1bd3bc7d8
---
 scikits/learn/pyem/Changelog                 |  7 +++++++
 scikits/learn/pyem/TODO                      |  7 +------
 scikits/learn/pyem/pyem/__init__.py          |  4 ++--
 scikits/learn/pyem/pyem/densities.py         | 20 ++++++++++++--------
 scikits/learn/pyem/pyem/profile_densities.py | 15 ++++++++-------
 5 files changed, 30 insertions(+), 23 deletions(-)

diff --git a/scikits/learn/pyem/Changelog b/scikits/learn/pyem/Changelog
index 17a022f1b1..fc57297919 100644
--- a/scikits/learn/pyem/Changelog
+++ b/scikits/learn/pyem/Changelog
@@ -1,3 +1,10 @@
+pyem (0.5) Fri, 04 Aug 2006 23:10:37 +0900
+
+	* put version to 0.5.0
+	* implement confidence interval using chi2
+
+-- David Cournapeau <david@ar.media.kyoto-u.ac.jp> 
+
 pyem (0.4) Fri, 04 Aug 2006 19:37:47 +0900
 
 	* put version to 0.4.2
diff --git a/scikits/learn/pyem/TODO b/scikits/learn/pyem/TODO
index 10b201cdfb..546bf2eb48 100644
--- a/scikits/learn/pyem/TODO
+++ b/scikits/learn/pyem/TODO
@@ -1,13 +1,8 @@
-# Last Change: Thu Jul 13 05:00 PM 2006 J
+# Last Change: Fri Aug 04 11:00 PM 2006 J
 
 Things which must be implemented for a 1.0 version:
     - test for various length and model size.
-    - refactoring with a class modelling mixture models.
-    - for Gaussian densities: compute level <-> confidence ellipses 
-    relationship with Chi2 model.
     - a small help/tutorial
-    - review the code, with the difference between generic numpy functions
-    and blas ones
 
 Things which would be nice:
     - C implementation of Gaussian densities at least.
diff --git a/scikits/learn/pyem/pyem/__init__.py b/scikits/learn/pyem/pyem/__init__.py
index b66857e8ce..0b5e774d00 100644
--- a/scikits/learn/pyem/pyem/__init__.py
+++ b/scikits/learn/pyem/pyem/__init__.py
@@ -1,7 +1,7 @@
 #! /usr/bin/env python
-# Last Change: Fri Aug 04 07:00 PM 2006 J
+# Last Change: Fri Aug 04 11:00 PM 2006 J
 
-version = '0.4.2'
+version = '0.5.0'
 
 from gauss_mix import GmParamError, GM
 from gmm_em import GmmParamError, GMM
diff --git a/scikits/learn/pyem/pyem/densities.py b/scikits/learn/pyem/pyem/densities.py
index 8d9427a4b9..0ac27b070b 100644
--- a/scikits/learn/pyem/pyem/densities.py
+++ b/scikits/learn/pyem/pyem/densities.py
@@ -1,11 +1,13 @@
 #! /usr/bin/python
 #
 # Copyrighted David Cournapeau
-# Last Change: Fri Aug 04 07:00 PM 2006 J
+# Last Change: Fri Aug 04 11:00 PM 2006 J
 
 import numpy as N
 import numpy.linalg as lin
 from numpy.random import randn
+from scipy.stats import chi2
+
 
 # Error classes
 class DenError(Exception):
@@ -163,15 +165,14 @@ def _full_gauss_den(x, mu, va, log):
     return y
 
 # To plot a confidence ellipse from multi-variate gaussian pdf
-def gauss_ell(mu, va, dim = [0, 1], npoints = 100):
+def gauss_ell(mu, va, dim = [0, 1], npoints = 100, level = 0.39):
     """ Given a mean and covariance for multi-variate
     gaussian, returns npoints points for the ellipse
-    of confidence 0.39
+    of confidence given by level (all points will be inside
+    the ellipsoids with a probability equal to level)
     
     Returns the coordinate x and y of the ellipse"""
     
-    # TODO: Get a confidence interval using the Chi2 distribution
-    # of points at a given mahalanobis distance...
     mu      = N.atleast_1d(mu)
     va      = N.atleast_1d(va)
     c       = N.array(dim)
@@ -187,11 +188,14 @@ def gauss_ell(mu, va, dim = [0, 1], npoints = 100):
         else:
             raise DenError("mean and variance are not dim conformant")
 
-    level   = 0.39
+    # TODO: Get a confidence interval using the Chi2 distribution
+    # of points at a given mahalanobis distance...
+    chi22d  = chi2(2)
+    mahal   = N.sqrt(chi22d.ppf(level))
     
     # Generates a circle of npoints
     theta   = N.linspace(0, 2 * N.pi, npoints)
-    circle  = N.array([N.cos(theta), N.sin(theta)])
+    circle  = mahal * N.array([N.cos(theta), N.sin(theta)])
 
     # Get the dimension which we are interested in:
     mu  = mu[dim]
@@ -395,7 +399,7 @@ if __name__ == "__main__":
     Yc      = Yc.transpose() + mu
 
     # Plotting
-    Xe, Ye  = gauss_ell(mu, va, npoints = 100)
+    Xe, Ye  = gauss_ell(mu, va, npoints = 100, level=0.95)
     pylab.figure()
     pylab.plot(Yc[:, 0], Yc[:, 1], '.')
     pylab.plot(Xe, Ye, 'r')
diff --git a/scikits/learn/pyem/pyem/profile_densities.py b/scikits/learn/pyem/pyem/profile_densities.py
index ee6f30f86c..786cbf25d2 100644
--- a/scikits/learn/pyem/pyem/profile_densities.py
+++ b/scikits/learn/pyem/pyem/profile_densities.py
@@ -1,4 +1,5 @@
 import numpy as N
+from numpy.random import randn
 import densities as D
 import tables
 
@@ -7,19 +8,19 @@ def bench1(mode = 'diag'):
     # Diag Gaussian of dimension 20
     #===========================================
     d       = 20
-    n       = 1e5
-    niter   = 1
+    n       = 1e4
+    niter   = 20
     mode    = 'diag'
 
     # Generate a model with k components, d dimensions
-    mu  = N.randn(1, d)
+    mu  = randn(1, d)
     if mode == 'diag':
-        va  = abs(N.randn(1, d))
+        va  = abs(randn(1, d))
     elif mode == 'full':
-        va  = N.randn(d, d)
-        va  = N.matrixmultiply(va, va.transpose())
+        va  = randn(d, d)
+        va  = N.dot(va, va.transpose())
 
-    X   = N.randn(n, d)
+    X   = randn(n, d)
     print "Compute %d times densities, %d dimension, %d frames" % (niter, d, n)
     for i in range(niter):
         Y   = D.gauss_den(X, mu, va)
-- 
GitLab