diff --git a/doc/install.rst b/doc/install.rst
index be89c862758093cc518010db88ebd5411bf407a1..4f111329e8eccc6a26851dbe1d6e9e80b526cc6f 100644
--- a/doc/install.rst
+++ b/doc/install.rst
@@ -137,10 +137,19 @@ installation, the package can be tested by executing from outside the
 source directory::
 
     python -c "import scikits.learn as skl; skl.test()"
-    
 
-It can also be tested without having the package installed. For this
-you must compile the sources inplace from the source directory::
+This should give you a lot of output (and some warnings), but it
+should eventually finish with output similar to::
+
+    Ran 601 tests in 27.920s
+    OK (SKIP=2)
+
+otherwise, please consider filing a bug report in the
+:ref:`bug_tracker` or asking for help on the :ref:`mailing_lists`.
+
+scikits.learn can also be tested without having the package
+installed. To do this, you must build the extensions in place from
+the source directory::
 
     python setup.py build_ext --inplace
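+
+and then run the test suite from the source directory; for example,
+assuming the ``nose`` test runner is installed::
+
+    nosetests scikits/learn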
 
diff --git a/doc/support.rst b/doc/support.rst
index 38ef3cb2d7a18b6cf9eaf610f80c78f8592801ee..5ef09c946f7d0af5d4cd9d2ff69a44c83d2c217f 100644
--- a/doc/support.rst
+++ b/doc/support.rst
@@ -1,5 +1,7 @@
 There are several ways to get in touch with the developers.
 
+.. _mailing_lists:
+
 Mailing List
 ============
 
@@ -8,6 +10,7 @@ Main mailing list is `scikit-learn-general
is also a commit list, which is notified of updates to the main repo: `scikit-learn-commits <https://lists.sourceforge.net/lists/listinfo/scikit-learn-commits>`_
 
 
+.. _bug_tracker:
 
 Bug tracker
 ===========
diff --git a/scikits/learn/gaussian_process/gaussian_process.py b/scikits/learn/gaussian_process/gaussian_process.py
index 3310fef044954070128a9c871d330fab060544ac..3a66ce02c5ba78145788d182c3ca7d8392369e52 100644
--- a/scikits/learn/gaussian_process/gaussian_process.py
+++ b/scikits/learn/gaussian_process/gaussian_process.py
@@ -192,12 +192,11 @@ class GaussianProcess(BaseEstimator, RegressorMixin):
     -------
     >>> import numpy as np
     >>> from scikits.learn.gaussian_process import GaussianProcess
-    >>> f = lambda x: x * np.sin(x)
     >>> X = np.atleast_2d([1., 3., 5., 6., 7., 8.]).T
-    >>> y = f(X).ravel()
-    >>> gp = GaussianProcess(theta0=1e-1, thetaL=1e-3, thetaU=1e0).fit(X, y)
-    >>> x = np.atleast_2d(np.linspace(0, 10, 1000)).T
-    >>> y_pred, MSE = gp.predict(x, eval_MSE=True)
+    >>> y = (X * np.sin(X)).ravel()
+    >>> gp = GaussianProcess(theta0=0.1, thetaL=.001, thetaU=1.)
+    >>> gp.fit(X, y) # doctest: +ELLIPSIS
+    GaussianProcess(normalize=True, ...)
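+    >>> # the fitted model can then be used for prediction (here at the
+    >>> # training points); eval_MSE=True also returns the mean squared error
+    >>> y_pred, MSE = gp.predict(X, eval_MSE=True)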
 
     Implementation details
     ----------------------
diff --git a/scikits/learn/linear_model/sparse/stochastic_gradient.py b/scikits/learn/linear_model/sparse/stochastic_gradient.py
index bf0b610f672b1a6010bd03fe1951a67608cc3abd..876f9242de9b3418706f3b187e2ca585606941b3 100644
--- a/scikits/learn/linear_model/sparse/stochastic_gradient.py
+++ b/scikits/learn/linear_model/sparse/stochastic_gradient.py
@@ -83,9 +83,10 @@ class SGDClassifier(BaseSGDClassifier):
     Examples
     --------
     >>> import numpy as np
+    >>> from scikits.learn import linear_model
     >>> X = np.array([[-1, -1], [-2, -1], [1, 1], [2, 1]])
     >>> y = np.array([1, 1, 2, 2])
-    >>> clf = SGDClassifier()
+    >>> clf = linear_model.sparse.SGDClassifier()
     >>> clf.fit(X, y)
     SGDClassifier(loss='hinge', n_jobs=1, shuffle=False, verbose=0, n_iter=5,
            fit_intercept=True, penalty='l2', rho=1.0, alpha=0.0001)
@@ -353,11 +354,12 @@ class SGDRegressor(BaseSGDRegressor):
     Examples
     --------
     >>> import numpy as np
+    >>> from scikits.learn import linear_model
     >>> n_samples, n_features = 10, 5
     >>> np.random.seed(0)
     >>> y = np.random.randn(n_samples)
     >>> X = np.random.randn(n_samples, n_features)
-    >>> clf = SGDRegressor()
+    >>> clf = linear_model.sparse.SGDRegressor()
     >>> clf.fit(X, y)
     SGDRegressor(loss='squared_loss', shuffle=False, verbose=0, n_iter=5,
            fit_intercept=True, penalty='l2', p=0.1, rho=1.0, alpha=0.0001)
diff --git a/scikits/learn/linear_model/stochastic_gradient.py b/scikits/learn/linear_model/stochastic_gradient.py
index 61f201bc4201f0717d1bd5f1b0704bc4c5f813eb..b20bd4287485088bc08afae391fd3ae51be637d7 100644
--- a/scikits/learn/linear_model/stochastic_gradient.py
+++ b/scikits/learn/linear_model/stochastic_gradient.py
@@ -81,9 +81,10 @@ class SGDClassifier(BaseSGDClassifier):
     Examples
     --------
     >>> import numpy as np
+    >>> from scikits.learn import linear_model
     >>> X = np.array([[-1, -1], [-2, -1], [1, 1], [2, 1]])
     >>> Y = np.array([1, 1, 2, 2])
-    >>> clf = SGDClassifier()
+    >>> clf = linear_model.SGDClassifier()
     >>> clf.fit(X, Y)
     SGDClassifier(loss='hinge', n_jobs=1, shuffle=False, verbose=0, n_iter=5,
            fit_intercept=True, penalty='l2', rho=1.0, alpha=0.0001)
@@ -320,11 +321,12 @@ class SGDRegressor(BaseSGDRegressor):
     Examples
     --------
     >>> import numpy as np
+    >>> from scikits.learn import linear_model
     >>> n_samples, n_features = 10, 5
     >>> np.random.seed(0)
     >>> y = np.random.randn(n_samples)
     >>> X = np.random.randn(n_samples, n_features)
-    >>> clf = SGDRegressor()
+    >>> clf = linear_model.SGDRegressor()
     >>> clf.fit(X, y)
     SGDRegressor(loss='squared_loss', shuffle=False, verbose=0, n_iter=5,
            fit_intercept=True, penalty='l2', p=0.1, rho=1.0, alpha=0.0001)
diff --git a/scikits/learn/metrics/pairwise.py b/scikits/learn/metrics/pairwise.py
index 377809c31efd0c041ee7c750afa0abb70ac150d8..cc9c486ac67ce20e728a45790dd86d8d2b0960eb 100644
--- a/scikits/learn/metrics/pairwise.py
+++ b/scikits/learn/metrics/pairwise.py
@@ -26,6 +26,7 @@ def euclidian_distances(X, Y):
 
     Examples
     --------
+    >>> from scikits.learn.metrics.pairwise import euclidian_distances
     >>> X = [[0, 1], [1, 1]]
    >>> # distance between rows of X
     >>> euclidian_distances(X, X)
diff --git a/scikits/learn/neighbors.py b/scikits/learn/neighbors.py
index 50e8a0e088044ad7446e4f610592bdae5f675815..f181339e58e66f71e85013c07e87c730eef0df06 100644
--- a/scikits/learn/neighbors.py
+++ b/scikits/learn/neighbors.py
@@ -264,11 +264,10 @@ def barycenter_weights(x, X_neighbors, tol=1e-3):
 
     Examples
     --------
-    >>> X_neighbors = [[0], [2]]
-    >>> x = [0.5]
     >>> from scikits.learn.neighbors import barycenter_weights
-    >>> print barycenter_weights(x, X_neighbors)
-    [ 0.74968789  0.25031211]
+    >>> X_neighbors, x = [[0], [2]], [0.5]
+    >>> barycenter_weights(x, X_neighbors)
+    array([ 0.74968789,  0.25031211])
     """
     x = np.asanyarray(x)
     X_neighbors = np.asanyarray(X_neighbors)
@@ -320,11 +319,12 @@ def kneighbors_graph(X, n_neighbors, weight=None, ball_tree=None,
     Examples
     --------
     >>> X = [[0], [2], [1]]
-    >>> A = kneighbors_graph(X, n_neighbors=2, weight=None)
-    >>> print A.todense()
-    [[ 1.  0.  1.]
-     [ 0.  1.  1.]
-     [ 0.  1.  1.]]
+    >>> from scikits.learn.neighbors import kneighbors_graph
+    >>> A = kneighbors_graph(X, 2)
+    >>> A.todense()
+    matrix([[ 1.,  0.,  1.],
+            [ 0.,  1.,  1.],
+            [ 0.,  1.,  1.]])
     """
     from scipy import sparse
     X = np.asanyarray(X)
diff --git a/scikits/learn/pca.py b/scikits/learn/pca.py
index 00bab94e3b23f24e179803896d65e3a00925d818..38fc4fe1bbe180cb63a251b2e108d59821d6c74c 100644
--- a/scikits/learn/pca.py
+++ b/scikits/learn/pca.py
@@ -145,8 +145,8 @@ class PCA(BaseEstimator):
     Examples
     --------
     >>> import numpy as np
-    >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
     >>> from scikits.learn.pca import PCA
+    >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
     >>> pca = PCA(n_components=2)
     >>> pca.fit(X)
     PCA(copy=True, n_components=2, whiten=False)
@@ -318,8 +318,8 @@ class RandomizedPCA(BaseEstimator):
     Examples
     --------
     >>> import numpy as np
+    >>> from scikits.learn.pca import RandomizedPCA
     >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
-    >>> from scikits.learn.pca import PCA
     >>> pca = RandomizedPCA(n_components=2)
     >>> pca.fit(X)
     RandomizedPCA(copy=True, n_components=2, iterated_power=3, whiten=False)
diff --git a/scikits/learn/tests/test_hmm.py b/scikits/learn/tests/test_hmm.py
index cc77358bebef74b1a5f1f2db3b9799ca27f23622..41b93f0c83138f3c8134c31045c33ca3e7cd417d 100644
--- a/scikits/learn/tests/test_hmm.py
+++ b/scikits/learn/tests/test_hmm.py
@@ -21,8 +21,8 @@ class TestBaseHMM(SeedRandomNumberGeneratorTestCase):
 
     class StubHMM(hmm._BaseHMM):
 
-        def _compute_log_likelihood(self):
-            pass
+        def _compute_log_likelihood(self, X):
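+            # The stub ignores the observations X and returns the
+            # framelogprob array that the test sets on the instance.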
+            return self.framelogprob
 
         def _generate_sample_from_state(self):
             pass
@@ -64,7 +64,7 @@ class TestBaseHMM(SeedRandomNumberGeneratorTestCase):
                                [0.9, 0.2],
                                [0.9, 0.2]])
         # Add dummy observations to stub.
-        h._compute_log_likelihood = lambda obs: framelogprob
+        h.framelogprob = framelogprob
         return h, framelogprob
 
     def test_init(self):
@@ -141,7 +141,7 @@ class TestBaseHMM(SeedRandomNumberGeneratorTestCase):
 
         # Add dummy observations to stub.
         framelogprob = np.log(np.random.rand(nobs, n_states))
-        h._compute_log_likelihood = lambda obs: framelogprob
+        h.framelogprob = framelogprob
 
         # If startprob and transmat are uniform across all states (the
         # default), the transitions are uninformative - the model
@@ -162,7 +162,7 @@ class TestBaseHMM(SeedRandomNumberGeneratorTestCase):
 
         # Add dummy observations to stub.
         framelogprob = np.log(np.random.rand(nobs, n_states))
-        h._compute_log_likelihood = lambda obs: framelogprob
+        h.framelogprob = framelogprob
 
         # If startprob and transmat are uniform across all states (the
         # default), the transitions are uninformative - the model
@@ -500,6 +500,25 @@ class TestMultinomialHMM(MultinomialHMMParams,
         self.test_fit('e')
 
 
+def create_random_gmm(n_mix, n_features, cvtype):
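+    """Return a GMM with random means, covariances and weights;
+    cvtype selects the covariance structure ('spherical', 'tied',
+    'diag' or 'full')."""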
+    from scikits.learn import mixture
+
+    g = mixture.GMM(n_mix, cvtype=cvtype)
+    g.means = np.random.randint(-20, 20, (n_mix, n_features))
+    mincv = 0.1
+    g.covars = {'spherical': (mincv
+                              + mincv * np.random.rand(n_mix)) ** 2,
+                'tied': _generate_random_spd_matrix(n_features)
+                       + mincv * np.eye(n_features),
+                'diag': (mincv
+                         + mincv * np.random.rand(n_mix, n_features)) ** 2,
+                'full': np.array([_generate_random_spd_matrix(n_features)
+                                  + mincv * np.eye(n_features)
+                                  for x in xrange(n_mix)])}[cvtype]
+    g.weights = hmm.normalize(np.random.rand(n_mix))
+    return g
+
+
 class GMMHMMParams(object):
     n_states = 3
     n_mix = 2
@@ -510,25 +529,6 @@ class GMMHMMParams(object):
     transmat = np.random.rand(n_states, n_states)
     transmat /= np.tile(transmat.sum(axis=1)[:,np.newaxis], (1, n_states))
 
-    @staticmethod
-    def create_random_gmm(n_mix, n_features, cvtype):
-        from scikits.learn import mixture
-
-        g = mixture.GMM(n_mix, cvtype=cvtype)
-        g.means = np.random.randint(-20, 20, (n_mix, n_features))
-        mincv = 0.1
-        g.covars = {'spherical': (mincv
-                                  + mincv * np.random.rand(n_mix)) ** 2,
-                    'tied': _generate_random_spd_matrix(n_features)
-                           + mincv * np.eye(n_features),
-                    'diag': (mincv
-                             + mincv * np.random.rand(n_mix, n_features)) ** 2,
-                    'full': np.array([_generate_random_spd_matrix(n_features)
-                                      + mincv * np.eye(n_features)
-                                      for x in xrange(n_mix)])}[cvtype]
-        g.weights = hmm.normalize(np.random.rand(n_mix))
-
-        return g
 
 
 class TestGMMHMM(GMMHMMParams, SeedRandomNumberGeneratorTestCase):
@@ -537,7 +537,7 @@ class TestGMMHMM(GMMHMMParams, SeedRandomNumberGeneratorTestCase):
         np.random.seed(self.seed)
         self.gmms = []
         for state in xrange(self.n_states):
-            self.gmms.append(self.create_random_gmm(
+            self.gmms.append(create_random_gmm(
                 self.n_mix, self.n_features, self.cvtype))
 
     def test_attributes(self):
diff --git a/scikits/learn/tests/test_mixture.py b/scikits/learn/tests/test_mixture.py
index e7e294246db692876f406bd0548e6cb88ba0324c..6a02eee9e391caed7b3769a61df0a195698528be 100644
--- a/scikits/learn/tests/test_mixture.py
+++ b/scikits/learn/tests/test_mixture.py
@@ -79,7 +79,7 @@ def test_sample_gaussian():
     cv = np.dot(A.T, A) + np.eye(n_features)
     samples = mixture.sample_gaussian(mu, cv, cvtype='full', n=n_samples)
     assert np.allclose(samples.mean(axis), mu, atol=0.3)
-    assert np.allclose(np.cov(samples), cv, atol=0.7)
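+    # the sample covariance of a finite draw is noisy, so use a loose tolerance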
+    assert np.allclose(np.cov(samples), cv, atol=1.)
 
 
 def _naive_lmvnpdf_diag(obs, mu, cv):