diff --git a/scikits/learn/mixture.py b/scikits/learn/mixture.py
index 2f66c780f9ad17a53e88abeacd4ddf68c5f3bad5..dcd78d6d7e60ee007620776785cc0c81d266cf29 100644
--- a/scikits/learn/mixture.py
+++ b/scikits/learn/mixture.py
@@ -79,23 +79,26 @@ def lmvnpdf(obs, means, covars, cvtype='diag'):
     return lmvnpdf_dict[cvtype](obs, means, covars)
 
 
-def sample_gaussian(mean, covar, cvtype='diag', n=1):
+def sample_gaussian(mean, covar, cvtype='diag', n_samples=1):
     """Generate random samples from a Gaussian distribution.
 
     Parameters
     ----------
     mean : array_like, shape (n_features,)
         Mean of the distribution.
-    covars : array_like
+
+    covar : array_like, optional
         Covariance of the distribution.  The shape depends on `cvtype`:
             scalar  if 'spherical',
             (D)     if 'diag',
             (D, D)  if 'tied', or 'full'
-    cvtype : string
+
+    cvtype : string, optional
         Type of the covariance parameters.  Must be one of
         'spherical', 'tied', 'diag', 'full'.  Defaults to 'diag'.
-    n : int
-        Number of samples to generate.
+
+    n_samples : int, optional
+        Number of samples to generate. Defaults to 1.
 
     Returns
     -------
@@ -103,8 +106,8 @@ def sample_gaussian(mean, covar, cvtype='diag', n=1):
         Randomly generated sample
     """
     ndim = len(mean)
-    rand = np.random.randn(ndim, n)
-    if n == 1:
+    rand = np.random.randn(ndim, n_samples)
+    if n_samples == 1:
         rand.shape = (ndim,)
 
     if cvtype == 'spherical':
diff --git a/scikits/learn/tests/test_mixture.py b/scikits/learn/tests/test_mixture.py
index 6a02eee9e391caed7b3769a61df0a195698528be..6135452c13014b8a2e6afccaa9de0e1f33840ecf 100644
--- a/scikits/learn/tests/test_mixture.py
+++ b/scikits/learn/tests/test_mixture.py
@@ -62,22 +62,26 @@ def test_sample_gaussian():
     mu = np.random.randint(10) * np.random.rand(n_features)
     cv = (np.random.rand(n_features) + 1.0) ** 2
 
-    samples = mixture.sample_gaussian(mu, cv, cvtype='diag', n=n_samples)
+    samples = mixture.sample_gaussian(
+        mu, cv, cvtype='diag', n_samples=n_samples)
 
     assert np.allclose(samples.mean(axis), mu, atol=0.3)
     assert np.allclose(samples.var(axis),  cv, atol=0.5)
 
     # the same for spherical covariances
     cv = (np.random.rand() + 1.0) ** 2
-    samples = mixture.sample_gaussian(mu, cv, cvtype='spherical', n=n_samples)
+    samples = mixture.sample_gaussian(
+        mu, cv, cvtype='spherical', n_samples=n_samples)
 
     assert np.allclose(samples.mean(axis), mu, atol=0.3)
-    assert np.allclose(samples.var(axis),  np.repeat(cv, n_features), atol=0.5)
+    assert np.allclose(
+        samples.var(axis), np.repeat(cv, n_features), atol=0.5)
 
     # and for full covariances
     A = np.random.randn(n_features, n_features)
     cv = np.dot(A.T, A) + np.eye(n_features)
-    samples = mixture.sample_gaussian(mu, cv, cvtype='full', n=n_samples)
+    samples = mixture.sample_gaussian(
+        mu, cv, cvtype='full', n_samples=n_samples)
     assert np.allclose(samples.mean(axis), mu, atol=0.3)
     assert np.allclose(np.cov(samples), cv, atol=1.)
 
@@ -219,7 +223,7 @@ class GMMTester():
         g._covars = 20 * self.covars[self.cvtype]
 
         # Create a training set by sampling from the predefined distribution.
-        train_obs = g.rvs(n=100)
+        train_obs = g.rvs(n_samples=100)
 
         g.fit(train_obs, n_iter=0, init_params=params)