diff --git a/doc/modules/svm.rst b/doc/modules/svm.rst
index 8696cdc4b004cc9bf2f1fac1ff79029383561c04..ce21f04eb3f81277df4746c3bf6c4bdfb553fef9 100644
--- a/doc/modules/svm.rst
+++ b/doc/modules/svm.rst
@@ -116,7 +116,7 @@ classifiers are constructed and each one trains data from two classes::
     >>> Y = [0, 1, 2, 3]
     >>> clf = svm.SVC()
     >>> clf.fit(X, Y)
-    SVC(C=1.0, coef0=0.0, degree=3, gamma=0.25, kernel='rbf', probability=False,
+    SVC(C=1.0, coef0=0.0, degree=3, gamma=1.0, kernel='rbf', probability=False,
       shrinking=True, tol=0.001)
     >>> dec = clf.decision_function([[1]])
     >>> dec.shape[1] # 4 classes: 4*3/2 = 6
diff --git a/sklearn/svm/classes.py b/sklearn/svm/classes.py
index c2a2c1e2ec09ffbe9f00cbcd4579f077fcd38e3a..a69500dee616aa8a5632f353ffb982d255891f89 100644
--- a/sklearn/svm/classes.py
+++ b/sklearn/svm/classes.py
@@ -147,7 +147,7 @@ class SVC(BaseLibSVM, ClassifierMixin):
     >>> from sklearn.svm import SVC
     >>> clf = SVC()
     >>> clf.fit(X, y)
-    SVC(C=1.0, coef0=0.0, degree=3, gamma=0.25, kernel='rbf', probability=False,
+    SVC(C=1.0, coef0=0.0, degree=3, gamma=0.5, kernel='rbf', probability=False,
       shrinking=True, tol=0.001)
     >>> print clf.predict([[-0.8, -1]])
     [ 1.]
@@ -248,8 +248,8 @@ class NuSVC(BaseLibSVM, ClassifierMixin):
     >>> from sklearn.svm import NuSVC
     >>> clf = NuSVC()
     >>> clf.fit(X, y)
-    NuSVC(coef0=0.0, degree=3, gamma=0.25, kernel='rbf', nu=0.5,
-       probability=False, shrinking=True, tol=0.001)
+    NuSVC(coef0=0.0, degree=3, gamma=0.5, kernel='rbf', nu=0.5, probability=False,
+       shrinking=True, tol=0.001)
     >>> print clf.predict([[-0.8, -1]])
     [ 1.]
 
@@ -338,7 +338,7 @@ class SVR(BaseLibSVM, RegressorMixin):
     >>> X = np.random.randn(n_samples, n_features)
     >>> clf = SVR(C=1.0, epsilon=0.2)
     >>> clf.fit(X, y)
-    SVR(C=1.0, coef0=0.0, degree=3, epsilon=0.2, gamma=0.1, kernel='rbf',
+    SVR(C=1.0, coef0=0.0, degree=3, epsilon=0.2, gamma=0.2, kernel='rbf',
       probability=False, shrinking=True, tol=0.001)
 
     See also
@@ -446,7 +446,7 @@ class NuSVR(BaseLibSVM, RegressorMixin):
     >>> X = np.random.randn(n_samples, n_features)
     >>> clf = NuSVR(C=1.0, nu=0.1)
     >>> clf.fit(X, y)
-    NuSVR(C=1.0, coef0=0.0, degree=3, gamma=0.1, kernel='rbf', nu=0.1,
+    NuSVR(C=1.0, coef0=0.0, degree=3, gamma=0.2, kernel='rbf', nu=0.1,
        probability=False, shrinking=True, tol=0.001)
 
     See also
diff --git a/sklearn/svm/sparse/classes.py b/sklearn/svm/sparse/classes.py
index 031c85c3b3f401c195acabf69349c32c83dd5499..7a237b19e9bb22c2d84d2653e24a47c8a8c518bd 100644
--- a/sklearn/svm/sparse/classes.py
+++ b/sklearn/svm/sparse/classes.py
@@ -23,7 +23,7 @@ class SVC(SparseBaseLibSVM, ClassifierMixin):
     >>> from sklearn.svm.sparse import SVC
     >>> clf = SVC()
     >>> clf.fit(X, y)
-    SVC(C=1.0, coef0=0.0, degree=3, gamma=0.25, kernel='rbf', probability=False,
+    SVC(C=1.0, coef0=0.0, degree=3, gamma=0.5, kernel='rbf', probability=False,
       shrinking=True, tol=0.001)
     >>> print clf.predict([[-0.8, -1]])
     [ 1.]
@@ -58,8 +58,8 @@ class NuSVC (SparseBaseLibSVM, ClassifierMixin):
     >>> from sklearn.svm.sparse import NuSVC
     >>> clf = NuSVC()
     >>> clf.fit(X, y)
-    NuSVC(coef0=0.0, degree=3, gamma=0.25, kernel='rbf', nu=0.5,
-       probability=False, shrinking=True, tol=0.001)
+    NuSVC(coef0=0.0, degree=3, gamma=0.5, kernel='rbf', nu=0.5, probability=False,
+       shrinking=True, tol=0.001)
     >>> print clf.predict([[-0.8, -1]])
     [ 1.]
     """
@@ -97,7 +97,7 @@ class SVR (SparseBaseLibSVM, RegressorMixin):
     >>> X = np.random.randn(n_samples, n_features)
     >>> clf = SVR(C=1.0, epsilon=0.2)
     >>> clf.fit(X, y)
-    SVR(C=1.0, coef0=0.0, degree=3, epsilon=0.2, gamma=0.1, kernel='rbf', nu=0.5,
+    SVR(C=1.0, coef0=0.0, degree=3, epsilon=0.2, gamma=0.2, kernel='rbf', nu=0.5,
       probability=False, shrinking=True, tol=0.001)
     """
 
@@ -134,7 +134,7 @@ class NuSVR (SparseBaseLibSVM, RegressorMixin):
     >>> X = np.random.randn(n_samples, n_features)
     >>> clf = NuSVR(nu=0.1, C=1.0)
     >>> clf.fit(X, y)
-    NuSVR(C=1.0, coef0=0.0, degree=3, epsilon=0.1, gamma=0.1, kernel='rbf',
+    NuSVR(C=1.0, coef0=0.0, degree=3, epsilon=0.1, gamma=0.2, kernel='rbf',
        nu=0.1, probability=False, shrinking=True, tol=0.001)
     """
 
diff --git a/sklearn/svm/tests/test_svm.py b/sklearn/svm/tests/test_svm.py
index 17813589a5e62209663d0ded3ad0a9e5cc3b26e7..e01497e72b56c890ce16af104b9d3a8887af075a 100644
--- a/sklearn/svm/tests/test_svm.py
+++ b/sklearn/svm/tests/test_svm.py
@@ -157,9 +157,10 @@ def test_oneclass():
     clf.fit(X)
     pred = clf.predict(T)
 
-    assert_array_almost_equal(pred, [1, -1, -1])
-    assert_array_almost_equal(clf.intercept_, [-1.351], decimal=3)
-    assert_array_almost_equal(clf.dual_coef_, [[0.750, 0.749, 0.749, 0.750]],
+    assert_array_almost_equal(pred, [-1, -1, -1])
+    assert_array_almost_equal(clf.intercept_, [-1.008], decimal=3)
+    assert_array_almost_equal(clf.dual_coef_,
+                              [[ 0.632, 0.233, 0.633, 0.234, 0.632, 0.633]],
                               decimal=3)
     assert_raises(NotImplementedError, lambda: clf.coef_)
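
Note (not part of the patch): the updated doctest outputs above all appear consistent with the default kernel
coefficient switching from ``gamma = 1 / n_samples`` to ``gamma = 1 / n_features``. A minimal sketch of that
assumption follows; the feature counts per example (1 for the narrative-docs snippet, 2 for the SVC/NuSVC
docstrings, 5 for the SVR/NuSVR docstrings) are inferred from the new gamma values, since the hunks do not
show every ``X`` definition.

    def default_gamma(n_features):
        # Assumed new default: gamma = 1 / n_features (previously 1 / n_samples).
        return 1.0 / n_features

    print(default_gamma(1))  # 1.0 -> doc/modules/svm.rst example (assumed 1 feature)
    print(default_gamma(2))  # 0.5 -> SVC / NuSVC docstring examples (2 features)
    print(default_gamma(5))  # 0.2 -> SVR / NuSVR docstring examples (assumed 5 features)

Under the same assumption, the one-class SVM test updates (predictions, intercept, and dual coefficients)
reflect the different gamma used when refitting, not a change in the test data.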