diff --git a/scikits/learn/linear_model/logistic.py b/scikits/learn/linear_model/logistic.py index a4a18b558491448bdc448e35b62c3999a19cce08..bd37a2fdaa0940c61f9c1f6ad7316cf70911a9ac 100644 --- a/scikits/learn/linear_model/logistic.py +++ b/scikits/learn/linear_model/logistic.py @@ -42,6 +42,13 @@ class LogisticRegression(BaseLibLinear, ClassifierMixin): -------- LinearSVC + Notes + ----- + The underlying C implementation uses a random number generator to + select features when fitting the model. It is thus not uncommon + to have slightly different results for the same input data. If + that happens, try with a smaller eps parameter. + References ---------- LIBLINEAR -- A Library for Large Linear Classification diff --git a/scikits/learn/linear_model/sparse/logistic.py b/scikits/learn/linear_model/sparse/logistic.py index ca04d7ecd16e0bec3ecc1ba1a5efa043888edede..0cea75125aa4a18851dc5c1b40fe1f44e9468b70 100644 --- a/scikits/learn/linear_model/sparse/logistic.py +++ b/scikits/learn/linear_model/sparse/logistic.py @@ -45,18 +45,17 @@ class LogisticRegression(SparseBaseLibLinear, ClassifierMixin): intercept (a.k.a. bias) added to the decision function. It is available only when parameter intercept is set to True - Methods - ------- - fit(X, Y) : self - Fit the model - - predict(X) : array - Predict using the model. - See also -------- LinearSVC + Notes + ----- + The underlying C implementation uses a random number generator to + select features when fitting the model. It is thus not uncommon + to have slightly different results for the same input data. If + that happens, try with a smaller eps parameter. 
+ References ---------- LIBLINEAR -- A Library for Large Linear Classification diff --git a/scikits/learn/svm/liblinear.py b/scikits/learn/svm/liblinear.py index e02be399d4f7ef7625ca358e16d6e1ba37c8c59d..7bc02de95d4a2c441c686b5d3059bbb5300f8df5 100644 --- a/scikits/learn/svm/liblinear.py +++ b/scikits/learn/svm/liblinear.py @@ -43,6 +43,17 @@ class LinearSVC(BaseLibLinear, ClassifierMixin): `intercept_` : array, shape = [1] if n_classes == 2 else [n_classes] Constants in decision function. + Notes + ----- + The underlying C implementation uses a random number generator to + select features when fitting the model. It is thus not uncommon + to have slightly different results for the same input data. If + that happens, try with a smaller eps parameter. + + See also + -------- + SVC + References ---------- LIBLINEAR -- A Library for Large Linear Classification diff --git a/scikits/learn/svm/sparse/liblinear.py b/scikits/learn/svm/sparse/liblinear.py index 57557b7f24d5f6fd04090b01fe1b4067aaf85ec7..acac283d77e5b50d331fc8f969c167b8d82bf283 100644 --- a/scikits/learn/svm/sparse/liblinear.py +++ b/scikits/learn/svm/sparse/liblinear.py @@ -32,7 +32,6 @@ class LinearSVC(SparseBaseLibLinear, ClassifierMixin): Select the algorithm to either solve the dual or primal optimization problem. - Attributes ---------- `coef_` : array, shape = [n_features] if n_classes == 2 else [n_classes, n_features] @@ -42,6 +41,17 @@ class LinearSVC(SparseBaseLibLinear, ClassifierMixin): `intercept_` : array, shape = [1] if n_classes == 2 else [n_classes] constants in decision function + Notes + ----- + The underlying C implementation uses a random number generator to + select features when fitting the model. It is thus not uncommon + to have slightly different results for the same input data. If + that happens, try with a smaller eps parameter. + + See also + -------- + SVC + References ---------- LIBLINEAR -- A Library for Large Linear Classification