diff --git a/scikits/learn/logistic.py b/scikits/learn/logistic.py
index b202109d945002d5d281537b5d09fdcfe8259be5..d78a2929541ec32324e1029b570b97416925ab0d 100644
--- a/scikits/learn/logistic.py
+++ b/scikits/learn/logistic.py
@@ -96,13 +96,11 @@ class LogisticRegression(object):
     def intercept_(self):
         if self.bias_ > 0:
             return self.raw_coef_[:,-1]
-        else:
-            raise ValueError('intercept_ not estimated')
+        return 0.0
 
     @property
     def coef_(self):
         if self.bias_ > 0:
             return self.raw_coef_[:,:-1]
-        else:
-            return self.raw_coef_
+        return self.raw_coef_
 
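Net effect of the logistic.py hunks: when no bias term was fitted, intercept_ now degrades to 0.0 instead of raising. A minimal usage sketch, assuming the intercept constructor flag exercised in the tests below is what controls bias_ (data taken from test_logistic.py):

    from scikits.learn import logistic

    clf = logistic.LogisticRegression(intercept=False)
    clf.fit([[0, 0], [0, 1], [1, 1]], [0, 1, 1])
    clf.intercept_   # 0.0 now, rather than a ValueError
    clf.coef_        # all of raw_coef_; there is no intercept column to strip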
diff --git a/scikits/learn/src/liblinear_helper.c b/scikits/learn/src/liblinear_helper.c
index cf67880e4343c3072351ed018e161263a4d72918..714832bc9f019572ad12a27b82b798c891b2b172 100644
--- a/scikits/learn/src/liblinear_helper.c
+++ b/scikits/learn/src/liblinear_helper.c
@@ -13,8 +13,9 @@
  * Special care must be taken with indices, since libsvm indices start
  * at 1 and not at 0.
  *
+ * If bias > 0, a trailing bias feature with value 1.0 is appended to each row.
  */
-struct feature_node **dense_to_sparse (double *x, npy_intp *dims)
+struct feature_node **dense_to_sparse (double *x, npy_intp *dims, double bias)
 {
     struct feature_node **sparse;
     register int i, j;              /* number of nonzero elements in row i */
@@ -40,53 +41,15 @@ struct feature_node **dense_to_sparse (double *x, npy_intp *dims)
         }
 
         /* set bias element */
-        T->value = 1.0;
-        T->index = j;
-        ++ T;
-
-        /* set sentinel */
-        T->index = -1;
-        ++ T;
-
-        /* allocate memory and copy collected items*/
-        count = T - temp;
-        sparse[i] = (struct feature_node *) malloc(count * sizeof(struct feature_node));
-        if (sparse[i] == NULL) return NULL;
-        memcpy(sparse[i], temp, count * sizeof(struct feature_node));
-    }
-
-    free(temp);
-    return sparse;
-}
-
-struct feature_node **dense_to_sparse_nobias (double *x, npy_intp *dims)
-{
-    struct feature_node **sparse;
-    register int i, j;              /* number of nonzero elements in row i */
-    struct feature_node *temp;          /* stack for nonzero elements */
-    struct feature_node *T;             /* pointer to the top of the stack */
-    int count;
-
-    sparse = (struct feature_node **) malloc (dims[0] * sizeof(struct feature_node *));
-    temp = (struct feature_node *) malloc ((dims[1]+1) * sizeof(struct feature_node));
-
-    if (sparse == NULL || temp == NULL) return NULL;
-
-    for (i=0; i<dims[0]; ++i) {
-        T = temp; /* reset stack pointer */
-
-        for (j=1; j<=dims[1]; ++j) {
-            T->value = *x;
-            if (T->value != 0) {
-                T->index = j;
-                ++T;
-            }
-            ++x; /* go to next element */
-        }
+        if (bias > 0) {
+            T->value = 1.0;
+            T->index = j;
+            ++ T;
+        }
 
         /* set sentinel */
         T->index = -1;
-        ++T;
+        ++ T;
 
         /* allocate memory and copy collected items*/
         count = T - temp;
@@ -99,6 +62,7 @@ struct feature_node **dense_to_sparse_nobias (double *x, npy_intp *dims)
     return sparse;
 }
 
+
 struct problem * set_problem(char *X,char *Y, npy_intp *dims, double bias)
 {
     struct problem *problem;
@@ -113,7 +77,7 @@ struct problem * set_problem(char *X,char *Y, npy_intp *dims, double bias)
         problem->n = (int) dims[1];
     }
     problem->y = (int *) Y;
-    problem->x = dense_to_sparse((double *) X, dims); /* TODO: free */
+    problem->x = dense_to_sparse((double *) X, dims, bias); /* TODO: free */
     problem->bias = bias;
     if (problem->x == NULL) { 
         free(problem);
@@ -191,7 +155,7 @@ int copy_predict(char *train, struct model *model, npy_intp *train_dims,
     register int i, n;
     struct feature_node **train_nodes;
     n = train_dims[0];
-    train_nodes = dense_to_sparse((double *) train, train_dims);
+    train_nodes = dense_to_sparse((double *) train, train_dims, -1.0);
     if (train_nodes == NULL)
         return -1;
     for(i=0; i<n; ++i) {
@@ -214,7 +178,7 @@ int copy_prob_predict(char *predict, struct model *model, npy_intp *predict_dims
     int n, m;
     n = predict_dims[0];
     m = model->nr_class;
-    predict_nodes = dense_to_sparse((double *) predict, predict_dims);
+    predict_nodes = dense_to_sparse((double *) predict, predict_dims, -1.0);
     if (predict_nodes == NULL)
         return -1;
     for(i=0; i<n; ++i) {
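For reference, the merged dense_to_sparse builds each row as liblinear feature_node structs: 1-based indices, zeros skipped, a trailing bias feature of value 1.0 when bias > 0, and index -1 as the row sentinel. A sketch of the same layout in Python (dense_row_to_sparse is a hypothetical stand-in, not part of the module):

    def dense_row_to_sparse(row, bias):
        # liblinear indices start at 1; zero entries are not stored
        nodes = [(j, v) for j, v in enumerate(row, 1) if v != 0]
        if bias > 0:
            nodes.append((len(row) + 1, 1.0))  # trailing bias feature
        nodes.append((-1, None))               # sentinel ends the row
        return nodes

    dense_row_to_sparse([0.0, 2.5, 7.0], bias=1.0)
    # -> [(2, 2.5), (3, 7.0), (4, 1.0), (-1, None)]

Note that copy_predict and copy_prob_predict pass bias = -1.0, so rows built at prediction time carry no bias element.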
diff --git a/scikits/learn/svm.py b/scikits/learn/svm.py
index 32647428b039efb09adbb6c3285901e49645a6a5..849a2c2b9946f79b0dcc1e70f8caac72c82a24db 100644
--- a/scikits/learn/svm.py
+++ b/scikits/learn/svm.py
@@ -355,9 +355,13 @@ class LinearSVC(object):
 
     @property
     def intercept_(self):
-        return self.raw_coef[:,-1]
+        if self.bias_ > 0:
+            return self.raw_coef[:,-1]
+        return 0.0
 
     @property
     def coef_(self):
-        return self.raw_coef[:,:-1]
+        if self.bias_ > 0:
+            return self.raw_coef[:,:-1]
+        return self.raw_coef
 
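Splitting raw_coef this way preserves the decision function: the bias column always multiplies an appended feature of value 1.0, so X @ coef_.T + intercept_ over plain rows equals the full product over bias-augmented rows. A sketch under those assumptions (decision_values is hypothetical):

    import numpy as np

    def decision_values(X, coef, intercept):
        # coef: (n_classes, n_features); intercept is the last column of
        # raw_coef when bias_ > 0 and plain 0.0 otherwise -- broadcasting
        # covers both cases
        return np.dot(X, np.transpose(coef)) + intercept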
diff --git a/scikits/learn/tests/test_logistic.py b/scikits/learn/tests/test_logistic.py
index 84da42798091680bd078ed1e2ca6bead16d9975a..93fb2040af39455d05759ade4d8ee30fa5c656fb 100644
--- a/scikits/learn/tests/test_logistic.py
+++ b/scikits/learn/tests/test_logistic.py
@@ -8,11 +8,19 @@ X = [[0, 0], [0, 1], [1, 1]]
 Y1 = [0,1,1]
 Y2 = [0,1,2]
 
+
 def test_predict_2_classes():
     clf = logistic.LogisticRegression()
     clf.fit(X, Y1)
+    assert_array_almost_equal(clf.coef_, [[-0.27501564, -0.60803562]])
+    assert_array_almost_equal(clf.intercept_, [-0.08642295])
     assert_array_equal(clf.predict([[-1, -1], [0, 1],]), [0, 1])
 
+    clf = logistic.LogisticRegression(intercept=False)
+    clf.fit(X, Y1)
+    assert_array_almost_equal(clf.coef_, [[-0.28540916, -0.63236105]])
+    assert_array_almost_equal(clf.intercept_, [0])
+
 def test_predict_3_classes():
     clf = logistic.LogisticRegression()
     clf.fit(X, Y2)
@@ -22,7 +30,7 @@ def test_predict_proba():
     clf = logistic.LogisticRegression()
     clf.fit(X, Y2)
     assert_array_almost_equal(clf.predict_proba([[1, 1]]),
-                              [[ 0.23148573,  0.31760051,  0.45091376]])
+                              [[ 0.21490268,  0.32639437,  0.45870294]])
 
     clf = logistic.LogisticRegression(penalty='l1')
     clf.fit(X, Y2)