logisticregression.py
import numpy as np

class logistic:
    def __init__(self, learning_rate=0.001, n_iter=1000):
        self.lr = learning_rate
        self.n = n_iter
        self.weights = None
        self.bias = None
    def fit(self, X, y):
        # Initialise the parameters: one weight per feature, bias at zero
        samples, features = X.shape
        self.weights = np.zeros(features)
        self.bias = 0
        # Gradient descent
        for _ in range(self.n):
            # Linear part of the model (w.x + b)
            linear_model = np.dot(X, self.weights) + self.bias
            # Apply the sigmoid to turn scores into probabilities
            y_predict = self.sigmoid(linear_model)
            # Gradients of the cross-entropy loss:
            # dw = (1/n) * X^T (y_predict - y)
            dw = (1 / samples) * np.dot(X.T, (y_predict - y))
            # db = (1/n) * sum(y_predict - y)
            db = (1 / samples) * np.sum(y_predict - y)
            # Update the parameters: w = w - lr*dw and b = b - lr*db
            self.weights -= self.lr * dw
            self.bias -= self.lr * db
    # Predict class labels for the test data
    def predict(self, X):
        linear_model = np.dot(X, self.weights) + self.bias
        y_predict = self.sigmoid(linear_model)
        # Threshold the predicted probabilities at 0.5
        y_predicted_class = [1 if p > 0.5 else 0 for p in y_predict]
        return np.array(y_predicted_class)
    # Sigmoid function for logistic regression
    def sigmoid(self, x):
        return 1 / (1 + np.exp(-x))
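
# A minimal usage sketch, not part of the original file: the synthetic
# dataset and the hyperparameter values below are illustrative assumptions,
# chosen only to show how fit/predict are called on binary {0, 1} labels.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    # Two roughly separable Gaussian blobs in 2-D feature space
    X = np.vstack([rng.normal(2.0, 1.0, size=(50, 2)),
                   rng.normal(-2.0, 1.0, size=(50, 2))])
    y = np.array([1] * 50 + [0] * 50)

    model = logistic(learning_rate=0.01, n_iter=1000)
    model.fit(X, y)
    predictions = model.predict(X)
    print("training accuracy:", np.mean(predictions == y))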