Add LogisticRegression.py
commit 3aa6c7148f
LogisticRegression.py (Normal file, 41 additions)
@@ -0,0 +1,41 @@
import numpy as np


class LogisticRegression:

    def __init__(self, weights=None, bias=None, learning_rate=0.001, epochs=1000):
        self.learning_rate = learning_rate
        self.epochs = epochs
        self.weights = weights
        self.bias = bias

    def sigmoid(self, z):
        return 1 / (1 + np.exp(-z))

    def cost_function(self, X, y):
        # Binary cross-entropy loss, averaged over the N samples.
        N = y.shape[0]
        pred = self.sigmoid(np.dot(X, self.weights) + self.bias)
        # Clip predictions away from 0 and 1 so the logs stay finite.
        pred = np.clip(pred, 1e-12, 1 - 1e-12)
        cost = -(1 / N) * np.sum(y * np.log(pred) + (1 - y) * np.log(1 - pred))
        return cost

    def fit(self, X, y):
        # X has shape (size, features); training starts from zero weights.
        size, features = X.shape
        self.weights = np.zeros(features)
        self.bias = 0

        for epoch in range(self.epochs):
            linear = np.dot(X, self.weights) + self.bias
            pred = self.sigmoid(linear)

            # Gradients of the cross-entropy loss w.r.t. weights and bias.
            dW = (1 / size) * np.dot(X.T, (pred - y))
            dB = (1 / size) * np.sum(pred - y)

            self.weights -= self.learning_rate * dW
            self.bias -= self.learning_rate * dB

            if epoch % 100 == 0:
                cost = self.cost_function(X, y)
                print(f'epoch: {epoch}, cost: {cost}')

    def predict(self, X):
        linear = np.dot(X, self.weights) + self.bias
        pred = self.sigmoid(linear)
        return [1 if i > 0.5 else 0 for i in pred]  # 0.5 threshold for classification
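
For a quick sanity check, here is a minimal usage sketch (not part of this commit): the two-blob synthetic dataset, the seed, and the hyperparameters are illustrative assumptions, not values taken from the repository.

import numpy as np

from LogisticRegression import LogisticRegression

# Hypothetical smoke test: two Gaussian blobs, one per class, that are
# roughly linearly separable, so gradient descent should fit them well.
rng = np.random.default_rng(0)
X0 = rng.normal(loc=-1.0, scale=1.0, size=(100, 2))  # class 0 samples
X1 = rng.normal(loc=1.0, scale=1.0, size=(100, 2))   # class 1 samples
X = np.vstack([X0, X1])
y = np.concatenate([np.zeros(100), np.ones(100)])

model = LogisticRegression(learning_rate=0.05, epochs=1000)
model.fit(X, y)  # prints the cost every 100 epochs

pred = np.array(model.predict(X))
print('train accuracy:', (pred == y).mean())

Note that fit re-initializes the weights and bias to zeros, so the weights and bias constructor arguments only matter if you call cost_function or predict without fitting first.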