import numpy as np


class LogisticRegression:
    """Binary logistic regression classifier trained with full-batch gradient descent.

    Expects `X` as a 2-D array of shape (n_samples, n_features) and `y` as a
    1-D array of 0/1 labels with length n_samples.
    """

    def __init__(self, weights=None, bias=0.0, learning_rate=0.001, epochs=1000):
        """Store hyperparameters; `weights`/`bias` are placeholders that
        fit() overwrites with zero-initialized values."""
        self.learning_rate = learning_rate
        self.epochs = epochs
        self.weights = weights
        self.bias = bias

    def sigmoid(self, z):
        """Element-wise logistic function 1 / (1 + e^(-z))."""
        return 1 / (1 + np.exp(-z))

    def cost_function(self, X, y):
        """Return the mean binary cross-entropy of the current model on (X, y).

        BUG FIX: the original computed np.dot(X.T, self.weights), which raises a
        shape mismatch for any non-square X; the forward pass is X @ weights.
        """
        N = y.shape[0]
        pred = self.sigmoid(np.dot(X, self.weights) + self.bias)
        # Clip to keep log() finite when the sigmoid saturates at 0 or 1.
        pred = np.clip(pred, 1e-12, 1 - 1e-12)
        cost = -(1 / N) * np.sum(y * np.log(pred) + (1 - y) * np.log(1 - pred))
        return cost

    def fit(self, X, y):
        """Train by gradient descent for self.epochs iterations.

        Re-initializes weights to zeros and bias to 0, then performs full-batch
        updates; logs the cost every 100 epochs.
        """
        size, features = X.shape
        self.weights = np.zeros(features)
        self.bias = 0.0

        for epoch in range(self.epochs):
            # Forward pass (fixed: X, not X.T, so predictions have shape (size,)).
            linear = np.dot(X, self.weights) + self.bias
            pred = self.sigmoid(linear)

            # Gradients of the cross-entropy loss w.r.t. weights and bias.
            dW = (1 / size) * np.dot(X.T, pred - y)
            dB = (1 / size) * np.sum(pred - y)

            self.weights -= self.learning_rate * dW
            self.bias -= self.learning_rate * dB

            if epoch % 100 == 0:
                cost = self.cost_function(X, y)
                print(f'epoch: {epoch}, cost: {cost}')

    def predict(self, X):
        """Return a list of 0/1 class labels for the rows of X.

        BUG FIX: uses np.dot(X, weights) instead of the original X.T form.
        """
        linear = np.dot(X, self.weights) + self.bias
        pred = self.sigmoid(linear)
        return [1 if p > 0.5 else 0 for p in pred]  # 0.5 threshold for classification