Logistic regression

Jaygodara
1 min read · Sep 14, 2023

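Below is a from-scratch NumPy implementation of multiclass (softmax) logistic regression, trained with mini-batch gradient descent. Besides fitting and predicting, the class records the mean predicted probability of each class once per epoch so the training dynamics can be plotted.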

import numpy as np
import matplotlib.pyplot as plt


class LogisticRegressionMultiClass:
    def __init__(self, num_classes, learning_rate, num_epochs, batch_size):
        self.num_classes = num_classes
        self.learning_rate = learning_rate
        self.num_epochs = num_epochs
        self.batch_size = batch_size
        # Per-class history of mean predicted probability, recorded once per epoch
        self.class_probabilities = [[] for _ in range(self.num_classes)]

    def softmax(self, z):
        # Subtract the row-wise max before exponentiating for numerical stability
        exp_z = np.exp(z - np.max(z, axis=1, keepdims=True))
        return exp_z / np.sum(exp_z, axis=1, keepdims=True)

    def initialize_weights(self, num_features):
        self.W = np.zeros((num_features, self.num_classes))
        self.b = np.zeros(self.num_classes)

    def fit(self, X, y):
        num_samples, num_features = X.shape
        self.initialize_weights(num_features)

        for epoch in range(self.num_epochs):
            # Shuffle the data each epoch
            permutation = np.random.permutation(num_samples)
            X_shuffled = X[permutation]
            y_shuffled = y[permutation]

            for i in range(0, num_samples, self.batch_size):
                X_batch = X_shuffled[i:i + self.batch_size]
                y_batch = y_shuffled[i:i + self.batch_size]
                batch_size = len(X_batch)  # the last batch may be smaller

                # Computing logits and class probabilities
                logits = np.dot(X_batch, self.W) + self.b
                probabilities = self.softmax(logits)

                # Calculating gradients of the cross-entropy loss
                # (one-hot encoding computed once and reused for both gradients)
                error = probabilities - self.one_hot_encode(y_batch)
                grad_W = (1 / batch_size) * np.dot(X_batch.T, error)
                grad_b = (1 / batch_size) * np.sum(error, axis=0)

                # Updating weights, biases
                self.W -= self.learning_rate * grad_W
                self.b -= self.learning_rate * grad_b

            # Record the mean class probabilities once per epoch (taken from the
            # last mini-batch), so each history has one entry per epoch and lines
            # up with the epochs axis in plot_class_probabilities
            mean_probabilities = np.mean(probabilities, axis=0)
            for class_idx in range(self.num_classes):
                self.class_probabilities[class_idx].append(mean_probabilities[class_idx])

    def plot_class_probabilities(self):
        epochs = range(1, self.num_epochs + 1)
        # Cycle through a few markers so the curves stay distinguishable
        # for any number of classes, not just three
        markers = ['^', 'o', 's', 'd', 'v']
        for class_idx in range(self.num_classes):
            plt.plot(epochs, self.class_probabilities[class_idx],
                     label=f'Class {class_idx}', linestyle='-',
                     marker=markers[class_idx % len(markers)])
        plt.xlabel('Epochs')
        plt.ylabel('Mean Class Probability')
        plt.legend()
        plt.title('Mean Class Probabilities vs. Epochs for Each Class (Experiment 2)')
        plt.show()

    def predict(self, X):
        logits = np.dot(X, self.W) + self.b
        probabilities = self.softmax(logits)
        # Predicted class is the one with the highest probability
        return np.argmax(probabilities, axis=1)

    def one_hot_encode(self, y):
        # Vectorized one-hot encoding: row i gets a 1 in column y[i]
        one_hot = np.zeros((len(y), self.num_classes))
        one_hot[np.arange(len(y)), y] = 1
        return one_hot
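
To make the class concrete, here is a minimal usage sketch on synthetic data. The three-Gaussian-blob dataset, the random seed, and every hyperparameter value below are illustrative assumptions, not anything from the original experiment.

# Minimal usage sketch. The synthetic dataset and the hyperparameter
# values here are illustrative assumptions, not the original experiment.
rng = np.random.default_rng(0)
num_per_class, num_features = 100, 2
centers = np.array([[0.0, 0.0], [3.0, 3.0], [0.0, 4.0]])

# Three Gaussian blobs, one per class
X = np.vstack([rng.normal(c, 1.0, size=(num_per_class, num_features))
               for c in centers])
y = np.repeat(np.arange(3), num_per_class)

model = LogisticRegressionMultiClass(num_classes=3, learning_rate=0.1,
                                     num_epochs=50, batch_size=32)
model.fit(X, y)
print('Training accuracy:', np.mean(model.predict(X) == y))
model.plot_class_probabilities()

Note that fit records each epoch's mean probabilities from the last mini-batch only; averaging over the full dataset at the end of each epoch would give a smoother curve at the cost of one extra forward pass per epoch.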
