from abc import ABC, abstractmethod
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
# Load data and split into training and test sets
df = pd.read_csv('/content/drive/MyDrive/Data_hocmay2/data_logistic.csv')
data = df.to_numpy()
X = data[:, :-1] # All columns except the last one are features
y = data[:, -1].reshape(-1, 1) # Last column is the target
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3,
                                                    random_state=42)

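# Quick sanity check on the split (assumes the CSV's last column holds the
# binary target, as sliced above).
print(f'Train shape: {X_train.shape}, Test shape: {X_test.shape}')
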
class Layer(ABC):
    def __init__(self):
        self.input = None
        self.output = None
        self.input_shape = None
        self.output_shape = None

    @abstractmethod
    def forward_propagation(self, input):
        raise NotImplementedError

    @abstractmethod
    def backward_propagation(self, output_error, learning_rate):
        raise NotImplementedError

class FCLayer(Layer):
    def __init__(self, input_shape, output_shape):
        super().__init__()
        self.input_shape = input_shape
        self.output_shape = output_shape
        # Initialize weights and bias uniformly in [-0.5, 0.5)
        self.weights = np.random.rand(input_shape[1], output_shape[1]) - 0.5
        self.bias = np.random.rand(1, output_shape[1]) - 0.5

    def forward_propagation(self, input):
        self.input = input
        self.output = np.dot(input, self.weights) + self.bias
        return self.output

    def backward_propagation(self, output_error, learning_rate):
        # Ensure output_error is a 2D column vector
        output_error = output_error.reshape(-1, 1)
        input_error = np.dot(self.weights, output_error).reshape(self.input.shape)
        weights_error = np.dot(self.input.reshape(-1, 1), output_error.T)
        # Update parameters
        self.weights -= learning_rate * weights_error
        self.bias -= learning_rate * output_error.T  # Transpose to match the bias shape
        return input_error

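# Hedged sanity check (not part of the original pipeline): a finite-difference
# comparison of FCLayer's analytic input gradient against a numerical estimate.
# The toy input and eps below are assumptions for illustration only.
def _fc_gradient_check(eps=1e-6):
    layer = FCLayer((1, 2), (1, 3))
    x = np.array([[0.4, -0.7]])
    layer.forward_propagation(x)
    # With loss L = sum(output), dL/d(output) is all ones;
    # learning_rate=0.0 leaves the parameters untouched.
    analytic = layer.backward_propagation(np.ones((1, 3)), learning_rate=0.0)
    numeric = np.zeros_like(x)
    for i in range(x.shape[1]):
        xp, xm = x.copy(), x.copy()
        xp[0, i] += eps
        xm[0, i] -= eps
        numeric[0, i] = (layer.forward_propagation(xp).sum()
                         - layer.forward_propagation(xm).sum()) / (2 * eps)
    print('max gradient gap:', np.abs(analytic - numeric).max())

_fc_gradient_check()
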
class ActivationLayer(Layer):
    def __init__(self, input_shape, output_shape, activation, activation_prime):
        super().__init__()
        self.input_shape = input_shape
        self.output_shape = output_shape
        self.activation = activation
        self.activation_prime = activation_prime

    def forward_propagation(self, input):
        self.input = input
        self.output = self.activation(input)
        return self.output

    def backward_propagation(self, output_error, learning_rate):
        # Reshape the local derivative to match output_error for safe
        # element-wise multiplication
        return self.activation_prime(self.input).reshape(output_error.shape) * output_error

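# Illustrative use of ActivationLayer with tanh (an assumed example; the
# relu/relu_prime pair the network actually uses is defined just below).
tanh_layer = ActivationLayer((1, 3), (1, 3), np.tanh,
                             lambda z: 1 - np.tanh(z) ** 2)
print(tanh_layer.forward_propagation(np.array([[-1.0, 0.0, 1.0]])))  # ~[[-0.76, 0., 0.76]]
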
# Define the ReLU activation and its derivative
def relu(z):
    return np.maximum(0, z)

def relu_prime(z):
    z = np.array(z)
    return np.where(z > 0, 1, 0)

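# Quick check on toy values (illustrative assumption): ReLU zeroes negatives,
# and its subgradient here is taken as 0 at z == 0.
z_demo = np.array([[-2.0, 0.0, 3.0]])
print(relu(z_demo))        # [[0. 0. 3.]]
print(relu_prime(z_demo))  # [[0 0 1]]
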
# Define the loss function and its derivative; the 0.5 factor makes the
# derivative exactly y_pred - y_true
def loss(y_true, y_pred):
    return 0.5 * np.sum((y_pred - y_true) ** 2)

def loss_prime(y_true, y_pred):
    return y_pred - y_true

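# Toy example (assumed values) showing the loss/derivative pair in action.
print(loss(np.array([1.0]), np.array([0.8])))        # 0.02
print(loss_prime(np.array([1.0]), np.array([0.8])))  # [-0.2]
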
class Network:
    def __init__(self):
        self.layers = []
        self.loss = None
        self.loss_prime = None

    def add(self, layer):
        self.layers.append(layer)

    def setup_loss(self, loss, loss_prime):
        self.loss = loss
        self.loss_prime = loss_prime

    def predict(self, input_data):
        results = []
        for i in range(len(input_data)):
            output = input_data[i]
            for layer in self.layers:
                output = layer.forward_propagation(output)
            results.append(output)
        return results

    def fit(self, x_train, y_train, learning_rate, epochs):
        for epoch in range(epochs):
            err = 0
            for x, y in zip(x_train, y_train):
                # Forward propagation
                output = x
                for layer in self.layers:
                    output = layer.forward_propagation(output)
                # Accumulate the loss for display
                err += self.loss(y, output)
                # Backward propagation
                error = self.loss_prime(y, output)
                for layer in reversed(self.layers):
                    error = layer.backward_propagation(error, learning_rate)
            err /= len(x_train)
            print(f'Epoch {epoch+1}/{epochs} error: {err}')

# Initialize and add layers to the network
net = Network()
net.add(FCLayer((1, 2), (1, 3)))
net.add(ActivationLayer((1, 3), (1, 3), relu, relu_prime))
net.add(FCLayer((1, 3), (1, 1)))
net.add(ActivationLayer((1, 1), (1, 1), relu, relu_prime))

# Set up the loss function
net.setup_loss(loss, loss_prime)

# Train the network
net.fit(X_train, y_train, learning_rate=0.01, epochs=100)

# Predict using the network
out = net.predict([[4.8, 9.6]])
print(out)
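
# Hedged follow-up sketch: the test split above is otherwise unused. This
# thresholds the raw outputs at 0.5 (an assumption: the final ReLU output is
# not a calibrated probability) to get a rough test accuracy.
test_preds = np.array(net.predict(X_test)).reshape(-1, 1)
test_acc = np.mean((test_preds >= 0.5).astype(float) == y_test)
print(f'Approximate test accuracy: {test_acc:.3f}')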