import numpy as np

from neural_net.activation_layers.activation_layer import ActivationLayer
from neural_net.functions.activation import sigmoid_derivative_activation


class SigmoidLayer(ActivationLayer):
    """Fully-connected layer with a sigmoid activation.

    Weights use Xavier/Glorot uniform initialization, which is the
    standard choice for sigmoid/tanh activations; biases start at zero.
    """

    def __init__(self, input_dim, output_dim, weights=None, biases=None):
        """Initialize the layer.

        Args:
            input_dim: Number of input features.
            output_dim: Number of output units.
            weights: Optional pre-set weight matrix (input_dim, output_dim);
                when None the base class is expected to call
                ``initialize_weights`` — TODO confirm against ActivationLayer.
            biases: Optional pre-set bias row vector (1, output_dim).
        """
        super().__init__(input_dim, output_dim, weights, biases)
        self.subtype = 'Sigmoid'  # human-readable tag identifying the activation kind

    def initialize_weights(self):
        """Initialize weights with Xavier (Glorot) uniform initialization.

        Samples uniformly from [-limit, limit] with
        limit = sqrt(6 / (fan_in + fan_out)), which keeps activation
        variance roughly constant across layers for sigmoid units.
        """
        limit = np.sqrt(6 / (self.input_dim + self.output_dim))
        self.weights = np.random.uniform(-limit, limit, (self.input_dim, self.output_dim))

    def initialize_biases(self):
        """Initialize biases to zero (shape (1, output_dim))."""
        self.biases = np.zeros((1, self.output_dim))

    def activation(self, outputs: np.ndarray) -> np.ndarray:
        """Apply the sigmoid function element-wise.

        BUGFIX: the original implementation returned
        ``sigmoid_derivative_activation(outputs)`` — i.e. the *derivative*
        — making the forward pass wrong. Compute the sigmoid directly,
        using the numerically stable split form that avoids overflow in
        ``exp`` for large-magnitude negative inputs.

        Args:
            outputs: Pre-activation values of any shape.

        Returns:
            Array of the same shape with values in (0, 1).
        """
        out = np.asarray(outputs, dtype=float)
        result = np.empty_like(out)
        pos = out >= 0
        # For x >= 0: 1 / (1 + e^-x); for x < 0: e^x / (1 + e^x).
        # Both forms are algebraically identical but never exponentiate
        # a large positive number, so neither overflows.
        result[pos] = 1.0 / (1.0 + np.exp(-out[pos]))
        exp_neg = np.exp(out[~pos])
        result[~pos] = exp_neg / (1.0 + exp_neg)
        return result

    def activation_derivative(self, outputs: np.ndarray) -> np.ndarray:
        """Return the sigmoid derivative for *outputs*.

        Delegates to the project helper; whether it expects raw
        pre-activations or already-activated values is defined by
        ``neural_net.functions.activation`` — NOTE(review): confirm the
        expected input convention matches the caller.
        """
        return sigmoid_derivative_activation(outputs)