|
- import numpy as np
-
def relu_activation(outputs):
    """Apply the ReLU non-linearity element-wise.

    Parameters
    ----------
    outputs : array_like
        Pre-activation values of any shape.

    Returns
    -------
    numpy.ndarray
        ``max(x, 0)`` for each element.
    """
    # np.maximum broadcasts the scalar 0 against the whole array in one C-level pass.
    return np.maximum(outputs, 0)
-
def relu_derivative_activation(outputs):
    """Element-wise derivative of ReLU with respect to its input.

    Parameters
    ----------
    outputs : array_like
        The same pre-activation values that were fed to ReLU.

    Returns
    -------
    numpy.ndarray
        1 where the input is strictly positive, 0 elsewhere
        (the derivative at exactly 0 is taken to be 0 by convention).
    """
    positive_mask = outputs > 0
    return np.where(positive_mask, 1, 0)
-
def sigmoid_activation(outputs):
    """Element-wise logistic sigmoid: ``1 / (1 + exp(-x))``.

    Uses the numerically stable split form: the naive
    ``1 / (1 + np.exp(-x))`` computes ``exp`` of a large positive number
    when ``x`` is very negative, which overflows to ``inf`` and raises a
    RuntimeWarning. Here ``exp`` is only ever taken of ``-|x| <= 0``,
    whose result lies in (0, 1] and can never overflow.

    Parameters
    ----------
    outputs : array_like
        Pre-activation values of any shape.

    Returns
    -------
    numpy.ndarray
        Sigmoid of each element, in the open interval (0, 1)
        (saturating to exactly 0.0 / 1.0 at float precision limits).
    """
    outputs = np.asarray(outputs, dtype=float)
    # exp of a non-positive argument is bounded by 1 -- overflow-free.
    safe_exp = np.exp(-np.abs(outputs))
    # For x >= 0: sigmoid(x) = 1 / (1 + e^-x); for x < 0: e^x / (1 + e^x).
    # Both branches are algebraically identical to the sigmoid.
    return np.where(outputs >= 0,
                    1.0 / (1.0 + safe_exp),
                    safe_exp / (1.0 + safe_exp))
-
def sigmoid_derivative_activation(outputs):
    """Derivative of the sigmoid, computed from sigmoid *outputs*.

    NOTE(review): this computes ``s * (1 - s)``, which is the sigmoid
    derivative only when ``outputs`` already holds sigmoid activations
    (not raw pre-activations) — confirm against the caller.

    Parameters
    ----------
    outputs : array_like
        Values assumed to be sigmoid activations, element-wise in [0, 1].

    Returns
    -------
    numpy.ndarray
        ``s * (1 - s)`` for each element ``s``.
    """
    complement = 1 - outputs
    return outputs * complement
|