import numpy as np


def cross_entropy_loss(outputs, targets, clip=True):
    """
    Example:
        outputs: [
            [ 0.32, 0.12, 0.04 ],
            [ 0.62, 0.02, 0.14 ]
        ]
        targets: [ 2, 1 ]

    :param outputs: np.array: Matrix of predicted probability vectors, one row per sample
    :param targets: np.array: Vector of integer class indices representing the actual values
    :param clip: boolean, whether to clip the output probabilities
    :return: float: mean cross-entropy loss over the batch
    """
    if clip:
        # Clip the predictions for numerical stability (avoids log(0))
        outputs = np.clip(outputs, 1e-12, 1 - 1e-12)
    # Calculate cross-entropy loss and average over batch size
    m = targets.shape[0]
    # Pick each row's predicted probability at its true class index
    log_likelihood = -np.log(outputs[range(m), targets])
    return np.sum(log_likelihood) / m  # Average loss
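
# Added illustration (not in the original file): outputs[range(m), targets]
# uses NumPy integer fancy indexing to pick, for each row i, the predicted
# probability of its true class targets[i]. For the docstring example above
# it selects outputs[0, 2] = 0.04 and outputs[1, 1] = 0.02.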

def cross_entropy_derivative_loss(outputs, targets):
    # One-hot encode the integer class labels
    y_true = np.eye(outputs.shape[1])[targets]
    # Derivative of cross-entropy with respect to the softmax inputs:
    # for softmax followed by cross-entropy, the gradient is outputs - y_true
    return outputs - y_true
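
# Usage sketch (added illustration, not part of the original file): a quick
# sanity check with two samples and three classes. The probability rows are
# assumed to come from a softmax layer, so each row sums to 1.
if __name__ == "__main__":
    outputs = np.array([
        [0.10, 0.10, 0.80],
        [0.25, 0.60, 0.15],
    ])
    targets = np.array([2, 1])
    # Mean of -log(0.80) and -log(0.60), roughly 0.3670
    print(cross_entropy_loss(outputs, targets))
    # Gradient has the same shape as outputs:
    # [[0.10, 0.10, -0.20], [0.25, -0.40, 0.15]]
    print(cross_entropy_derivative_loss(outputs, targets))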