[Deep Learning] Implementing Activation Functions
Sigmoid

The sigmoid function squashes any real input into the range (0, 1):

```python
import numpy as np

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

sigmoid(4)
# 0.9820137900379085
```

Plotting it over [-10, 10) shows the characteristic S-shaped curve:

```python
import matplotlib.pyplot as plt

x = np.arange(-10, 10, 0.01)
y = sigmoid(x)
plt.plot(x, y)
plt.show()
```

ReLU

ReLU (Rectified Linear Unit) passes positive inputs through unchanged and clamps negative inputs to 0, i.e. max(0, x):

```python
def relu(x):
    return np.maximum(0, x)

x = np.arange(-10, 10, 0.01)
y = relu(x)
plt.plot(x, y)
plt.show()
```

Softmax

Softmax exponentiates each element and divides by the sum of all the exponentials, turning a vector of raw scores into a probability distribution:

```python
def origin_softmax(x):
    f_x = np.exp(x) / np.sum(np.exp(x))
    return f_x

x = np.array([1.3, 5.1, 2.2, 0.7, ..
```
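The example input is cut off above, so the remaining values are unknown. As a minimal sketch, assume one extra element (the 1.1 is hypothetical, not from the original) and check that the outputs behave like probabilities:

```python
# Hypothetical completion of the truncated example;
# the final element 1.1 is an assumed value.
x = np.array([1.3, 5.1, 2.2, 0.7, 1.1])
y = origin_softmax(x)
print(y)          # every entry lies in (0, 1)
print(np.sum(y))  # 1.0 -- the outputs form a probability distribution
```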
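One caveat with this naive implementation: np.exp overflows for large inputs, so origin_softmax on values around 1000 produces inf and then nan. A common remedy, sketched below, subtracts the maximum element before exponentiating; the result is unchanged because softmax is invariant to adding a constant to every element.

```python
def stable_softmax(x):
    # Subtracting max(x) leaves the result identical:
    # exp(x - c) / sum(exp(x - c)) == exp(x) / sum(exp(x)),
    # but keeps the exponentials in a safe numeric range.
    z = x - np.max(x)
    return np.exp(z) / np.sum(np.exp(z))

big = np.array([1000.0, 1001.0, 1002.0])
# origin_softmax(big) would overflow to nan; the stable version works:
print(stable_softmax(big))  # [0.09003057 0.24472847 0.66524096]
```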