import numpy as np

def sigmoid(X):  # define activation: sigmoid
    return 1 / (1 + np.exp(-X))

def sigmoid_gradient(X):  # derivative of sigmoid: sigmoid(X) * (1 - sigmoid(X))
    s = sigmoid(X)
    return s * (1 - s)
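As a quick check, the analytic gradient can be compared against a centred finite-difference estimate; this is an illustrative sketch (the sample points and tolerance are arbitrary), not part of the derivation:

x = np.array([-2.0, 0.0, 3.0])
eps = 1e-6
numeric = (sigmoid(x + eps) - sigmoid(x - eps)) / (2 * eps)  # finite-difference estimate
print(np.allclose(sigmoid_gradient(x), numeric, atol=1e-6))  # expected: True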
def softmax(X):  # define activation: softmax (row-wise over classes)
    return np.exp(X) / np.sum(np.exp(X), axis=1, keepdims=True)
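Because softmax is applied row-wise, each sample's probabilities sum to 1; a small sketch to verify (the logit values here are made up):

logits = np.array([[2.0, 1.0, 0.1],
                   [0.5, 0.5, 0.5]])
probs = softmax(logits)
print(probs.sum(axis=1))  # expected: [1. 1.]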
def cross_entropy(p, q):  # mean cross-entropy between targets p and predictions q
    epsilon = 1e-15  # avoid log(0)
    H = 0
    for i in range(len(p)):  # accumulate -p * log(q) sample by sample
        H += -p[i] * np.log(q[i] + epsilon)
    return H.sum() / p.shape[0]  # average over the batch
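Putting the pieces together, a minimal sketch (the one-hot targets and logits below are made up) showing how cross_entropy consumes softmax probabilities:

targets = np.array([[1, 0, 0],
                    [0, 1, 0]])      # one-hot ground truth
logits = np.array([[2.0, 1.0, 0.1],
                   [0.2, 1.5, 0.3]])
preds = softmax(logits)
loss = cross_entropy(targets, preds)  # average loss over the 2 samples
print(loss)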
from tensorflow import keras

# One-hot encode the integer labels
y = keras.utils.to_categorical(array)
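For instance, if array were the integer labels [0, 2, 1], the encoding would look like this (illustrative values):

example = keras.utils.to_categorical(np.array([0, 2, 1]))
print(example)
# [[1. 0. 0.]
#  [0. 0. 1.]
#  [0. 1. 0.]]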