# coding: utf-8
"""Common activation and loss functions for simple neural networks."""
import numpy as np


def identity_function(x):
    """Identity activation: return the input unchanged."""
    return x


def step_function(x):
    """Heaviside step: 1 where x > 0, else 0, elementwise.

    Note: ``np.int`` was deprecated in NumPy 1.20 and removed in 1.24;
    the builtin ``int`` dtype is the supported replacement.
    """
    return np.array(x > 0, dtype=int)


def sigmoid(x):
    """Logistic sigmoid: 1 / (1 + exp(-x)), elementwise."""
    return 1 / (1 + np.exp(-x))


def sigmoid_grad(x):
    """Derivative of the sigmoid: s(x) * (1 - s(x))."""
    return (1.0 - sigmoid(x)) * sigmoid(x)


def relu(x):
    """Rectified linear unit: max(0, x), elementwise."""
    return np.maximum(0, x)


def relu_grad(x):
    """Derivative of ReLU: 1 where x >= 0, else 0, elementwise.

    Bug fix: the original used ``np.zeros(x)``, which interprets ``x``
    as a *shape* and raises ``TypeError`` for float arrays.
    ``np.zeros_like`` builds a zero array matching ``x``'s shape.
    """
    grad = np.zeros_like(x)
    grad[x >= 0] = 1
    return grad


def softmax(x):
    """Numerically stable softmax.

    For 2-D input, the softmax is taken over each row; for 1-D input,
    over the whole vector. The per-sample max is subtracted before
    exponentiation to avoid overflow.
    """
    if x.ndim == 2:
        x = x.T
        x = x - np.max(x, axis=0)  # subtract per-column max: overflow guard
        y = np.exp(x) / np.sum(np.exp(x), axis=0)
        return y.T

    x = x - np.max(x)  # overflow guard
    return np.exp(x) / np.sum(np.exp(x))


def mean_squared_error(y, t):
    """Half sum-of-squares error: 0.5 * sum((y - t)^2)."""
    return 0.5 * np.sum((y - t) ** 2)


def cross_entropy_error(y, t):
    """Cross-entropy loss averaged over the batch.

    ``t`` may be either integer class labels or one-hot vectors; one-hot
    targets are converted to label indices before indexing.
    """
    if y.ndim == 1:
        t = t.reshape(1, t.size)
        y = y.reshape(1, y.size)

    # If targets are one-hot vectors, convert them to label indices.
    if t.size == y.size:
        t = t.argmax(axis=1)

    batch_size = y.shape[0]
    # 1e-7 guards against log(0) when a predicted probability is zero.
    return -np.sum(np.log(y[np.arange(batch_size), t] + 1e-7)) / batch_size


def softmax_loss(X, t):
    """Softmax activation followed by cross-entropy loss."""
    y = softmax(X)
    return cross_entropy_error(y, t)