Merge pull request 'Initial file sync' (#1) from ready into master

pl5g92irw 3 years ago
commit db09feff98

nn.py

@@ -0,0 +1,124 @@
import numpy as np

# Create a neural net: one all-ones weight matrix, one all-zeros bias
# vector and one activation name per layer
def create_neural_net(layer_array, input_dims):
    weights = []
    biases = []
    activations = []
    for i in range(len(layer_array)):
        node_num = layer_array[i][0]
        # Number of inputs feeding this layer
        if i == 0:
            last_layer_node_number = input_dims
        else:
            last_layer_node_number = layer_array[i - 1][0]
        # Float arrays, so the training updates below stay element-wise
        # (with plain Python lists, `+=` would extend the list instead)
        weights.append(np.ones((node_num, last_layer_node_number)))
        biases.append(np.zeros(node_num))
        activations.append(layer_array[i][1])
    return [weights, biases, activations]
# Activations
def sigmoid(x):
    return 1 / (1 + np.exp(-x))

def sigmoid_deriv(x):
    # Derivative expressed in terms of the sigmoid *output* x
    return x * (1 - x)

def relu(x):
    # Element-wise, so it also works on whole layers
    # (a scalar `if x < 0` test fails on numpy arrays)
    return np.maximum(0, x)
# Prediction: forward pass through every layer
def predict_ratio(data, neural_net):
    weights = neural_net[0]
    biases = neural_net[1]
    activations = neural_net[2]
    layer_num = len(weights)
    for l in range(layer_num):
        data = np.dot(weights[l], data) + biases[l]
        if activations[l] == 'sigmoid':
            data = sigmoid(data)
        elif activations[l] == 'relu':
            data = relu(data)
        else:
            # Unknown activation name: fall back to sigmoid
            data = sigmoid(data)
            print('activation function', activations[l], 'cannot be found, sigmoid is used')
    return data
def predict(data, neural_net):
    data = predict_ratio(data, neural_net)
    # Return the index and probability of the highest-scoring class
    highest_class = None
    highest_class_probability = -1
    for i in range(len(data)):
        if highest_class is None or data[i] > highest_class_probability:
            highest_class = i
            highest_class_probability = data[i]
    return highest_class, highest_class_probability
# Training: single-layer delta rule. Only the first layer's weights are
# updated, so this only trains one-layer networks.
def train_network(X, Y, labels, neural_net, epochs=1000):
    for epoch in range(epochs):
        for d in range(len(X)):
            prediction = predict_ratio(X[d], neural_net)
            # One-hot encode the true label
            true_prediction = np.zeros(len(labels))
            true_prediction[labels.index(Y[d])] = 1
            # Error per output node
            errors = true_prediction - prediction
            adjust_deriv = errors * sigmoid_deriv(prediction)
            for k in range(len(adjust_deriv)):
                # Update output node k: input scaled by its error term
                adjustment = np.dot(X[d], adjust_deriv[k])
                neural_net[0][0][k] += adjustment
    return neural_net
if __name__ == '__main__':
    X = [[1, 1, 0, 0, 1, 1, 1, 1], [1, 1, 0, 0, 0, 1, 1, 1], [1, 0, 0, 0, 1, 1, 1, 1], [0, 1, 1, 1, 0, 0, 0, 1], [0, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 1, 1, 0, 0, 0]]
    Y = [1, 1, 1, 0, 0, 0]
    labels = [0, 1]
    layer_array = [[len(labels), 'sigmoid']]
    input_dims = 8
    neural_net = create_neural_net(layer_array, input_dims)
    print('weights:', neural_net[0], '\nbiases:', neural_net[1], '\nactivations:', neural_net[2])
    neural_net = train_network(X, Y, labels, neural_net, epochs=1000)
    for i in range(len(X)):
        print(predict(X[i], neural_net))
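
A quick way to sanity-check the training loop is to count correct predictions on the training set. A minimal sketch, assuming the file above is saved as nn.py; none of this is part of the commit:

# Minimal sanity check for the trained single-layer net
from nn import create_neural_net, train_network, predict

X = [[1, 1, 0, 0, 1, 1, 1, 1], [0, 1, 1, 1, 0, 0, 0, 1]]
Y = [1, 0]
labels = [0, 1]

net = create_neural_net([[len(labels), 'sigmoid']], input_dims=8)
net = train_network(X, Y, labels, net, epochs=1000)
correct = sum(predict(x, net)[0] == y for x, y in zip(X, Y))
print('training accuracy:', correct / len(X))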

tasks.v

@@ -0,0 +1,25 @@
// Task version
task automatic sigmoid_deriv_task;
    input [7:0] x;
    output [7:0] y;
    begin
        y = x * (1 - x);
    end
endtask

// Function version
function automatic [7:0] sigmoid_deriv_func;
    input signed [7:0] x;
    begin
        sigmoid_deriv_func = x * (1 - x);
    end
endfunction

function automatic [7:0] sigmoid;
    input signed [7:0] x;
    begin
        // Note: the real-valued expression is converted to an 8-bit
        // integer reg on assignment, so the result collapses to 0 or 1;
        // fractional outputs would need a fixed-point representation.
        sigmoid = 1 / (1 + 2.71828**(-x));
    end
endfunction
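
Because the 8-bit integer sigmoid above collapses everything between 0 and 1, a fixed-point encoding is the usual workaround. A sketch of the arithmetic in Python (the Q4.4 format is an assumption, not something the commit uses):

# Q4.4 fixed-point sigmoid reference model (assumed format: 4 integer
# bits, 4 fractional bits; not part of the Verilog above)
import math

def sigmoid_q44(x_q44):
    x = x_q44 / 16.0                    # decode Q4.4 input to a real
    y = 1.0 / (1.0 + math.exp(-x))      # real-valued sigmoid
    return round(y * 16)                # encode result back to Q4.4

for x in (-32, 0, 16, 32):              # -2.0, 0.0, 1.0, 2.0 in Q4.4
    print(x, '->', sigmoid_q44(x))      # 2, 8, 12, 14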

@@ -0,0 +1,41 @@
module main;
    `include "tasks.v"  // Include all tasks and functions

    // global vars
    reg [7:0] a [0:3];
    reg [7:0] b [0:3] [0:3];
    reg signed [7:0] i = 2;
    reg signed [7:0] j;
    reg [8*14:1] string_test = "ad";

    // main()
    initial begin
        for (i = 0; i <= 3; i = i + 1) begin
            sigmoid_deriv_task(i, j);  // Demonstrating a task in a for loop
            $display(j);
        end
        for (i = 0; i <= 3; i = i + 1) begin
            j = sigmoid_deriv_func(i);  // Demonstrating a function in a for loop
            $display(j);
        end
        for (i = 0; i <= 3; i = i + 1) begin
            for (j = 0; j <= 3; j = j + 1) begin  // Filling the 4x4 2D array
                b[i][j] = i * j;
            end
        end
        j = 2;
        i = sigmoid(j);  // Collapses to 0 or 1 in 8-bit integer arithmetic;
                         // value is discarded when i is reused as an index below
        for (i = 0; i <= 3; i = i + 1) begin
            for (j = 0; j <= 3; j = j + 1) begin
                $write(b[i][j]);
            end
            $display();
        end
    end
endmodule
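
For reference, the expected task and function outputs can be cross-checked against a small Python model of the 8-bit two's-complement arithmetic; a sketch, not part of the commit:

# Model of sigmoid_deriv_func: x*(1-x) wrapped to 8 bits, reported as
# the signed value $display prints into j
def sigmoid_deriv_8bit(x):
    y = (x * (1 - x)) & 0xFF          # 8-bit two's-complement wrap
    return y - 256 if y >= 128 else y

for x in range(4):                     # same inputs as the loops above
    print(x, '->', sigmoid_deriv_8bit(x))   # 0, 0, -2, -6

To run the module itself, Icarus Verilog works: iverilog -o sim main.v followed by vvp sim, assuming the module above is saved as main.v next to tasks.v (main.v is an assumed file name; only the module contents appear in the commit).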