import numpy as np
import scipy.special


class NeuralNetwork:
    """A three-layer feed-forward network trained with backpropagation."""

    def __init__(self, input_nodes, hidden_nodes, output_nodes, learning_rate):
        self.inodes = input_nodes
        self.hnodes = hidden_nodes
        self.onodes = output_nodes
        self.lr = learning_rate

        # Link weight matrices, wih (input -> hidden) and who (hidden -> output),
        # sampled from a normal distribution centred on zero with a standard
        # deviation of 1/sqrt(number of incoming links).
        self.wih = np.random.normal(0.0, pow(self.hnodes, -0.5),
                                    (self.hnodes, self.inodes))
        self.who = np.random.normal(0.0, pow(self.onodes, -0.5),
                                    (self.onodes, self.hnodes))

        # Sigmoid activation function.
        self.activation_function = lambda x: scipy.special.expit(x)

    def train(self, input_list, target_list):
        # Convert the input and target lists to 2-D column vectors.
        inputs = np.array(input_list, ndmin=2).T
        targets = np.array(target_list, ndmin=2).T

        # Forward pass: input layer -> hidden layer -> output layer.
        hidden_inputs = np.dot(self.wih, inputs)
        hidden_outputs = self.activation_function(hidden_inputs)
        final_inputs = np.dot(self.who, hidden_outputs)
        final_outputs = self.activation_function(final_inputs)

        # Output-layer error; hidden-layer error is the output error split
        # back across the hidden-to-output weights.
        output_errors = targets - final_outputs
        hidden_errors = np.dot(self.who.T, output_errors)

        # Gradient-descent weight updates using the sigmoid derivative
        # sigmoid(x) * (1 - sigmoid(x)).
        self.who += self.lr * np.dot(output_errors * final_outputs * (1.0 - final_outputs),
                                     np.transpose(hidden_outputs))
        self.wih += self.lr * np.dot(hidden_errors * hidden_outputs * (1.0 - hidden_outputs),
                                     np.transpose(inputs))

    def query(self, input_list):
        # Convert the input list to a 2-D column vector.
        inputs = np.array(input_list, ndmin=2).T

        # Forward pass only; no weights are updated.
        hidden_inputs = np.dot(self.wih, inputs)
        hidden_outputs = self.activation_function(hidden_inputs)
        final_inputs = np.dot(self.who, hidden_outputs)
        final_outputs = self.activation_function(final_inputs)
        return final_outputs
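
# Example usage (hypothetical, not part of the original listing): query() returns
# a column vector of output activations with shape (output_nodes, 1), so for a
# tiny 3-3-3 network one might see:
#
#   tiny = NeuralNetwork(3, 3, 3, 0.3)
#   tiny.query([1.0, 0.5, -1.5]).shape   # -> (3, 1)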


if __name__ == '__main__':
    # 28x28 pixel images give 784 input nodes; one output node per digit 0-9.
    input_nodes = 784
    hidden_nodes = 100
    output_nodes = 10
    learning_rate = 0.2

    nn = NeuralNetwork(input_nodes, hidden_nodes, output_nodes, learning_rate)

    # Load the MNIST training data (CSV: label followed by 784 pixel values).
    with open('mnist_train.csv', 'r') as training_data_file:
        training_data_list = training_data_file.readlines()

    # Train the network, making several passes over the training set.
    epochs = 5
    for e in range(epochs):
        for record in training_data_list:
            all_values = record.split(',')
            # Scale pixel values from 0-255 into the range 0.01-1.00.
            inputs = np.asarray(all_values[1:], dtype=float) / 255 * 0.99 + 0.01
            # Target output: 0.99 for the correct label, 0.01 everywhere else.
            targets = np.zeros(output_nodes) + 0.01
            targets[int(all_values[0])] = 0.99
            nn.train(inputs, targets)

    # Score the trained network against the test set.
    with open('mnist_test.csv', 'r') as test_data_file:
        test_data_list = test_data_file.readlines()

    scorecard = []
    for record in test_data_list:
        all_values = record.split(',')
        correct_label = int(all_values[0])
        inputs = np.asarray(all_values[1:], dtype=float) / 255 * 0.99 + 0.01
        outputs = nn.query(inputs)
        # The index of the highest output activation is the network's answer.
        label = np.argmax(outputs)
        scorecard.append(1 if label == correct_label else 0)

    scorecard_array = np.array(scorecard)
    print('performance =', scorecard_array.sum() / scorecard_array.size)