diff --git a/NeuralNetwork.py b/NeuralNetwork.py
new file mode 100644
index 0000000..c8f3b0a
--- /dev/null
+++ b/NeuralNetwork.py
@@ -0,0 +1,53 @@
+import numpy as np
+import random
+
+class NeuralNetwork:
+    def __init__(self, layout, activation):
+        self.activation = activation
+        self.layout = layout
+        self.weights = [[[random.random()*2-1 for connection in range(layout[layer-1])] for neuron in range(layout[layer])] for layer in range(1, len(layout))]
+        self.biases = [[random.random()*2-1 for neuron in range(layout[layer])] for layer in range(1, len(layout))]
+
+    def toString(self):
+        return "weights:\n" + str(self.weights) + "\nbiases:\n" + str(self.biases)
+
+    def getActivations(self, inputs):
+        lastLayer = inputs
+        result = []
+        for layer in range(len(self.weights)):
+            lastLayer = [self.activation(self.biases[layer][neuron] + sum(
+                [self.weights[layer][neuron][connection]*lastLayer[connection] for connection in range(len(self.weights[layer][0]))]))
+                for neuron in range(len(self.weights[layer]))]
+            result.append(lastLayer)
+        return result
+
+    def evaluate(self, inputs):
+        return self.getActivations(inputs)[-1]
+
+    def fit(self, data, trainingSpeed):
+        weightAdjust = [[[0 for connection in range(len(self.weights[layer][neuron]))] for neuron in range(len(self.weights[layer]))] for layer in range(len(self.weights))]
+        biasAdjust = [[0 for neuron in range(len(self.weights[layer]))] for layer in range(len(self.weights))]
+        error = 0
+        for dataEntry in data:
+            activations = [dataEntry[0]] + self.getActivations(dataEntry[0])
+            optimum = dataEntry[1]
+            adjust = [optimum[i] - activations[-1][i] for i in range(len(optimum))]
+            error += sum([x**2 for x in adjust])
+            print(activations, adjust)
+            for layerNr in range(len(self.weights)):  # backpropagation, output layer first
+                layer = len(self.weights) - layerNr - 1
+                newAdjust = [0] * len(self.weights[layer][0])
+                for neuron in range(len(self.weights[layer])):
+                    for connection in range(len(self.weights[layer][neuron])):
+                        weightAdjust[layer][neuron][connection] += adjust[neuron] * activations[layer][connection]
+                        newAdjust[connection] += activations[layer][connection] * adjust[neuron]
+                    biasAdjust[layer][neuron] += adjust[neuron]
+                adjust = newAdjust
+        maxAdjust = max(
+            max(abs(w) for layerW in weightAdjust for neuronW in layerW for w in neuronW),
+            max(abs(b) for layerB in biasAdjust for neuronB in layerB for b in neuronB))
+        self.weights = [[[self.weights[layer][neuron][connection] + weightAdjust[layer][neuron][connection] / maxAdjust * trainingSpeed
+                          for connection in range(len(self.weights[layer][neuron]))] for neuron in range(len(self.weights[layer]))] for layer in range(len(self.weights))]
+        self.biases = [[self.biases[layer][neuron] + biasAdjust[layer][neuron] / maxAdjust * trainingSpeed
+                        for neuron in range(len(self.biases[layer]))] for layer in range(len(self.biases))]
+        return error
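For reference, getActivations computes activation(W·a + b) layer by layer, where each row of a layer's weight matrix holds one neuron's incoming connection weights. A NumPy-vectorized equivalent of the same forward pass (a sketch only, not part of the patch; it assumes the nested-list weight layout used above and a vectorized activation such as a NumPy-based sigmoid) could look like this:

import numpy as np

def get_activations_np(weights, biases, activation, inputs):
    # Sketch: vectorized version of NeuralNetwork.getActivations.
    # weights[layer][neuron][connection] and biases[layer][neuron] follow the class's layout.
    a = np.asarray(inputs, dtype=float)
    result = []
    for W, b in zip(weights, biases):
        a = activation(np.asarray(W) @ a + np.asarray(b))  # weighted sum plus bias, then activation
        result.append(a)
    return result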
diff --git a/main.py b/main.py
new file mode 100644
index 0000000..ac9be70
--- /dev/null
+++ b/main.py
@@ -0,0 +1,31 @@
+from NeuralNetwork import NeuralNetwork
+import numpy as np
+layout = [3, 2]
+
+def cap(x):
+    return min(1, max(-1, x))
+
+def sigmoid(x):
+    return 1 / (1 + np.exp(-x))
+
+NN = NeuralNetwork(layout, sigmoid)
+
+data = [
+    [[0, 0, 0], [0, 0]],
+    [[0, 0, 1], [0, 1]],
+    [[0, 1, 0], [1, 0]],
+    [[0, 1, 1], [1, 1]],
+    [[1, 0, 0], [0, 0]],
+    [[1, 0, 1], [0, 1]],
+    [[1, 1, 0], [1, 0]],
+]
+
+# NN.train(data)
+
+print(NN.toString())
+print("")
+# print(NN.evaluate(data[0][0]))
+for i in range(10):
+    print(NN.fit(data, (10 - i) / 20))
+print(NN.evaluate([1, 1, 1]))
+print(NN.toString())
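main.py runs fit ten times with a learning rate that decays from (10-0)/20 = 0.5 down to (10-9)/20 = 0.05, printing the summed squared error each pass. A quick sanity check of the trained network against the training pairs might look like the sketch below; it reuses NN and data from main.py, and the 0.5 rounding threshold is an assumption, not something the script defines.

# Sketch: compare the trained network's outputs with the training targets.
for inputs, target in data:
    raw = NN.evaluate(inputs)
    predicted = [1 if x > 0.5 else 0 for x in raw]
    print(inputs, "->", predicted, "target:", target, "raw:", [round(x, 3) for x in raw])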