From 11f8c306c89eb812b33f7ab9cad169c4c81dead4 Mon Sep 17 00:00:00 2001
From: ved-et9 <98445270+ved-et9@users.noreply.github.com>
Date: Tue, 3 Oct 2023 12:38:05 +0530
Subject: [PATCH 1/2] Updated simple_neural_network.py

---
 neural_network/simple_neural_network.py | 179 ++++++++++++++++--------
 1 file changed, 119 insertions(+), 60 deletions(-)

diff --git a/neural_network/simple_neural_network.py b/neural_network/simple_neural_network.py
index f2a3234873b5..e61dbd223cbd 100644
--- a/neural_network/simple_neural_network.py
+++ b/neural_network/simple_neural_network.py
@@ -1,63 +1,122 @@
-"""
-Forward propagation explanation:
-https://towardsdatascience.com/forward-propagation-in-neural-networks-simplified-math-and-code-version-bbcfef6f9250
-"""
-
-import math
-import random
-
-
-# Sigmoid
-def sigmoid_function(value: float, deriv: bool = False) -> float:
-    """Return the sigmoid function of a float.
-
-    >>> sigmoid_function(3.5)
-    0.9706877692486436
-    >>> sigmoid_function(3.5, True)
-    -8.75
-    """
-    if deriv:
-        return value * (1 - value)
-    return 1 / (1 + math.exp(-value))
-
-
-# Initial Value
-INITIAL_VALUE = 0.02
-
-
-def forward_propagation(expected: int, number_propagations: int) -> float:
-    """Return the value found after the forward propagation training.
-
-    >>> res = forward_propagation(32, 10000000)
-    >>> res > 31 and res < 33
-    True
-
-    >>> res = forward_propagation(32, 1000)
-    >>> res > 31 and res < 33
-    False
-    """
-
-    # Random weight
-    weight = float(2 * (random.randint(1, 100)) - 1)
-
-    for _ in range(number_propagations):
-        # Forward propagation
-        layer_1 = sigmoid_function(INITIAL_VALUE * weight)
-        # How much did we miss?
-        layer_1_error = (expected / 100) - layer_1
-        # Error delta
-        layer_1_delta = layer_1_error * sigmoid_function(layer_1, True)
-        # Update weight
-        weight += INITIAL_VALUE * layer_1_delta
-
-    return layer_1 * 100
-
-
-if __name__ == "__main__":
-    import doctest
-
-    doctest.testmod()
-
-    expected = int(input("Expected value: "))
-    number_propagations = int(input("Number of propagations: "))
-    print(forward_propagation(expected, number_propagations))
+
+"""
+Simple Neural Network
+
+https://machinelearningmastery.com/implement-backpropagation-algorithm-scratch-python/
+
+"""
+from random import seed
+from random import random
+from math import exp
+
+#Initializing Network
+def initialize_network(n_input,n_hidden,n_output):
+    network=list()
+    hidden_layer=[{'weights':[random() for i in range(n_input+1)]} for i in range(n_hidden)]
+    network.append(hidden_layer)
+    output_layer=[{'weights':[random() for i in range(n_hidden+1)]} for i in range(n_output)]
+    network.append(output_layer)
+    return network
+
+
+# Forward Propagate
+    # 1.Neuron Activation.
+    # 2.Neuron Transfer.
+    # 3.Forward Propagation.
+
+# Neuron activation is calculated as the weighted sum of the inputs
+def activate(weights,inputs):
+    activation=weights[-1]
+    for i in range(len(weights)-1):
+        activation+=weights[i]*inputs[i]
+    return activation
+def transfer(activation):
+    return 1.0/(1.0+exp(-activation))
+
+
+def forward_propagate(network,row):
+    inputs=row
+    for layer in network:
+        new_inputs=[]
+        for neuron in layer:
+            activation=activate(neuron['weights'],inputs)
+            neuron['output']=transfer(activation)
+            new_inputs.append(neuron['output'])
+        inputs=new_inputs
+
+    return inputs
+
+
+#Back Propagation
+    # 1.Transfer Derivative.
+    # 2.Error Backpropagation.
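+# The transfer derivative uses the sigmoid's convenient property: since
+# output = 1 / (1 + e^(-activation)), its slope is output * (1.0 - output).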
+def transfer_derivative(output):
+    return output*(1.0-output)
+
+
+def back_propagate_error(network,expected):
+    for i in reversed(range(len(network))):
+        layer=network[i]
+        errors=list()
+
+        if i != len(network)-1:
+            for j in range(len(layer)):
+                error=0.0
+                for neuron in network[i+1]:
+                    error += (neuron['weights'][j]*neuron['delta'])
+                errors.append(error)
+        else:
+            for j in range(len(layer)):
+                neuron=layer[j]
+                errors.append(neuron['output']-expected[j])
+
+        for j in range(len(layer)):
+            neuron=layer[j]
+            neuron['delta']=errors[j]*transfer_derivative(neuron['output'])
+
+# Once errors are calculated for each neuron in the network via the back propagation method above,
+# they can be used to update weights.
+# Update rule (gradient descent): weight -= learning_rate * delta * input,
+# with the bias weight (weights[-1]) updated against a constant input of 1.
+def update_weights(network, row, l_rate):
+    for i in range(len(network)):
+        inputs = row[:-1]
+        if i != 0:
+            inputs = [neuron['output'] for neuron in network[i - 1]]
+        for neuron in network[i]:
+            for j in range(len(inputs)):
+                neuron['weights'][j] -= l_rate * neuron['delta'] * inputs[j]
+            neuron['weights'][-1] -= l_rate * neuron['delta']
+
+
+## Training
+
+def train_network(network, train, l_rate, n_epoch, n_outputs):
+    for epoch in range(n_epoch):
+        sum_error = 0
+        for row in train:
+            outputs = forward_propagate(network, row)
+            expected = [0 for i in range(n_outputs)]
+            expected[row[-1]] = 1
+            sum_error += sum([(expected[i]-outputs[i])**2 for i in range(len(expected))])
+            back_propagate_error(network, expected)
+            update_weights(network, row, l_rate)
+        print('>epoch=%d, lrate=%.3f, error=%.3f' % (epoch, l_rate, sum_error))
+
+seed(1)
+dataset = [[2.7810836,2.550537003,0],
+           [1.465489372,2.362125076,0],
+           [3.396561688,4.400293529,0],
+           [1.38807019,1.850220317,0],
+           [3.06407232,3.005305973,0],
+           [7.627531214,2.759262235,1],
+           [5.332441248,2.088626775,1],
+           [6.922596716,1.77106367,1],
+           [8.675418651,-0.242068655,1],
+           [7.673756466,3.508563011,1]]
+n_inputs = len(dataset[0]) - 1
+n_outputs = len(set([row[-1] for row in dataset]))
+network = initialize_network(n_inputs, 2, n_outputs)
+train_network(network, dataset, 0.7, 30, n_outputs)
+for layer in network:
+    print(layer)

From b1411610eaff9ddf956256ae5c409f1398095a50 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 3 Oct 2023 07:25:23 +0000
Subject: [PATCH 2/2] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 neural_network/simple_neural_network.py | 170 +++++++++++++-----------
 1 file changed, 91 insertions(+), 79 deletions(-)

diff --git a/neural_network/simple_neural_network.py b/neural_network/simple_neural_network.py
index e61dbd223cbd..cbe649ce48b3 100644
--- a/neural_network/simple_neural_network.py
+++ b/neural_network/simple_neural_network.py
@@ -1,4 +1,3 @@
-
 """
 Simple Neural Network
 
@@ -9,114 +8,127 @@
 from random import random
 from math import exp
 
-#Initializing Network
-def initialize_network(n_input,n_hidden,n_output):
-    network=list()
-    hidden_layer=[{'weights':[random() for i in range(n_input+1)]} for i in range(n_hidden)]
-    network.append(hidden_layer)
-    output_layer=[{'weights':[random() for i in range(n_hidden+1)]} for i in range(n_output)]
-    network.append(output_layer)
-    return network
+
+# Initializing Network
+def initialize_network(n_input, n_hidden, n_output):
+    network = list()
+    hidden_layer = [
+        {"weights": [random() for i in range(n_input + 1)]} for i in range(n_hidden)
+    ]
+    network.append(hidden_layer)
+    output_layer = [
+        {"weights": [random() for i in range(n_hidden + 1)]} for i in range(n_output)
+    ]
+    network.append(output_layer)
+    return network
 
 
 # Forward Propagate
-    # 1.Neuron Activation.
-    # 2.Neuron Transfer.
-    # 3.Forward Propagation.
+# 1.Neuron Activation.
+# 2.Neuron Transfer.
+# 3.Forward Propagation.
+
 
 # Neuron activation is calculated as the weighted sum of the inputs
-def activate(weights,inputs):
-    activation=weights[-1]
-    for i in range(len(weights)-1):
-        activation+=weights[i]*inputs[i]
-    return activation
-def transfer(activation):
-    return 1.0/(1.0+exp(-activation))
+def activate(weights, inputs):
+    activation = weights[-1]
+    for i in range(len(weights) - 1):
+        activation += weights[i] * inputs[i]
+    return activation
+
+
+def transfer(activation):
+    return 1.0 / (1.0 + exp(-activation))
 
 
-def forward_propagate(network,row):
-    inputs=row
-    for layer in network:
-        new_inputs=[]
-        for neuron in layer:
-            activation=activate(neuron['weights'],inputs)
-            neuron['output']=transfer(activation)
-            new_inputs.append(neuron['output'])
-        inputs=new_inputs
-
-    return inputs
+def forward_propagate(network, row):
+    inputs = row
+    for layer in network:
+        new_inputs = []
+        for neuron in layer:
+            activation = activate(neuron["weights"], inputs)
+            neuron["output"] = transfer(activation)
+            new_inputs.append(neuron["output"])
+        inputs = new_inputs
+
+    return inputs
 
 
-#Back Propagation
-    # 1.Transfer Derivative.
-    # 2.Error Backpropagation.
+# Back Propagation
+# 1.Transfer Derivative.
+# 2.Error Backpropagation.
 # The transfer derivative uses the sigmoid's convenient property: since
 # output = 1 / (1 + e^(-activation)), its slope is output * (1.0 - output).
 def transfer_derivative(output):
-    return output*(1.0-output)
+    return output * (1.0 - output)
 
 
-def back_propagate_error(network,expected):
-    for i in reversed(range(len(network))):
-        layer=network[i]
-        errors=list()
-
-        if i != len(network)-1:
-            for j in range(len(layer)):
-                error=0.0
-                for neuron in network[i+1]:
-                    error += (neuron['weights'][j]*neuron['delta'])
-                errors.append(error)
-        else:
-            for j in range(len(layer)):
-                neuron=layer[j]
-                errors.append(neuron['output']-expected[j])
-
-        for j in range(len(layer)):
-            neuron=layer[j]
-            neuron['delta']=errors[j]*transfer_derivative(neuron['output'])
+def back_propagate_error(network, expected):
+    for i in reversed(range(len(network))):
+        layer = network[i]
+        errors = list()
+
+        if i != len(network) - 1:
+            for j in range(len(layer)):
+                error = 0.0
+                for neuron in network[i + 1]:
+                    error += neuron["weights"][j] * neuron["delta"]
+                errors.append(error)
+        else:
+            for j in range(len(layer)):
+                neuron = layer[j]
+                errors.append(neuron["output"] - expected[j])
+
+        for j in range(len(layer)):
+            neuron = layer[j]
+            neuron["delta"] = errors[j] * transfer_derivative(neuron["output"])
+
 
 # Once errors are calculated for each neuron in the network via the back propagation method above,
 # they can be used to update weights.
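 # Update rule (gradient descent): weight -= learning_rate * delta * input,
 # with the bias weight (weights[-1]) updated against a constant input of 1.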
 def update_weights(network, row, l_rate):
-    for i in range(len(network)):
-        inputs = row[:-1]
-        if i != 0:
-            inputs = [neuron['output'] for neuron in network[i - 1]]
-        for neuron in network[i]:
-            for j in range(len(inputs)):
-                neuron['weights'][j] -= l_rate * neuron['delta'] * inputs[j]
-            neuron['weights'][-1] -= l_rate * neuron['delta']
+    for i in range(len(network)):
+        inputs = row[:-1]
+        if i != 0:
+            inputs = [neuron["output"] for neuron in network[i - 1]]
+        for neuron in network[i]:
+            for j in range(len(inputs)):
+                neuron["weights"][j] -= l_rate * neuron["delta"] * inputs[j]
+            neuron["weights"][-1] -= l_rate * neuron["delta"]
 
 
 ## Training
 
+
 def train_network(network, train, l_rate, n_epoch, n_outputs):
-    for epoch in range(n_epoch):
-        sum_error = 0
-        for row in train:
-            outputs = forward_propagate(network, row)
-            expected = [0 for i in range(n_outputs)]
-            expected[row[-1]] = 1
-            sum_error += sum([(expected[i]-outputs[i])**2 for i in range(len(expected))])
-            back_propagate_error(network, expected)
-            update_weights(network, row, l_rate)
-        print('>epoch=%d, lrate=%.3f, error=%.3f' % (epoch, l_rate, sum_error))
+    for epoch in range(n_epoch):
+        sum_error = 0
+        for row in train:
+            outputs = forward_propagate(network, row)
+            expected = [0 for i in range(n_outputs)]
+            expected[row[-1]] = 1
+            sum_error += sum(
+                [(expected[i] - outputs[i]) ** 2 for i in range(len(expected))]
+            )
+            back_propagate_error(network, expected)
+            update_weights(network, row, l_rate)
+        print(">epoch=%d, lrate=%.3f, error=%.3f" % (epoch, l_rate, sum_error))
+
 
 seed(1)
-dataset = [[2.7810836,2.550537003,0],
-           [1.465489372,2.362125076,0],
-           [3.396561688,4.400293529,0],
-           [1.38807019,1.850220317,0],
-           [3.06407232,3.005305973,0],
-           [7.627531214,2.759262235,1],
-           [5.332441248,2.088626775,1],
-           [6.922596716,1.77106367,1],
-           [8.675418651,-0.242068655,1],
-           [7.673756466,3.508563011,1]]
+dataset = [
+    [2.7810836, 2.550537003, 0],
+    [1.465489372, 2.362125076, 0],
+    [3.396561688, 4.400293529, 0],
+    [1.38807019, 1.850220317, 0],
+    [3.06407232, 3.005305973, 0],
+    [7.627531214, 2.759262235, 1],
+    [5.332441248, 2.088626775, 1],
+    [6.922596716, 1.77106367, 1],
+    [8.675418651, -0.242068655, 1],
+    [7.673756466, 3.508563011, 1],
+]
 n_inputs = len(dataset[0]) - 1
 n_outputs = len(set([row[-1] for row in dataset]))
 network = initialize_network(n_inputs, 2, n_outputs)
 train_network(network, dataset, 0.7, 30, n_outputs)
 for layer in network:
-    print(layer)
\ No newline at end of file
+    print(layer)
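
For completeness, a short usage sketch: the patch trains the network and prints
the learned layer weights, but never classifies anything with them. Assuming the
trained network, dataset, and forward_propagate from the patched module are in
scope, a predict helper (not part of this patch; it mirrors the
argmax-over-outputs helper in the linked machinelearningmastery.com tutorial)
could look like this:

def predict(network, row):
    # One forward pass; the predicted class is the index of the output
    # neuron with the highest activation (argmax over the output layer).
    outputs = forward_propagate(network, row)
    return outputs.index(max(outputs))

# Classify each training row with the trained network.
for row in dataset:
    print("expected=%d, predicted=%d" % (row[-1], predict(network, row)))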