Merge pull request #31 from javidahmed64592/cleanup-nn
NN v1.12.0 Remove Node and NodeConnection class
javidahmed64592 authored Sep 12, 2024
2 parents 68cef67 + ed6dafe commit 8ffb53d
Showing 7 changed files with 18 additions and 355 deletions.
30 changes: 3 additions & 27 deletions neural_network/neural_network.py
@@ -7,7 +7,6 @@
 from neural_network.math.matrix import Matrix
 from neural_network.math.nn_math import calculate_error_from_expected, calculate_next_errors
 from neural_network.nn.layer import HiddenLayer, InputLayer, Layer, OutputLayer
-from neural_network.nn.node import NodeConnection


 class NeuralNetwork:
@@ -66,10 +65,6 @@ def layers_reversed(self) -> list[Layer]:
     def weights(self) -> list[Matrix]:
         return np.array([layer.weights for layer in self.layers])

-    @property
-    def connection_weights(self) -> list[Matrix]:
-        return np.array([layer.connection_weights for layer in self.layers])
-
     @weights.setter
     def weights(self, new_weights: list[Matrix]) -> None:
         for layer, weights in zip(self.layers, new_weights, strict=False):
@@ -84,11 +79,6 @@ def bias(self, new_bias: list[Matrix]) -> None:
         for layer, bias in zip(self.layers, new_bias, strict=False):
             layer.bias = bias

-    @property
-    def connections(self) -> list[NodeConnection]:
-        _connections = [node._node_connections for layer in self.layers[1:] for node in layer._nodes]
-        return np.array([nc for node in _connections for nc in node])
-
     def save(self, filepath: str) -> None:
         """
         Save neural network layer weights and biases to JSON file.
@@ -106,10 +96,9 @@ def save(self, filepath: str) -> None:
         with open(filepath, "w") as file:
             json.dump(_data, file)

-    def mutate(self, shift_vals: float, prob_new_node: float, prob_toggle_connection: float) -> None:
+    def mutate(self, shift_vals: float) -> None:
         """
-        Mutate NeuralNetwork Layers by adjusting weights and biases, and potentially adding new Nodes. NodeConnections
-        also get randomly toggled between active and inactive.
+        Mutate NeuralNetwork Layers by adjusting weights and biases, and potentially adding new Nodes.

         Parameters:
             shift_vals (float): Factor to adjust Layer weights and biases by
@@ -119,17 +108,6 @@ def mutate(self, shift_vals: float, prob_new_node: float, prob_toggle_connection
         for layer in self.layers[1:]:
             layer.mutate(shift_vals)

-        add_node_array = np.random.uniform(low=0, high=1, size=len(self._hidden_layers))
-        masked_layers = np.array(self._hidden_layers)[add_node_array < prob_new_node]
-        for layer in masked_layers:
-            layer._add_node()
-
-        connections = self.connections
-        toggle_connections_array = np.random.uniform(low=0, high=1, size=len(connections))
-        masked_connections = connections[toggle_connections_array < prob_toggle_connection]
-        for connection in masked_connections:
-            connection.toggle_active()
-
     def feedforward(self, inputs: list[float]) -> list[float]:
         """
         Feedforward a list of inputs.
@@ -195,9 +173,7 @@ def crossover(
         new_biases = []

         for index, layer in enumerate(self.layers[1:]):
-            new_weight = Matrix.mix_matrices(
-                nn.connection_weights[index], other_nn.connection_weights[index], self.connection_weights[index]
-            )
+            new_weight = Matrix.mix_matrices(nn.weights[index], other_nn.weights[index], self.weights[index])
             new_weight = Matrix.mutated_matrix(new_weight, mutation_rate, layer._weights_range)
             new_bias = Matrix.mix_matrices(nn.bias[index], other_nn.bias[index], self.bias[index])
             new_bias = Matrix.mutated_matrix(new_bias, mutation_rate, layer._bias_range)
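With connection_weights gone, crossover() mixes each layer's weights matrices directly. The snippet below is a rough standalone NumPy sketch of that mix-then-mutate pattern; the exact semantics of Matrix.mix_matrices and Matrix.mutated_matrix are assumed for illustration, not taken from this diff.

# Illustrative NumPy sketch of the mix-then-mutate crossover pattern above.
# mix_matrices/mutated_matrix here are stand-ins, not the repository's Matrix API.
import numpy as np

rng = np.random.default_rng(42)

def mix_matrices(parent_a: np.ndarray, parent_b: np.ndarray) -> np.ndarray:
    # Take each element from one parent or the other with equal probability.
    mask = rng.random(parent_a.shape) < 0.5
    return np.where(mask, parent_a, parent_b)

def mutated_matrix(matrix: np.ndarray, mutation_rate: float, val_range: tuple[float, float]) -> np.ndarray:
    # Re-roll a random subset of elements within val_range.
    mask = rng.random(matrix.shape) < mutation_rate
    fresh = rng.uniform(val_range[0], val_range[1], size=matrix.shape)
    return np.where(mask, fresh, matrix)

# One layer's weights from two parent networks, mixed and lightly mutated.
weights_a = rng.uniform(-1, 1, size=(4, 3))
weights_b = rng.uniform(-1, 1, size=(4, 3))
child_weights = mutated_matrix(mix_matrices(weights_a, weights_b), 0.05, (-1.0, 1.0))

Because weights is now a single Matrix per layer, the three-line connection_weights call collapses to the one-liner added in the hunk above.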
62 changes: 13 additions & 49 deletions neural_network/nn/layer.py
@@ -5,7 +5,6 @@
 from neural_network.math import nn_math
 from neural_network.math.activation_functions import ActivationFunction
 from neural_network.math.matrix import Matrix
-from neural_network.nn.node import InputNode, Node


 class Layer:
@@ -31,58 +30,48 @@ def __init__(
         """
         self._prev_layer: Layer = None
         self._next_layer: Layer = None
-        self._nodes: list[Node] = []

         self._size = size
         self._activation = activation
         self._weights_range = weights_range
         self._bias_range = bias_range

+        self._weights: Matrix = None
+        self._bias: Matrix = None
+
     @property
     def size(self) -> int:
-        self._size = len(self._nodes)
         return self._size

     @property
     def num_inputs(self) -> int:
         return self._prev_layer.size

-    @property
-    def new_node(self) -> Node:
-        return Node.fully_connected(self._size, self._weights_range, self._bias_range, self._prev_layer._nodes)
-
     @property
     def weights(self) -> Matrix:
-        _weights = Matrix.from_array([node.weights for node in self._nodes])
-        return _weights
+        if not self._weights:
+            self._weights = Matrix.random_matrix(
+                self._size, self.num_inputs, self._weights_range[0], self._weights_range[1]
+            )
+        return self._weights

     @weights.setter
     def weights(self, new_weights: Matrix) -> None:
-        for index, node in enumerate(self._nodes):
-            node.weights = new_weights.vals[index]
-
-    @property
-    def connection_weights(self) -> Matrix:
-        _weights = Matrix.from_array([node.connection_weights for node in self._nodes])
-        return _weights
+        self._weights = new_weights

     @property
     def random_weight(self) -> float:
         return np.random.uniform(low=self._weights_range[0], high=self._weights_range[1])

     @property
     def bias(self) -> Matrix:
-        _bias = Matrix.from_array([node._bias for node in self._nodes])
-        return _bias
+        if not self._bias:
+            self._bias = Matrix.random_column(self._size, self._bias_range[0], self._bias_range[1])
+        return self._bias

     @bias.setter
     def bias(self, new_bias: Matrix) -> None:
-        for index, node in enumerate(self._nodes):
-            node._bias = new_bias.vals[index][0]
-
-    def _create_nodes(self) -> None:
-        if not self._nodes:
-            self._nodes = [self.new_node for _ in range(self._size)]
+        self._bias = new_bias

     def set_prev_layer(self, prev_layer: Layer) -> None:
         """
@@ -93,7 +82,6 @@ def set_prev_layer(self, prev_layer: Layer) -> None:
         """
         self._prev_layer = prev_layer
         prev_layer._next_layer = self
-        self._create_nodes()

     def mutate(self, shift_vals: float) -> None:
         """
@@ -156,16 +144,11 @@ def __init__(
             activation (ActivationFunction): Layer activation function
         """
         super().__init__(size, activation, [1, 1], [0, 0])
-        self._nodes: list[InputNode] = [self.new_node for _ in range(self._size)]

     @property
     def num_inputs(self) -> int:
         return 1

-    @property
-    def new_node(self) -> Node:
-        return InputNode(self._size)
-
     def feedforward(self, vals: Matrix) -> Matrix:
         """
         Set InputLayer values.
@@ -204,25 +187,6 @@ def __init__(
         """
         super().__init__(size, activation, weights_range, bias_range)

-    def _add_node(self) -> None:
-        """
-        Add a random Node to HiddenLayer.
-        """
-        new_node = self.new_node
-        self._nodes.append(new_node)
-
-        for node in self._next_layer._nodes:
-            node.add_node(new_node, self._next_layer.random_weight)
-
-    def _toggle_connection(self) -> None:
-        """
-        Toggle random Node between active and inactive.
-        """
-        index = np.random.randint(low=0, high=self.size)
-
-        for node in self._next_layer._nodes:
-            node.toggle_node_connection(index)
-

 class OutputLayer(Layer):
     """
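The rewritten Layer drops the per-Node bookkeeping and instead creates its weights and bias matrices lazily on first access. Below is a minimal sketch of that lazy-initialisation property pattern, with NumPy arrays standing in for the repository's Matrix class; the class and parameter names here are illustrative only.

# Minimal sketch of lazily created weights/bias, mirroring the new Layer
# properties above; NumPy stands in for the Matrix class.
import numpy as np

class LazyLayer:
    def __init__(self, size: int, num_inputs: int,
                 weights_range: tuple[float, float] = (-1.0, 1.0),
                 bias_range: tuple[float, float] = (-1.0, 1.0)) -> None:
        self._size = size
        self._num_inputs = num_inputs
        self._weights_range = weights_range
        self._bias_range = bias_range
        self._weights: np.ndarray | None = None
        self._bias: np.ndarray | None = None

    @property
    def weights(self) -> np.ndarray:
        # Created once on first access, then reused (or overwritten via the setter).
        if self._weights is None:
            self._weights = np.random.uniform(*self._weights_range, size=(self._size, self._num_inputs))
        return self._weights

    @weights.setter
    def weights(self, new_weights: np.ndarray) -> None:
        self._weights = new_weights

    @property
    def bias(self) -> np.ndarray:
        if self._bias is None:
            self._bias = np.random.uniform(*self._bias_range, size=(self._size, 1))
        return self._bias

layer = LazyLayer(size=4, num_inputs=3)
print(layer.weights.shape, layer.bias.shape)  # (4, 3) (4, 1)

The sketch tests is None rather than truthiness; whether the diff's "if not self._weights" behaves identically depends on how Matrix defines boolean conversion, which is outside this change.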
160 changes: 0 additions & 160 deletions neural_network/nn/node.py

This file was deleted.

2 changes: 1 addition & 1 deletion setup.py
@@ -1,6 +1,6 @@
 from setuptools import find_packages, setup

-__version__ = "1.11"
+__version__ = "1.12.0"

 setup(
     name="neural_network",