danielwilczak101 / EasyNN

EasyNN is a python package designed to provide an easy-to-use Neural Network. The package is designed to work right out of the box, while also allowing the user to customize features as they see fit.

overall code structure #1

Closed · NathanRoseCE closed this issue 3 years ago

danielwilczak101 commented 3 years ago

User Interaction

import random

from EasyNN import DeepNeuralNetwork
from EasyNN.examples import mnist

model = DeepNeuralNetwork()
model.train()

image = random.choice(mnist.testing_images)  # random image
mnist.show(image)  # mnist knows how to show it

model(image)  # tell me what the probabilities are

Output:

1: 10%
2: 0%
3: 20%
etc.
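
Nothing above pins down how those percentages would get produced from the model's raw outputs; a minimal sketch, assuming a numpy softmax and a hypothetical show_probabilities helper:

import numpy as np

def show_probabilities(outputs):
    """Hypothetical helper: format raw model outputs as the printout above."""
    exps = np.exp(outputs - np.max(outputs))  # subtract max for numerical stability
    probabilities = exps / exps.sum()
    for label, p in enumerate(probabilities, start=1):
        print(f"{label}: {p:.0%}")

show_probabilities([0.1, 2.3, -1.0])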

Class structure

NN()

from EasyNN import examples

class NN:
    """Main class that does all the work for the basic neural network. This
    will be the main class called by the user. Wiki link:
    https://github.com/danielwilczak101/EasyNN/wiki/Basic-Neural-Network---NN()
    """

    def __init__(self):
        self.layer_list = []
        # Don't know whether to use the mnist example or the binary example.
        self.training_data = examples.random

DeepNN()


from EasyNN import examples

class DeepNN:
    """Main class that does all the work for the deep neural network. This
    will be the main class called by the user. Wiki link reference:
    https://github.com/danielwilczak101/EasyNN/wiki/Deep-Neural-Network---DeepNN()
    """

    def __init__(self):
        self.layer_list    = []
        # Don't know whether to use the mnist example or the binary example.
        self.training_data = examples.mnist.training
        self.output_data   = examples.mnist.output

DeepNN - class layer

class Layer:
    """This class will handle the construction of all the layers with all the
    different types."""
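
A minimal sketch of what that construction could look like; the make_layer name, the kind strings, and both placeholder layer classes are assumptions, not part of the proposal:

class DenseLayer:
    """Placeholder layer type."""
    def __init__(self, size, activation=None):
        self.size, self.activation = size, activation

class OutputLayer(DenseLayer):
    """Placeholder layer type."""

def make_layer(kind, size, activation=None):
    # Dispatch on a string to build the requested layer type.
    kinds = {"dense": DenseLayer, "output": OutputLayer}
    if kind not in kinds:
        raise ValueError(f"unknown layer type: {kind!r}")
    return kinds[kind](size, activation)
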
NathanRoseCE commented 3 years ago

Neural Network

class NN:
    """A neural network that is useful in cases where it is not deep and you
    cannot leverage the linear algebra as much; however, in return you get
    greater flexibility in the model."""

    def __init__(self, numInputs=someDefault, numOutput=someOtherDefault, nodes=someDefaultList):
        self.nodes = nodes
        # Not sure how you would designate input nodes or link input to non-randomized values.

Node Class

class Node:
    """This class is for the non-dense neural network where you need to
    determine the inputs and whatnot."""

    def __init__(self, preDefinedInputs=None, activationFunction=ReLU):
        # activationFunction is a class.
        # Randomize inputs if none are listed.
        self.inputs = preDefinedInputs
        self.activationFunction = activationFunction

ActivationFunction

from enum import Enum, auto

class ActivationMethods(Enum):
    RELU = auto()
    SOME_OTHER_ONES_IM_TIRED = auto()

class ActivationFunction:
    """Applies the chosen activation method."""

    def __init__(self, activationMethod=ActivationMethods.RELU):
        self.method = activationMethod

    def forward(self, inputs):
        # forward pass
        pass

    def backward(self, values):
        # backward pass
        pass
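
For concreteness, here is one way the RELU branch of forward/backward could go, using numpy; caching the inputs for the backward pass is an assumption:

import numpy as np

class ReLU:
    """Sketch of the RELU branch of ActivationFunction."""

    def forward(self, inputs):
        self.inputs = np.asarray(inputs)   # cache for backward()
        return np.maximum(0, self.inputs)

    def backward(self, values):
        # Gradient only flows where the input was positive.
        return np.asarray(values) * (self.inputs > 0)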

Dense Neural Network

class DenseNN:
    """Dense Neural Network for that sweet sweet efficiency."""

    def __init__(self, numInputs=someDefault, numOutputs=someOtherDefault, numLayers=default, numNodesInLayer=default):
        self.layers = []  # some setup stuff

    def input(self, values):
        # returns NN output
        pass

Layer Class

class Layer:
    def __init__(self, numNodes, activationFunction):
        # Set up all those weights and biases.
        pass

    def forward(self, values):
        # forward pass
        pass

    def backward(self, values):
        # backpropagation
        pass

    def optimize(self):
        # Will have to think more about how to do this.
        pass
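
A hedged sketch of how Layer's forward/backward could look with numpy; the weight shapes, the initialization, and the gradient attribute names (dweights, dbiases) are assumptions:

import numpy as np

class LayerSketch:
    def __init__(self, numInputs, numNodes):
        self.weights = 0.01 * np.random.randn(numInputs, numNodes)
        self.biases = np.zeros(numNodes)

    def forward(self, values):
        self.values = np.asarray(values)   # cache inputs for backward()
        return self.values @ self.weights + self.biases

    def backward(self, grad):
        grad = np.asarray(grad)
        self.dweights = np.outer(self.values, grad)  # gradient w.r.t. weights
        self.dbiases = grad                          # gradient w.r.t. biases
        return grad @ self.weights.T                 # gradient w.r.t. inputs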

Optimizer

class Optimizer:
    """Optimizer can probably be common like the activation function."""

    def __init__(self):
        # Set up all those weights and biases.
        pass

    def forward(self, values):
        # forward pass
        pass

    def backward(self, values):
        # backpropagation
        pass

    def optimize(self):
        # Will have to think more about how to do this.
        pass
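
If Optimizer does become a common class, the simplest concrete version is plain stochastic gradient descent; a sketch that pairs with the LayerSketch gradients above (the learning_rate default is an assumption):

class SGD:
    """Minimal stochastic gradient descent sketch."""

    def __init__(self, learning_rate=0.01):
        self.learning_rate = learning_rate

    def optimize(self, layer):
        # Step each parameter against its stored gradient.
        layer.weights -= self.learning_rate * layer.dweights
        layer.biases -= self.learning_rate * layer.dbiases
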
SimpleArt commented 3 years ago

Use Case/Example

General Neural Network

Visual representation of an unstructured neural network (maybe we could add visual constructions as a web app sorta thing, for both this and the layered NN):

[image: sketch of the unstructured neural network]

from EasyNN import NeuralNetwork as NN
from EasyNN.activation_functions import logistic

model = NN(
    # using dict only to make indexes clearer
    nodes = {
        # input nodes connect to nothing
        0: [],
        1: [],
        2: [],
        3: [],
        # hidden nodes
        4: [0, 1, 2],
        5: [1, 3, 4],
        6: [0, 2, 4, 5],
        7: [4, 5, 6],
        # output nodes are not in any connections
        8: [4, 5, 6, 7],
        9: [4, 6],
        10: [4, 5, 7],
        11: [4, 7],
    },
    # using dict to only specify last 4 nodes
    activation_function = {
        # output is between 0 and 1
        8: logistic,
        9: logistic,
        10: logistic,
        11: logistic,
    },
)

model.train(  # or model.fit(...)
    # inputs
    x = [[0, 0, 0, 0],
         [0, 1, 0, 1],
         [1, 1, 1, 0],
         [0, 0, 1, 1]],
    # outputs
    y = [[0, 0, 0, 0],
         [1, 0, 1, 0],
         [1, 0, 0, 1],
         [1, 1, 1, 0]],
)

print(model([1, 1, 0, 0]))  # the output when given [1, 1, 0, 0]

DeepNN

from EasyNN import DeepNN as DNN
from EasyNN.activation_functions import logistic

model = DNN(
    layers = [4, 3, 3, 4],
    activation_function = {3: logistic},
)

model.train(  # or model.fit(...)
    # inputs
    x = [[0, 0, 0, 0],
         [0, 1, 0, 1],
         [1, 1, 1, 0],
         [0, 0, 1, 1]],
    # outputs
    y = [[0, 0, 0, 0],
         [1, 0, 1, 0],
         [1, 0, 0, 1],
         [1, 1, 1, 0]],
)

print(model([1, 1, 0, 0]))  # the output when given [1, 1, 0, 0]

Classes

Neural Network

from __future__ import annotations  # allow forward references in annotations

from typing import Callable, Dict, Optional, Sequence, Tuple, Union

class NeuralNetwork:
    """General NN structure which allows greater flexibility (and neuroevolution?)."""
    nodes: Sequence[NeuralNode]

    def __init__(self, nodes: Sequence[int], activation: Dict[int, Callable[[float], float]] = {}):
        """Convert input integers to actual nodes, with activation functions appropriately."""
        pass

    def __call__(self, input_values: Sequence[float], pad: float = 0) -> Sequence[float]:
        """
        Fill in node.value's with the input_values,
        fill in remaining nodes with the pad value,
        perform feed-forward propagation,
        and return the node.value's from the output_nodes.
        """
        pass

    # Possibly add @properties for getting input/output nodes

Neural Node

class NeuralNode:
    """General Neural Node structure which allows greater flexibility (and neuroevolution?)."""
    value: float  # scalar
    bias: float  # scalar
    activation_function: Callable[[float], float]
    nodes: Sequence[NeuralNode]  # vector
    weights: Sequence[float]  # vector
    # for back_propagation
    value_change: Gradient  # scalar
    bias_change: Gradient  # scalar
    weights_change: Gradient  # vector

    def __init__(
        self,
        bias: Optional[float] = None,
        activation_function: Callable[[float], float] = lambda x: x,  # no change
        nodes: Sequence[NeuralNode] = (),
    ):
        """
        Initialize with float (random by default),
        activation function (no change by default),
        and connected nodes (no connections by default)
        and random weights.
        Initialize change to 0.
        """
        pass

    def __mul__(self, scalar: float) -> float:
        """
        Multiply self.value by the given scalar and return the result.
        Useful for easy numpy dot products.
        """
        return self.value * scalar

    def __rmul__(self, scalar: float) -> float:
        """
        Multiply self.value by the given scalar and return the result.
        Useful for easy numpy dot products.
        """
        return self.value * scalar

    def feed_forward(self):
        """Uses connected nodes to compute self.value."""
        pass

    def back_propagate(self):
        """
        Modify change attributes to more closely match value_change.
        Modify nodes.value_change to more closely match value_change.
        Modify respective attributes based on their attr_change.
        """
        pass
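
The __mul__/__rmul__ hooks let a node stand in for its value inside numpy dot products, which is presumably how feed_forward would compute its weighted sum; a tiny runnable illustration with a stand-in node class:

import numpy as np

class StubNode:  # stand-in for NeuralNode
    def __init__(self, value):
        self.value = value
    def __mul__(self, scalar):
        return self.value * scalar
    __rmul__ = __mul__

nodes = [StubNode(1.0), StubNode(2.0), StubNode(3.0)]
weights = np.array([0.5, -1.0, 2.0])
print(np.dot(weights, nodes))  # 0.5*1 - 1*2 + 2*3 = 4.5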

DeepNN

class DeepNN:
    """Layered NN structure, which allows more efficient operations."""
    layers: Sequence[NeuralLayer]

    def __init__(self, layers: Sequence[int], activation: Dict[int, Callable[[float], float]] = {}):
        """Convert input integers to actual layers, with activation functions appropriately."""
        pass

    def __call__(self, input_values: Sequence[float], pad: float = 0) -> Sequence[float]:
        """
        Fill in the first layer with the input_values,
        fill in remaining nodes/layers with the pad value,
        perform feed-forward propagation,
        and return the last layer's values.
        """
        pass

    # Possibly add @properties for getting input/output nodes for consistency

NeuralLayer

class NeuralLayer:
    """General Neural Node structure which allows greater flexibility (and neuroevolution?)."""
    values: Sequence[float]  # vector
    bias: Sequence[float]  # vector
    activation_function: Callable[[float], float]
    nodes: Sequence[NeuralNode]  # vector
    weights: Sequence[Sequence[float]]  # matrix
    # for back_propagation
    value_change: Gradient  # vector
    bias_change: Gradient  # vector
    weights_change: Gradient  # matrix

    def __init__(
        self,
        bias: Optional[float] = None,
        activation_function: Callable[[float], float] = lambda x: x,  # no change
        previous_layer: Sequence[NeuralNode] = (),
    ):
        """
        Initialize with float (random by default),
        activation function (no change by default),
        and previous_layer (empty layer by default)
        with random weights.
        Initialize change to 0 vector.
        """
        pass

    def __getitem__(self, index):
        """Returns indexed value."""
        pass

    def __iter__(self):
        """Loop over values (slightly redundant)."""
        pass

    def __len__(self):
        """Returns the length of the values."""
        pass

    def feed_forward(self):
        """Uses connected nodes to compute self.value."""
        pass

    def back_propagate(self):
        """
        Modify change attributes to more closely match value_change.
        Modify nodes.value_change to more closely match value_change.
        Modify respective attributes based on their attr_change.
        """
        pass
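
In the layered case the weighted sum becomes one matrix-vector product; a hedged sketch of feed_forward's body, assuming weights has shape (this layer, previous layer):

    def feed_forward(self):
        import numpy as np
        # Collect the previous layer's values into a vector.
        previous = np.asarray([node.value for node in self.nodes])
        raw = np.asarray(self.weights) @ previous + np.asarray(self.bias)
        # activation_function is per-scalar, so apply it elementwise.
        self.values = [self.activation_function(x) for x in raw]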

Gradient

class Gradient:
    """Class for handling gradient calculations."""
    # may be a scalar, vector, or matrix
    derivative: Union[float, Sequence[float], Sequence[Sequence[float]]]
    iteration: int
    # hyperparameters and other attributes,
    # depending on the specific gradient descent method.

    def __init__(self, shape: Tuple[int, int]):
        """Initialize derivative based on given shape and default iteration."""
        # Default iteration of 0 or 1? Should it be an option?
        pass

    # Add @properties for modifying the derivative
    # for example, hyperparameters may influence derivative getters and setters
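
As one concrete possibility, a sketch of a Gradient doing plain gradient descent with a decaying learning rate; the hyperparameters, the step method, and starting iteration at 0 are all assumptions:

import numpy as np

class GradientSketch:
    """Tracks a derivative and applies a decaying-rate descent step."""

    def __init__(self, shape, learning_rate=0.1, decay=0.01):
        self.derivative = np.zeros(shape)  # scalar, vector, or matrix
        self.iteration = 0                 # defaulting to 0 here
        self.learning_rate = learning_rate
        self.decay = decay

    def step(self, parameter):
        # Learning rate shrinks as iterations accumulate.
        rate = self.learning_rate / (1 + self.decay * self.iteration)
        self.iteration += 1
        return parameter - rate * self.derivative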