Eliguli / NYU-CAS-Meows-Corp-Association


Simple Neuron Network #1

Open Eliguli opened 2 months ago

Eliguli commented 2 months ago

// @TensorFlow: Processing, matLab
import java.util.*;

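// Toy feed-forward network: layers, weights, and biases are all generated at
// random, and each new layer is smaller than the previous one until a single
// output value remains.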
public class NeuronNetwork {
    public static final double MAX_STANDARD_DEVIATION = 25d;
    public static final double MAX_BIAS = 15d;
    private int depth = 0;
    private final List<List<Double>> networkLayers = new ArrayList<>();

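    // Fills the input layer with random values in [-maxBound, maxBound) passed
    // through ReLU, then keeps appending strictly smaller random layers until a
    // layer of size 1 is reached.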
    public void addNeuralNetworkLayers(int inputSize, int maxBound) {
        List<Double> inputs = new ArrayList<>();
        for (int i = 0; i < inputSize; i++) {
            double val = 2 * Math.random() * maxBound - maxBound;
            inputs.add(Generator.RELU.apply(val));
        }
        networkLayers.add(inputs); // Add input layer to the list

        int previousLayerSize = inputSize;
        while (previousLayerSize > 1) {
            int layerSize = generateRandomSize(previousLayerSize);
            List<Double> layer = generateLayer(previousLayerSize, layerSize);
            if (!layer.isEmpty()) {
                networkLayers.add(layer);
                previousLayerSize = layerSize;
                depth++;
            }
        }
    }

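    // Returns a random layer size in [1, previousSize - 1], so every new layer
    // is strictly smaller than the previous one.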
    private static int generateRandomSize(int previousSize) {
        int size;
        do {
            size = (int) (Math.random() * previousSize) + 1;
        } while (size == previousSize);
        return size;
    }

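    // Builds a layer of currentLayerSize neurons, each computed from the
    // previous layer.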
    private List<Double> generateLayer(int previousLayerSize, int currentLayerSize) {
        List<Double> layer = new ArrayList<>();
        for (int i = 0; i < currentLayerSize; i++) {
            double sum = calculateNeuronOutput(previousLayerSize);
            layer.add(sum);
        }
        return layer;
    }

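    // Sums a random number of weighted connections to randomly picked neurons
    // of the most recently added layer; each weight is
    // standardDeviation * sigmoid(random) + bias.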
    private double calculateNeuronOutput(int previousLayerSize) {
        double sum = 0;
        int connections = (int)(Math.random() * previousLayerSize) + 1;
        for (int j = 0; j < connections; j++) {
            double picked = networkLayers.get(depth).get((int)(Math.random() * networkLayers.get(depth).size()));
            double standardDeviation = Math.random() * MAX_STANDARD_DEVIATION;
            double bias = Math.random() * MAX_BIAS;
            double weight = standardDeviation * Generator.SIGMOID.apply(Math.random()) + bias;
            sum += picked * weight;
        }
        return sum;
    }

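    // Prints every layer, the final depth, and the single value left in the
    // last layer as the network output.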
    public void printNetworkInfo() {
        networkLayers.forEach(layer -> System.out.println(Arrays.toString(layer.toArray())));
        System.out.println("Current depth: " + depth);
        double output = networkLayers.get(depth).get(0);
        System.out.println("Output: " + output);
    }

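    // Demo: build a network from 10 random inputs with maxBound = 20 and print it.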
    public static void main(String[] args) {
        NeuronNetwork nn = new NeuronNetwork();
        nn.addNeuralNetworkLayers(10, 20);
        nn.printNetworkInfo();
    }

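    // Activation functions applied while the network is generated.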
    interface Generator {
        double apply(double x);

        Generator RELU = x -> Math.max(0, x);
        Generator SIGMOID = x -> 1 / (1 + Math.exp(-x));
    }
}
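
Side note: since Generator is a single-method interface, more activations can be dropped in the same way. A minimal sketch of what that could look like (the TANH constant is my own example, not part of the code above):

    interface Generator {
        double apply(double x);

        Generator RELU = x -> Math.max(0, x);
        Generator SIGMOID = x -> 1 / (1 + Math.exp(-x));
        Generator TANH = Math::tanh; // hypothetical extra activation, not in the original
    }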