// File: NeuralNetworkLayer.cs
// Path: /ia_chiffre/IAChiffre/Assets/Scripts/NeuralNetworkLayer.cs
using UnityEngine;

public class NeuralNetworkLayer
{
    /// <summary>How the layer's inputs are wired to its outputs.</summary>
    public enum Connection
    {
        Linear
    }

    /// <summary>Non-linearity applied to the raw (pre-activation) outputs.</summary>
    public enum Activation
    {
        ReLU,
        Softmax
    }

    // A layer's topology and parameters never change after construction.
    private readonly Connection m_connectionType;
    private readonly Activation m_activationType;
    private readonly int m_inputSize;
    private readonly int m_outputSize;
    // Row-major weight matrix: weight for (output i, input u) lives at [i * m_inputSize + u].
    private readonly float[] m_weights;
    // Zero or more bias vectors, each of length m_outputSize; they are summed per output.
    private readonly float[][] m_biases;

    /// <summary>
    /// Creates a layer from pre-trained parameters.
    /// </summary>
    /// <param name="inputSize">Number of input neurons.</param>
    /// <param name="outputSize">Number of output neurons.</param>
    /// <param name="connectionType">Connectivity scheme (currently only Linear).</param>
    /// <param name="activationType">Activation applied after the linear combination.</param>
    /// <param name="weights">Row-major weight matrix of length inputSize * outputSize.</param>
    /// <param name="biases">Zero or more bias vectors, each expected to have outputSize entries.</param>
    /// <exception cref="System.ArgumentNullException">weights is null.</exception>
    /// <exception cref="System.ArgumentException">weights length does not match inputSize * outputSize.</exception>
    public NeuralNetworkLayer (int inputSize, int outputSize, Connection connectionType, Activation activationType, float[] weights, params float[][] biases)
    {
        // Fail fast here rather than with an opaque IndexOutOfRangeException inside Compute().
        if (weights == null)
        {
            throw new System.ArgumentNullException("weights");
        }
        if (weights.Length != inputSize * outputSize)
        {
            throw new System.ArgumentException("weights must contain inputSize * outputSize elements.", "weights");
        }

        m_connectionType = connectionType;
        m_activationType = activationType;
        m_inputSize = inputSize;
        m_outputSize = outputSize;
        m_weights = weights;
        m_biases = biases;
    }

    /// <summary>
    /// Runs a forward pass: linear combination of the inputs, plus biases,
    /// followed by the configured activation.
    /// </summary>
    /// <param name="inputs">Input vector; must have at least m_inputSize elements.</param>
    /// <returns>New array of m_outputSize activated outputs.</returns>
    public float[] Compute(float[] inputs)
    {
        float[] outputs = new float[m_outputSize];

        switch (m_connectionType)
        {
            case Connection.Linear: // Dense layer: outputs = W * inputs + sum of bias vectors
                {
                    for(int i = 0; i < m_outputSize; i++)
                    {
                        float sum = 0f;
                        int rowOffset = i * m_inputSize; // hoisted loop-invariant index math

                        for(int u = 0; u < m_inputSize; u++)
                        {
                            sum += inputs[u] * m_weights[rowOffset + u];
                        }

                        for(int u = 0; u < m_biases.Length; u++)
                        {
                            sum += m_biases[u][i];
                        }

                        outputs[i] = sum;
                    }
                }
            break;
        }

        // System.Math is used (fully qualified) instead of UnityEngine.Mathf so the pure
        // math core has no engine dependency; the operations are equivalent for these inputs.
        switch(m_activationType)
        {
            case Activation.ReLU:
                {
                    for(int i = 0; i < outputs.Length; i++)
                    {
                        outputs[i] = System.Math.Max(0f, outputs[i]);
                    }
                }
            break;

            case Activation.Softmax:
                {
                    // Numerical stability: subtract the max logit before exponentiating so
                    // large values cannot overflow Exp to Infinity (which previously produced
                    // NaN outputs). Softmax is shift-invariant, so the result is unchanged.
                    float max = float.NegativeInfinity;
                    for(int i = 0; i < outputs.Length; i++)
                    {
                        if (outputs[i] > max)
                        {
                            max = outputs[i];
                        }
                    }

                    // BUGFIX: the original recomputed the exp-sum for every output element
                    // (O(n^2)); compute the exponentials and their sum once (O(n)).
                    float expSum = 0f;
                    for(int i = 0; i < outputs.Length; i++)
                    {
                        outputs[i] = (float)System.Math.Exp(outputs[i] - max);
                        expSum += outputs[i];
                    }

                    for(int i = 0; i < outputs.Length; i++)
                    {
                        outputs[i] /= expSum;
                    }
                }
            break;
        }

        return outputs;
    }
}