// Source (forum paste): https://gist.github.com/espiritusanti/b ... 62c8c39d8f
// NOTE: the gist URL above was truncated by the forum software ("...").
#ifdef __PLATFORM__WINDOWS
#include "c:\harbour\include\hbclass.ch"
#else
#include "/usr/include/harbour/hbclass.ch"
#endif
// Network topology for the XOR demo in Main(): 2 inputs -> 2 hidden -> 1 output.
#define NUM_INPUTS 2
#define A_HIDDENLAYERSNEURONS { 2 } // one hidden layer with 2 neurons; add array items for more layers
#define NUM_OUTPUTS 1
// Step size applied to every bias/weight correction in BackPropagation().
static nLearningRate := 0.1
// Demo entry point: builds the network and shows one on-line training
// pass over the four XOR patterns, then prints the current time.
// (A single pass is only a demonstration of the mechanics, not full training.)
function Main()

   local oNN := TNeuralNetwork():New( NUM_INPUTS, A_HIDDENLAYERSNEURONS, NUM_OUTPUTS )
   local aSample
   local aSamples := { { { 0, 0 }, { 0 } },;
                       { { 1, 0 }, { 1 } },;
                       { { 0, 1 }, { 1 } },;
                       { { 1, 1 }, { 0 } } }

   for each aSample in aSamples
      oNN:Learn( aSample[ 1 ], aSample[ 2 ] )
   next

   ? Time()

return nil
// Feed-forward neural network: one input layer, a list of hidden layers,
// and one output layer (each a TNeuralLayer).
CLASS TNeuralNetwork

// input layer; its neurons only carry the raw input values
DATA oInputLayer
// array of TNeuralLayer, built from the aHiddenLayersNeurons sizes passed to New()
DATA aHiddenLayers INIT {}
// output layer; read back after Propagation() for the network's answer
DATA oOutputLayer

METHOD New( nInputs, aHiddenLayersNeurons, nOutputs )
// one on-line training step (forward pass + weight update) with console report
METHOD Learn( aInputs, aOutputs )
// forward pass: loads aInputs and computes every neuron's nValue
METHOD Propagation( aInputs, aOutputs )
// gradient step: computes nDeltaError per neuron, then adjusts biases/weights
METHOD BackPropagation( aInputs, aOutputs )

ENDCLASS
// Build the three layer groups. Each hidden layer is fed by the previous
// layer, so its neurons get one weight per neuron of that predecessor.
METHOD New( nInputs, aHiddenLayersNeurons, nOutputs ) CLASS TNeuralNetwork

   local nLayer
   local nPrevSize := nInputs

   // Input layer: one neuron per input. (Its weights are never read by
   // Propagation(); they exist only for structural symmetry.)
   ::oInputLayer = TNeuralLayer():New( nInputs, nInputs )

   for nLayer = 1 to Len( aHiddenLayersNeurons )
      AAdd( ::aHiddenLayers, TNeuralLayer():New( aHiddenLayersNeurons[ nLayer ], nPrevSize ) )
      nPrevSize = aHiddenLayersNeurons[ nLayer ]
   next

   // Output layer is fed by the last hidden layer.
   ::oOutputLayer = TNeuralLayer():New( nOutputs, ATail( aHiddenLayersNeurons ) )

return Self
// One on-line training step: forward pass, weight update, then a console
// report of each output neuron's value, target and delta error.
// Returns nil.
METHOD Learn( aInputs, aOutputs ) CLASS TNeuralNetwork

   local n

   ::Propagation( aInputs, aOutputs )
   ::BackPropagation( aInputs, aOutputs )

   // FIX: arrays cannot be displayed directly with "?" (QOut raises an
   // argument error on array values), so serialize aInputs first.
   ? "Inputs:", hb_ValToExp( aInputs )
   for n = 1 to Len( ::oOutputLayer:aNeurons )
      ?? ", output:", ::oOutputLayer:aNeurons[ n ]:nValue
      ?? ", expected output:", aOutputs[ n ]
      ?? ", error:", ::oOutputLayer:aNeurons[ n ]:nDeltaError
   next

return nil
// Forward pass: load aInputs into the input layer, then compute each
// neuron's activation (Sigmoid of bias + weighted sum of the feeding layer).
// aOutputs is unused here; it is kept for signature symmetry with Learn().
// FIX: the original only propagated the FIRST hidden layer, so any topology
// with more than one hidden layer left nValue nil on the deeper layers.
// Each hidden layer is now fed by its predecessor.
METHOD Propagation( aInputs, aOutputs ) CLASS TNeuralNetwork

   local oInputNeuron, oHiddenLayer, oHiddenLayerNeuron, oPrevNeuron, oOutputNeuron, nSum

   // 1) load the raw inputs
   for each oInputNeuron in ::oInputLayer:aNeurons
      oInputNeuron:nValue = aInputs[ oInputNeuron:__enumIndex ]
   next

   // 2) propagate through every hidden layer, front to back
   for each oHiddenLayer in ::aHiddenLayers
      for each oHiddenLayerNeuron in oHiddenLayer:aNeurons
         nSum = oHiddenLayerNeuron:nBias
         if oHiddenLayer:__enumIndex == 1
            // first hidden layer is fed by the input layer
            for each oInputNeuron in ::oInputLayer:aNeurons
               nSum += oInputNeuron:nValue * oHiddenLayerNeuron:aWeights[ oInputNeuron:__enumIndex ]
            next
         else
            // deeper layers are fed by the previous hidden layer
            for each oPrevNeuron in ::aHiddenLayers[ oHiddenLayer:__enumIndex - 1 ]:aNeurons
               nSum += oPrevNeuron:nValue * oHiddenLayerNeuron:aWeights[ oPrevNeuron:__enumIndex ]
            next
         endif
         oHiddenLayerNeuron:nValue = Sigmoid( nSum )
      next
   next

   // 3) output layer is fed by the last hidden layer
   for each oOutputNeuron in ::oOutputLayer:aNeurons
      nSum = oOutputNeuron:nBias
      for each oHiddenLayerNeuron in ATail( ::aHiddenLayers ):aNeurons
         nSum += oHiddenLayerNeuron:nValue * oOutputNeuron:aWeights[ oHiddenLayerNeuron:__enumIndex ]
      next
      oOutputNeuron:nValue = Sigmoid( nSum )
   next

return nil
// Gradient-descent step after a forward pass.
// FIXES versus the original:
//  * hidden deltas used oHiddenLayerNeuron:aWeights[outputIndex] (the
//    neuron's own INPUT-side weights) instead of the output neuron's weight
//    for this hidden neuron;
//  * the input-weight update indexed aInputs by the HIDDEN neuron's index
//    instead of the input neuron's (only worked because 2 inputs == 2 hidden);
//  * the output-weight update reused the enumerator variable oOutputNeuron
//    in nested FOR EACH loops, multiplying each update by the output count;
//  * layers other than the last/first got no delta and no update, so multi-
//    hidden-layer networks never trained. Deltas now walk back to front.
METHOD BackPropagation( aInputs, aOutputs ) CLASS TNeuralNetwork

   local oOutputNeuron, oHiddenLayerNeuron, oNextNeuron, oPrevNeuron, oInputNeuron
   local nError, nLayer
   local nLayers := Len( ::aHiddenLayers )

   // 1) delta of each output neuron: (target - actual) * sigmoid'(activation)
   for each oOutputNeuron in ::oOutputLayer:aNeurons
      nError = aOutputs[ oOutputNeuron:__enumIndex ] - oOutputNeuron:nValue
      oOutputNeuron:nDeltaError = nError * dSigmoid( oOutputNeuron:nValue )
   next

   // 2) hidden deltas, last layer back to the first
   for nLayer = nLayers to 1 step -1
      for each oHiddenLayerNeuron in ::aHiddenLayers[ nLayer ]:aNeurons
         nError = 0
         if nLayer == nLayers
            for each oOutputNeuron in ::oOutputLayer:aNeurons
               // weight that links THIS hidden neuron to the output neuron
               nError += oOutputNeuron:nDeltaError * oOutputNeuron:aWeights[ oHiddenLayerNeuron:__enumIndex ]
            next
         else
            for each oNextNeuron in ::aHiddenLayers[ nLayer + 1 ]:aNeurons
               nError += oNextNeuron:nDeltaError * oNextNeuron:aWeights[ oHiddenLayerNeuron:__enumIndex ]
            next
         endif
         oHiddenLayerNeuron:nDeltaError = nError * dSigmoid( oHiddenLayerNeuron:nValue )
      next
   next

   // 3) update the output layer (bias + one weight per last-hidden neuron)
   for each oOutputNeuron in ::oOutputLayer:aNeurons
      oOutputNeuron:nBias += oOutputNeuron:nDeltaError * nLearningRate
      for each oHiddenLayerNeuron in ATail( ::aHiddenLayers ):aNeurons
         oOutputNeuron:aWeights[ oHiddenLayerNeuron:__enumIndex ] += ;
            oHiddenLayerNeuron:nValue * oOutputNeuron:nDeltaError * nLearningRate
      next
   next

   // 4) update every hidden layer; layer 1 is fed by the raw inputs,
   //    deeper layers by the previous hidden layer's activations
   for nLayer = nLayers to 1 step -1
      for each oHiddenLayerNeuron in ::aHiddenLayers[ nLayer ]:aNeurons
         oHiddenLayerNeuron:nBias += oHiddenLayerNeuron:nDeltaError * nLearningRate
         if nLayer == 1
            for each oInputNeuron in ::oInputLayer:aNeurons
               oHiddenLayerNeuron:aWeights[ oInputNeuron:__enumIndex ] += ;
                  aInputs[ oInputNeuron:__enumIndex ] * oHiddenLayerNeuron:nDeltaError * nLearningRate
            next
         else
            for each oPrevNeuron in ::aHiddenLayers[ nLayer - 1 ]:aNeurons
               oHiddenLayerNeuron:aWeights[ oPrevNeuron:__enumIndex ] += ;
                  oPrevNeuron:nValue * oHiddenLayerNeuron:nDeltaError * nLearningRate
            next
         endif
      next
   next

return nil
// A single layer: just a container of TNeuron objects.
CLASS TNeuralLayer

// array of TNeuron, filled by New()
DATA aNeurons INIT {}

// nNeurons = neurons in this layer; nInputs = weights per neuron
// (i.e. the size of the layer feeding this one)
METHOD New( nNeurons, nInputs )

ENDCLASS
// Fill the layer with nNeurons neurons, each holding nInputs incoming weights.
METHOD New( nNeurons, nInputs ) CLASS TNeuralLayer

   local nCount := 0

   while ++nCount <= nNeurons
      AAdd( ::aNeurons, TNeuron():New( nInputs ) )
   end

return Self
// One neuron: random bias, one incoming weight per feeding neuron,
// plus the working values written during propagation / backpropagation.
CLASS TNeuron

// random initial bias (hb_Random() with no args -- presumably in [0,1); confirm)
DATA nBias INIT hb_Random()
// incoming weights, sized and randomized in New()
DATA aWeights
// activation computed by Propagation()
DATA nValue
// delta computed by BackPropagation()
DATA nDeltaError

METHOD New( nInputs )

ENDCLASS
// Allocate one incoming weight per feeding neuron and randomize each one.
// (nBias is already randomized by its INIT clause.)
METHOD New( nInputs ) CLASS TNeuron

   ::aWeights = Array( nInputs )
   AEval( ::aWeights, {| x, i | ::aWeights[ i ] := hb_Random() } )

return Self
// Logistic activation: 1 / (1 + e^-x), mapping any real to (0, 1).
// FIX: use the standard Exp() function instead of Math_E() ^ -nValue --
// Math_E() is not part of core Harbour and fails to link without extras.
function Sigmoid( nValue )
return 1 / ( 1 + Exp( -nValue ) )
// Derivative of the sigmoid, expressed in terms of the sigmoid's OUTPUT:
// if s = Sigmoid(x), then s' = s * (1 - s).
// FIX: every caller (BackPropagation) passes an ALREADY-activated nValue,
// but the original applied Sigmoid again (Sigmoid(Sigmoid(x)) * ...),
// yielding a wrong gradient. nValue here must be a sigmoid output in (0,1).
function dSigmoid( nValue )
return nValue * ( 1 - nValue )