rhlawek wrote:I've been looking for some old source code to prove it to myself but this looks very similar to what I was taught as Predictor/Corrector methods back in the mid-80s
Yes, it's a very old concept, but it is still interesting.
EMG
rhlawek wrote:I've been looking for some old source code to prove it to myself but this looks very similar to what I was taught as Predictor/Corrector methods back in the mid-80s
#include "FiveWin.ch"
// Builds a tiny 1-2-1 neural net, trains it to output 5 for multiples
// of 5 and 1 otherwise, then browses each layer's weights and outputs.
function Main()

   local oNet := TNet():New( { 1, 2, 1 } )  // 1 input, 2 hidden, 1 output neuron
   local x

   // Train on random samples until the recent average error crosses the threshold
   while oNet:nRecentAverageError < 0.95
      oNet:FeedForward( { x := nRandom( 1000 ) } )
      oNet:Backprop( { If( x % 5 == 0, 5, 1 ) } )
   end

   oNet:FeedForward( { 15 } )  // query the trained net with a known multiple of 5

   // One browse column per layer.
   // Fixed: the "Layer 3" column previously displayed layer 2, 2nd neuron's
   // output ( aLayers[ 2 ][ 2 ] ) instead of layer 3, 1st neuron's; the
   // "Weigth" label typos are corrected to "Weight" as well.
   XBROWSER ArrTranspose( { "Layer 1 1st neuron" + CRLF + "Input:" + Str( oNet:aLayers[ 1 ][ 1 ]:nOutput ) + ;
                            CRLF + "Weight 1:" + Str( oNet:aLayers[ 1 ][ 1 ]:aWeights[ 1 ], 4, 2 ), ;
                            { "Layer 2, 1st neuron" + CRLF + "Weight 1: " + Str( oNet:aLayers[ 2 ][ 1 ]:aWeights[ 1 ] ) + ;
                            CRLF + "Output: " + Str( oNet:aLayers[ 2 ][ 1 ]:nOutput ),;
                            "Layer 2, 2nd neuron" + CRLF + "Weight 1: " + Str( oNet:aLayers[ 2 ][ 2 ]:aWeights[ 1 ] ) + ;
                            CRLF + "Output: " + Str( oNet:aLayers[ 2 ][ 2 ]:nOutput ) },;
                            "Layer 3 1st neuron" + CRLF + "Weight 1: " + Str( oNet:aLayers[ 3 ][ 1 ]:aWeights[ 1 ] ) + ;
                            CRLF + "Weight 2: " + Str( oNet:aLayers[ 3 ][ 1 ]:aWeights[ 2 ] ) + ;
                            CRLF + "Output: " + Str( oNet:aLayers[ 3 ][ 1 ]:nOutput ) } ) ;
      SETUP ( oBrw:nDataLines := 4,;
              oBrw:aCols[ 1 ]:nWidth := 180,;
              oBrw:aCols[ 2 ]:nWidth := 180,;
              oBrw:aCols[ 3 ]:nWidth := 180,;
              oBrw:nMarqueeStyle := 3 )

return nil
#include "FiveWin.ch"
// Trains a single-input perceptron on 50 random samples, then shows
// the learned weight and the prediction for input 5.
function Main()

   local oNeuron := TPerceptron():New( 1 )  // one input -> one weight
   local nRound, nInput

   // 50 training rounds: feed a random value with its expected result
   for nRound = 1 to 50
      nInput = nRandom( 1000 )
      oNeuron:Learn( { nInput }, ExpectedResult( nInput ) )
   next

   MsgInfo( oNeuron:aWeights[ 1 ] )       // the weight the perceptron converged to
   MsgInfo( oNeuron:Calculate( { 5 } ) )  // its prediction for input 5

return nil
// Teaching function for the perceptron: the target output is twice the input
function ExpectedResult( nValue )
return 2 * nValue
// Minimal single-layer perceptron: a plain weighted sum of its inputs,
// trained by fixed-step (0.1) weight corrections. No bias, no activation.
CLASS TPerceptron
DATA aWeights // array of numeric weights, one per input
METHOD New( nInputs ) // constructor: allocates nInputs weights, all zero
METHOD Learn( aInputs, nExpectedResult ) // one training step towards the expected output
METHOD Calculate( aInputs ) // returns the weighted sum of aInputs
ENDCLASS
// Constructor: allocate one weight per input, all starting at zero
METHOD New( nInputs ) CLASS TPerceptron

   ::aWeights = Array( nInputs )
   AFill( ::aWeights, 0 )  // zero-initialize every weight

return Self
// One training step: compare the current output against the expected
// result and nudge the weights by a fixed 0.1 step in the right direction.
// Fixed: previously only ::aWeights[ 1 ] was ever adjusted, so a
// perceptron built with nInputs > 1 could never train its other weights.
// Behavior is unchanged for the single-input case used in this sample.
METHOD Learn( aInputs, nExpectedResult ) CLASS TPerceptron

   local nSum := ::Calculate( aInputs )
   local nStep := 0
   local n

   if nSum < nExpectedResult
      nStep = 0.1        // output too low: raise the weights
   elseif nSum > nExpectedResult
      nStep = -0.1       // output too high: lower the weights
   endif

   if nStep != 0
      for n = 1 to Len( ::aWeights )
         ::aWeights[ n ] += nStep
      next
   endif

return nil
// Returns the weighted sum of the inputs (linear combination, no
// activation function): Sum( aInputs[ i ] * ::aWeights[ i ] )
METHOD Calculate( aInputs ) CLASS TPerceptron

   local nIdx
   local nResult := 0

   for nIdx = 1 to Len( aInputs )
      nResult += aInputs[ nIdx ] * ::aWeights[ nIdx ]
   next

return nResult
Return to AI Introduction (Harbour code and samples)
Users browsing this forum: No registered users and 2 guests