The smallest neural network for Harbour

Postby Antonio Linares » Tue Sep 13, 2022 12:29 am

No need for classes. Really easy to understand code :-)
Code:
#define INPUT_SIZE 2
#define HIDDEN_SIZE 2
#define OUTPUT_SIZE 1

#define LEARNING_RATE 0.5
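
// a 2-2-1 network: INPUT_SIZE inputs, HIDDEN_SIZE sigmoid hidden units
// and a single sigmoid output; no bias terms are used, which keeps the
// code minimal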

function Main()

   local aInput[ INPUT_SIZE ]
   local aHidden[ HIDDEN_SIZE ]
   local nOutput
   local aWih[ INPUT_SIZE ][ HIDDEN_SIZE ] // weights from input to hidden layer
   local aWho[ HIDDEN_SIZE ] // weights from hidden to output layer
   local aData := { { 0, 0, 0 }, { 0, 1, 1 }, { 1, 0, 1 }, { 1, 1, 0 } }  // XOR truth table: input1, input2, expected output
   local nError := 1, nAt, nDelta, nHDelta
   local n, m, p  // loop counters

   SET DECIMALS TO 9  // print the results with nine decimal places

    // start both weight layers with random values in [0, 1)
    for n = 1 to INPUT_SIZE
        for m = 1 to HIDDEN_SIZE
            aWih[ n, m ] = hb_Random()
        next
    next

    for n = 1 to HIDDEN_SIZE
        aWho[ n ] = hb_Random()
    next

    // train on 100000 randomly chosen rows of the truth table
    for n = 1 to 100000

        nAt = hb_RandomInt( 1, 4 )

        aInput[ 1 ] = aData[ nAt ][ 1 ]
        aInput[ 2 ] = aData[ nAt ][ 2 ]

        // feed forward: each hidden unit is the sigmoid of its weighted input sum
        for m = 1 to HIDDEN_SIZE
            aHidden[ m ] = 0
            for p = 1 to INPUT_SIZE
                aHidden[ m ] += aInput[ p ] * aWih[ p ][ m ]
            next
            aHidden[ m ] = 1 / ( 1 + Math_E() ^ -aHidden[ m ] )  // sigmoid: 1 / ( 1 + e^-x )
        next

        // output unit: sigmoid of the weighted hidden sum
        nOutput = 0
        for m = 1 to HIDDEN_SIZE
            nOutput += aHidden[ m ] * aWho[ m ]
        next
        nOutput = 1 / ( 1 + Math_E() ^ -nOutput )

        // backpropagation: scale the error by the sigmoid derivative
        // s'(x) = s(x) * ( 1 - s(x) )
        nError = aData[ nAt ][ 3 ] - nOutput
        nDelta = nError * nOutput * ( 1 - nOutput )

        // update the hidden-to-output weights
        for m = 1 to HIDDEN_SIZE
            aWho[ m ] += LEARNING_RATE * aHidden[ m ] * nDelta
        next

        // propagate the delta back and update the input-to-hidden weights
        for m = 1 to HIDDEN_SIZE
            nHDelta = nDelta * aWho[ m ] * aHidden[ m ] * ( 1 - aHidden[ m ] )
            for p = 1 to INPUT_SIZE
                aWih[ p ][ m ] += LEARNING_RATE * aInput[ p ] * nHDelta
            next
        next
    next

    // test: run all four truth-table rows through the trained network
    for n = 1 to 4
        aInput[ 1 ] = aData[ n ][ 1 ]
        aInput[ 2 ] = aData[ n ][ 2 ]

        // same feed-forward pass as in training
        for m = 1 to HIDDEN_SIZE
            aHidden[ m ] = 0
            for p = 1 to INPUT_SIZE
                aHidden[ m ] += aInput[ p ] * aWih[ p ][ m ]
            next   
            aHidden[ m ] = 1 / ( 1 + Math_E() ^ -aHidden[ m ] )
        next

        nOutput = 0
        for m = 1 to HIDDEN_SIZE
            nOutput += aHidden[ m ] * aWho[ m ]
        next   
        nOutput = 1 / ( 1 + Math_E() ^ -nOutput )

        ? AllTrim( Str( aData[ n ][ 1 ] ) ), " XOR ", AllTrim( Str( aData[ n ][ 2 ] ) ), "=", nOutput
    next

return nil
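
// the #pragma BEGINDUMP / ENDDUMP section below embeds C code directly
// in the .prg; HB_FUNC( MATH_E ) exposes the C constant M_E to the
// Harbour level as the MATH_E() function used above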

#pragma BEGINDUMP

#include <hbapi.h>
#include <math.h>

#ifndef M_E
   #define M_E  2.71828182845904523536
#endif  

HB_FUNC( MATH_E )
{
   hb_retnd( M_E );
}

#pragma ENDDUMP
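
The sample builds with hbmk2 (for instance hbmk2 xornet.prg, with whatever filename you saved it under), which compiles the embedded C section automatically as long as a C compiler is installed.

As a side note, Harbour's standard Exp() function could replace the C-level MATH_E() helper entirely; a minimal sketch of the same sigmoid, with no BEGINDUMP section needed:

Code:
// sigmoid written with the built-in Exp(), no embedded C required
static function Sigmoid( nX )
return 1 / ( 1 + Exp( -nX ) )

With it, each activation line above becomes aHidden[ m ] = Sigmoid( aHidden[ m ] ) or nOutput = Sigmoid( nOutput ).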

See the level of accuracy when predicting the result values:
0 XOR 0 = 0.028447448
0 XOR 1 = 0.944000896
1 XOR 0 = 0.952882189
1 XOR 1 = 0.094916901
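
To turn those activations into hard 0/1 answers, the test loop could simply threshold the output at 0.5; a minimal sketch, reusing nOutput from the test loop above:

Code:
// round the sigmoid activation to the nearest class
? AllTrim( Str( aData[ n ][ 1 ] ) ), " XOR ", AllTrim( Str( aData[ n ][ 2 ] ) ), "=", iif( nOutput > 0.5, 1, 0 )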
regards, saludos

Antonio Linares
www.fivetechsoft.com


