Backpropagation algorithm for an XOR logic gate with 2 inputs


I'm having trouble understanding what is wrong with my code; maybe I didn't understand how backpropagation works. Can I get some hints?

import numpy as np
import matplotlib.pyplot as plt


def sigmoide(x):
    y=1/(1+np.exp(-x))
    return y


def XOR(entradas,x):
    if entradas==2:
        y_d=np.array([-1,1,1,-1])
    elif entradas==4:
        y_d=np.array([0]) # to do later
    ERRVEC=[]
    ITVEC=[]
    
    eta=1
    Err = 1
    max_iteracion=300
    iteracion=0
    
    # Initialize weights and biases
    w13=np.random.uniform(0,1)
    w24=np.random.uniform(0,1)
    w14=np.random.uniform(0,1)
    w23=np.random.uniform(0,1)
    
    w35=np.random.uniform(0,1)
    w45=np.random.uniform(0,1)
    
    b3=np.random.uniform(0,1) # one bias per hidden/output neuron (none for the input layer)
    b4=np.random.uniform(0,1)
    b5=np.random.uniform(0,1)


    y_salida=np.zeros(len(y_d))
    while Err > 0 and iteracion<=max_iteracion:
        Err = 0
        permutacion = np.random.permutation(len(x))
        
        for idx in permutacion:
            xtemp = x[idx,:]
            xtemp=xtemp.reshape(-1, 1)
            
            # Forward pass
            y3=sigmoide(w13*xtemp[0]+w23*xtemp[1]+b3)
            y4=sigmoide(w14*xtemp[0]+w24*xtemp[1]+b4)
            y5=sigmoide(w35*y3+w45*y4+b5)
            y=y5
            
            # Backpropagation: output and hidden deltas
            delta5=y*(1-y)*(y_d[idx] - y)
            delta3=y3*(1-y3)*w35*delta5
            delta4=y4*(1-y4)*w45*delta5
        
            w45+=eta*delta5*y4
            w35+=eta*delta5*y3
            
            w14+=eta*delta4*xtemp[0]
            w24+=eta*delta4*xtemp[1]
        
            w13+=eta*delta3*xtemp[0]
            w23+=eta*delta3*xtemp[1]
            
            b3+=eta*delta3
            b4+=eta*delta4
            b5+=eta*delta5
            
        # Compute the error
        y_salida[idx]=y
        Err += 0.5 * ((y_d[idx] - y) ** 2) # if y_d and y match, the loop will terminate
        iteracion+=1

        ERRVEC.append(Err)
        ITVEC.append(iteracion)

        print(Err)
        print(iteracion)
        print(y_salida)
    return

x = np.array([[-1, -1],[1, -1],[-1, 1],[1, 1]])

XOR(2,x)

At the end, y_salida should be [-1, 1, 1, -1], but here y_salida = [0, 0, 0, 0].
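One detail I am not sure about: sigmoide only returns values in (0, 1), so y can never equal the targets -1 and 1, and Err > 0 stays true forever. A tiny snippet just to illustrate the point (not part of my program; remapping the targets is only an assumption of what might be intended):

import numpy as np

def sigmoide(x):
    return 1/(1+np.exp(-x))

# sigmoide never leaves (0, 1), so a target of -1 is unreachable
# and 0.5*(y_d - y)**2 can never drop to 0 for y_d = -1.
print(sigmoide(np.array([-10.0, 0.0, 10.0])))  # ~[4.5e-05, 0.5, 0.99995]

# One possible workaround (my assumption, not the original design):
# remap the +-1 targets into the sigmoid's range before training.
y_d = np.array([-1, 1, 1, -1])
print((y_d + 1) / 2)  # [0. 1. 1. 0.]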

The network is diagrammed in the attached picture. (https://i.stack.imgur.com/uRhTt.png)

For now the code is not optimized at all; it is just to understand what went wrong in my code and which part of the backpropagation algorithm I got wrong.
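I also notice that in my code above the Err / y_salida block sits outside the for loop (unless that is just an indentation slip in the post), so only the last sample of each epoch is counted. For reference, this is the overall structure I understand the algorithm should have, written as a compact self-contained sketch (the tanh activations and the error bookkeeping placed inside the per-sample loop are assumptions on my part, not my original code):

import numpy as np

rng = np.random.default_rng()
x   = np.array([[-1, -1], [1, -1], [-1, 1], [1, 1]], dtype=float)
y_d = np.array([-1, 1, 1, -1], dtype=float)

# 2-2-1 network; tanh outputs lie in (-1, 1), so the +-1 targets are reachable
W1 = rng.uniform(-1, 1, size=(2, 2))  # input -> hidden weights
b1 = rng.uniform(-1, 1, size=2)       # hidden biases
W2 = rng.uniform(-1, 1, size=2)       # hidden -> output weights
b2 = rng.uniform(-1, 1)               # output bias
eta, tol = 0.5, 1e-3

for epoch in range(5000):
    Err = 0.0
    y_salida = np.zeros(len(y_d))
    for idx in rng.permutation(len(x)):
        # forward pass
        h = np.tanh(W1 @ x[idx] + b1)
        y = np.tanh(W2 @ h + b2)
        # backward pass, tanh'(a) = 1 - tanh(a)**2
        d_out = (1 - y**2) * (y_d[idx] - y)
        d_hid = (1 - h**2) * W2 * d_out
        # delta-rule updates (same sign convention as in my code above)
        W2 += eta * d_out * h
        b2 += eta * d_out
        W1 += eta * np.outer(d_hid, x[idx])
        b1 += eta * d_hid
        # bookkeeping inside the loop, so every pattern contributes to Err
        y_salida[idx] = y
        Err += 0.5 * (y_d[idx] - y) ** 2
    if Err < tol:
        break

print(epoch, Err, np.round(y_salida, 2))  # hopefully close to [-1, 1, 1, -1]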
