As far as I know, these two pieces of code should produce the same output, but they don't. Can somebody help me?
Code 1.
import numpy as np

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

class Percep:
    def __init__(self, input_size, hidden_size, output_size):
        # random init, scaled by the square root of the fan-in
        self.W1 = np.random.randn(input_size, hidden_size) / np.sqrt(input_size)
        self.b1 = np.zeros(hidden_size)
        self.W2 = np.random.randn(hidden_size, output_size) / np.sqrt(hidden_size)
        self.b2 = np.zeros(output_size)

    def forward(self, x):
        self.h = sigmoid(np.dot(x, self.W1) + self.b1)
        y = np.dot(self.h, self.W2) + self.b2  # no activation applied to the output layer here
        return y

# XOR inputs and targets
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
Y = np.array([[0], [1], [1], [0]])

mlp = Percep(2, 2, 1)
lr = 0.1
epochs = 10000

for epoch in range(epochs):
    y_pred = mlp.forward(X)
    error = Y - y_pred
    # delta terms in the sigmoid-derivative form s * (1 - s)
    delta2 = error * (y_pred * (1 - y_pred))
    delta1 = np.dot(delta2, mlp.W2.T) * (mlp.h * (1 - mlp.h))
    # update weights and biases with the accumulated deltas
    mlp.W2 += lr * np.dot(mlp.h.T, delta2)
    mlp.b2 += lr * np.sum(delta2, axis=0)
    mlp.W1 += lr * np.dot(X.T, delta1)
    mlp.b1 += lr * np.sum(delta1, axis=0)

print('Output:')
print(mlp.forward(X))
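For reference, my understanding is that the delta terms in the training loop are meant to follow the usual backprop rule for sigmoid units, where the output delta multiplies the error by the sigmoid derivative:

$$\delta_2 = (Y - \hat{y})\,\hat{y}(1 - \hat{y}), \qquad \delta_1 = \left(\delta_2 W_2^{\top}\right) \odot h(1 - h)$$

Note that the factor $\hat{y}(1 - \hat{y})$ equals $\sigma'(v_2)$ only when $\hat{y} = \sigma(v_2)$.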
Code 2 (this reuses the weights and biases from the trained mlp above):
import numpy as np

def identify(x):
    # identity function: returns its input unchanged
    return x

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

x = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
v1 = np.dot(x, mlp.W1) + mlp.b1
h1 = sigmoid(v1)
v2 = np.dot(h1, mlp.W2) + mlp.b2
h2 = sigmoid(v2)  # sigmoid is applied to the output layer here
y = identify(h2)

print('Output:')
print(y)
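To compare the two directly, this is a minimal check I would run (just a sketch; it assumes mlp, X, and sigmoid from the snippets above are still in scope):

y1 = mlp.forward(X)                        # Code 1's forward pass (linear output layer)
h = sigmoid(np.dot(X, mlp.W1) + mlp.b1)    # hidden layer, identical in both snippets
y2 = sigmoid(np.dot(h, mlp.W2) + mlp.b2)   # Code 2's path (sigmoid on the output layer)
print(np.allclose(y1, y2))                 # prints False, matching the mismatch I see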