Python broadcast error even with identical shapes

I am trying to implement a simple learning algorithm with error backpropagation. The logic is classic and simple. However, I get a weird error — `operands could not be broadcast together with shapes (3,) (3,2)` — when subtracting two NumPy arrays that I believe have the same shape.

Here is what I tried:

import numpy as np
from numpy import linalg as LA
import sys
# Hyper-parameter and training data for the toy network below.
learning_factor = 0.3
entry = [1, 2, 3]
theoretical_output = [0.1, 0.3, 0.7]


def boltzmann(x):
    """Logistic sigmoid activation: 1 / (1 + e^-x)."""
    return 1 / (1 + np.exp(-x))


# Element-wise application of the sigmoid over an array.
normalize = np.vectorize(boltzmann)


def train(wr,wc,zr,zc,x,t):
 # Transform X and T to numpy arrays
 t = np.asarray(t)
 x = np.asarray(x)

 # Generate random weights matrix W and Z
 W = np.random.rand(wr,wc)
 Z=np.random.rand(zr,zc)

 # Add Epsilon to make T different from O
 o = t + sys.float_info.epsilon
 while(LA.norm(t-o)>sys.float_info.epsilon):
    b=np.matmul(W,x)
    h = normalize(b)
    a = np.matmul(Z,h)
    o = normalize(a)
    error = t-o
    output_error=np.matmul(o,1-o,error)
    Z=Z+learning_factor*output_error[:, np.newaxis] * (np.transpose(h))
    hidden_error = np.matmul(h, 1 - h, (np.transpose(Z) * output_error))
    W = W + learning_factor * hidden_error[:, np.newaxis] * (np.transpose(x))
    print("--- W ---", "\n", W, "\n")
    print("--- Z ---", "\n", Z, "\n")





   train(2,3,3,2,entry,theoretical_output)

Thank you for your help!

This topic was automatically closed 182 days after the last reply. New replies are no longer allowed.