Compute the output of each neuron in our network, step by step.

For more about neural networks, you can visit http://datahacker.rs/neural_networks_representation/.

In [20]:

```
# Evaluate the 2-6-1 network (2 inputs, 6 ReLU hidden neurons, 1 sigmoid
# output) at every point of a uniform 50x50 grid over [-2, 2]^2, recording
# each neuron's activation so the decision surface can be visualized later.
#
# Relies on names defined in earlier cells: weights0/biases0 (input->hidden),
# weights1/biases1 (hidden->output), ReLU, sigmoid, np.

N = 50  # grid resolution along each input axis

x1 = np.linspace(-2, 2, N)
x2 = np.linspace(-2, 2, N)
# NOTE(review): x3 is never used in this cell — presumably kept for a later
# cell; confirm before removing.
x3 = np.linspace(-1, 1, N * N)

# One row per grid point: columns 0 and 1 hold (x1, x2).
# NOTE(review): column 2 is allocated but never written here (stays 0.0) —
# confirm whether it was meant to hold the output activation a21.
result = np.zeros((N * N, 3))

# Per-grid-point activations: six hidden neurons and the output neuron.
out11 = np.zeros((N * N, 1))
out12 = np.zeros((N * N, 1))
out13 = np.zeros((N * N, 1))
out14 = np.zeros((N * N, 1))
out15 = np.zeros((N * N, 1))
out16 = np.zeros((N * N, 1))
out21 = np.zeros((N * N, 1))

for i in range(len(x1)):
    for j in range(len(x2)):
        idx = i * N + j  # row-major index of grid point (i, j)

        # Hidden layer: each neuron is a weighted sum of the two inputs
        # plus a bias, passed through ReLU.
        n11 = x1[i] * weights0[0][0] + x2[j] * weights0[1][0] + biases0[0]
        a11 = ReLU(n11)
        n12 = x1[i] * weights0[0][1] + x2[j] * weights0[1][1] + biases0[1]
        a12 = ReLU(n12)
        n13 = x1[i] * weights0[0][2] + x2[j] * weights0[1][2] + biases0[2]
        a13 = ReLU(n13)
        n14 = x1[i] * weights0[0][3] + x2[j] * weights0[1][3] + biases0[3]
        a14 = ReLU(n14)
        n15 = x1[i] * weights0[0][4] + x2[j] * weights0[1][4] + biases0[4]
        a15 = ReLU(n15)
        n16 = x1[i] * weights0[0][5] + x2[j] * weights0[1][5] + biases0[5]
        a16 = ReLU(n16)

        # Output layer: weighted sum of the six hidden activations plus a
        # bias, squashed through the sigmoid.
        n21 = (a11 * weights1[0][0] + a12 * weights1[1][0]
               + a13 * weights1[2][0] + a14 * weights1[3][0]
               + a15 * weights1[4][0] + a16 * weights1[5][0]
               + biases1[0])
        a21 = sigmoid(n21)

        # Record the input coordinates and every neuron's activation.
        result[idx, 0] = x1[i]
        result[idx, 1] = x2[j]
        out11[idx] = a11
        out12[idx] = a12
        out13[idx] = a13
        out14[idx] = a14
        out15[idx] = a15
        out16[idx] = a16
        out21[idx] = a21
```