# Implementation of a 3-layer neural network
#   The computation in each layer is carried out in one go as a matrix product
#   (thinking about the network from a macro, layer-level perspective)

import numpy as np
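
# A minimal sanity check of the claim above (values chosen only for illustration):
# each neuron could be computed element by element, but np.dot evaluates the
# whole layer in a single matrix product.
demo_x = np.array([1.0, 0.5])                    # two inputs
demo_W = np.array([[0.1, 0.3], [0.2, 0.4]])      # one column of weights per neuron
demo_b = np.array([0.1, 0.2])                    # one bias per neuron
demo_neuron0 = demo_x[0] * demo_W[0, 0] + demo_x[1] * demo_W[1, 0] + demo_b[0]  # first neuron by hand
demo_layer = np.dot(demo_x, demo_W) + demo_b     # the whole layer at once
assert np.isclose(demo_neuron0, demo_layer[0])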


#  Confirming the notation — see Figure 1

# Implementation of the sigmoid function
def sigmoid(i):
    return 1 / (1 + np.exp(-i))


#   The identity function returns its input unchanged
def identity_function(i):
    return i
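

# Quick sanity checks of the two helpers above (illustrative values, not from the book):
# sigmoid(0) is exactly 0.5 and every output lies strictly between 0 and 1, while
# identity_function returns its argument untouched.
print(sigmoid(np.array([-1.0, 0.0, 2.0])))      # approx. [0.2689 0.5 0.8808]
print(identity_function(np.array([0.3, 0.7])))  # [0.3 0.7]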


#  Signal propagation from the input to layer 1: A(1) = X W(1) + B(1)
X = np.array([1.0, 0.5])
W1 = np.array([[0.1, 0.3, 0.5], [0.2, 0.4, 0.6]])
B1 = np.array([0.1, 0.2, 0.3])
print(X.shape)
print(W1.shape)
print(B1.shape)
print(np.dot(X, W1))
A1 = np.dot(X, W1) + B1
print(A1)
Z1 = sigmoid(A1)
print(Z1)
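
# Shape check (follows directly from the arrays above): (2,) dot (2, 3) gives (3,),
# so A1 and Z1 each hold one value per neuron in the first hidden layer.
print(A1.shape)  # (3,)
print(Z1.shape)  # (3,)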

#  The layer-1 output Z1 becomes the input to layer 2; with NumPy arrays, the layer-to-layer signal propagation can be written concisely
W2 = np.array([[0.1, 0.4], [0.2, 0.5], [0.3, 0.6]])
B2 = np.array([0.1, 0.2])
print(Z1.shape)
print(W2.shape)
print(B2.shape)

A2 = np.dot(Z1, W2) + B2
print(A2)
Z2 = sigmoid(A2)
print(Z2)

#  Layer 2 to the output layer: A(3) = Z(2) W(3) + B(3)
W3 = np.array([[0.1, 0.3], [0.2, 0.4]])
B3 = np.array([0.1, 0.2])
A3 = np.dot(Z2, W3) + B3
print(A3)
Y = identity_function(A3)
print(Y)
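
# Because the output layer applies the identity function, Y is exactly A3, and the
# dimension flow of the whole step-by-step pass is (2,) -> (3,) -> (2,) -> (2,).
print(np.array_equal(Y, A3))  # True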


#  Code summary: the full 3-layer neural network wrapped into init_network() and forward()
def init_network():
    # Weights and biases for each layer (the same values as in the step-by-step code above)
    network = {
        'W1': np.array([[0.1, 0.3, 0.5], [0.2, 0.4, 0.6]]),
        'b1': np.array([0.1, 0.2, 0.3]),
        'W2': np.array([[0.1, 0.4], [0.2, 0.5], [0.3, 0.6]]),
        'b2': np.array([0.1, 0.2]),
        'W3': np.array([[0.1, 0.3], [0.2, 0.4]]),
        'b3': np.array([0.1, 0.2])
    }
    return network


def forward(network, x):
    W_1, W_2, W_3 = network['W1'], network['W2'], network['W3']
    b1, b2, b3 = network['b1'], network['b2'], network['b3']

    # Input -> layer 1
    a1 = np.dot(x, W_1) + b1
    print(a1)
    z1 = sigmoid(a1)
    print(z1)
    # Layer 1 -> layer 2
    a2 = np.dot(z1, W_2) + b2
    print(a2)
    z2 = sigmoid(a2)
    print(z2)
    # Layer 2 -> output layer: the output activation is the identity function, not sigmoid
    a3 = np.dot(z2, W_3) + b3
    print(a3)
    y = identity_function(a3)
    return y


network = init_network()
print(network)
x11 = np.array([1.0, 0.5])
y11 = forward(network, x11)
print(y11)
#  End of the 3-layer neural network code summary
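
# A minimal extension sketch (not from the original text; the batch values below are
# made up): because np.dot and the bias addition broadcast over rows, the same
# forward() also accepts several inputs stacked row-wise and returns one output row
# per sample.
x_batch = np.array([[1.0, 0.5],
                    [0.2, 0.9]])
y_batch = forward(network, x_batch)
print(y_batch)        # shape (2, 2): one row of outputs per input sample
print(y_batch.shape)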

# Reference: 《深度学习入门——基于Python的理论与实现》 ("Deep Learning from Scratch")