
A Python Implementation of a BP Neural Network, Explained via the XOR Problem

2019-12-08 07:09

The BP (back-propagation) neural network is the simplest neural network model; with just three layers it can already fit non-linear functions such as XOR.

Difficulties:

  • How should the initial parameters be chosen?
  • How many hidden-layer nodes should be used?
  • How many iterations are needed, and how can convergence be sped up?
  • How can a global optimum be reached? (A random-restart sketch is given near the end of this article.)
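For reference (this summary is my own addition, in standard notation, not something stated in the original post), the code below trains on the squared error E = \tfrac{1}{2}(y - \hat{y})^2 with sigmoid units, pre-activations z_j = \sum_i a_i w_{ij} - b_j, and learning rate \eta (step_len in the code). The updates it implements are the usual delta rules:

g_j = (y_j - \hat{y}_j)\,\hat{y}_j(1 - \hat{y}_j)
e_h = \Big(\sum_j g_j\, w^{(2)}_{hj}\Big)\, a_h(1 - a_h)
w^{(2)}_{hj} \leftarrow w^{(2)}_{hj} + \eta\, g_j\, a_h, \qquad b^{(2)}_j \leftarrow b^{(2)}_j - \eta\, g_j
w^{(1)}_{ih} \leftarrow w^{(1)}_{ih} + \eta\, e_h\, x_i, \qquad b^{(1)}_h \leftarrow b^{(1)}_h - \eta\, e_h

where a_h is a hidden activation and x_i an input; the minus sign on the bias updates follows from the z = \sum w a - b convention used in forward_pass below.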
'''
neural networks

created on 2019.9.24
author: vince
'''
import math
import logging
import numpy
import random
import matplotlib.pyplot as plt

'''
neural network
'''
class NeuralNetwork:

    def __init__(self, layer_nums, iter_num = 10000, batch_size = 1):
        # layer index constants: input layer, hidden layer, output layer, total layer count
        self.__ILI = 0;
        self.__HLI = 1;
        self.__OLI = 2;
        self.__TLN = 3;

        if len(layer_nums) != self.__TLN:
            raise Exception("layer_nums length must be 3");

        self.__layer_nums = layer_nums;  #array [layer0_num, layer1_num ...layerN_num]
        self.__iter_num = iter_num;
        self.__batch_size = batch_size;
    def train(self, X, Y):
        X = numpy.array(X);
        Y = numpy.array(Y);

        self.L = [];
        #initialize parameters: weights and biases drawn uniformly from [-1, 1)
        self.__weight = [];
        self.__bias = [];
        self.__step_len = [];
        for layer_index in range(1, self.__TLN):
            self.__weight.append(numpy.random.rand(self.__layer_nums[layer_index - 1], self.__layer_nums[layer_index]) * 2 - 1.0);
            self.__bias.append(numpy.random.rand(self.__layer_nums[layer_index]) * 2 - 1.0);
            self.__step_len.append(0.3);

        logging.info("bias:%s" % (self.__bias));
        logging.info("weight:%s" % (self.__weight));

        for iter_index in range(self.__iter_num):
            # stochastic training: pick one random sample per iteration
            sample_index = random.randint(0, len(X) - 1);
            logging.debug("-----round:%s, select sample %s-----" % (iter_index, sample_index));
            output = self.forward_pass(X[sample_index]);
            # output-layer error term: (target - prediction) * sigmoid'(prediction)
            g = (-output[2] + Y[sample_index]) * self.activation_drive(output[2]);
            logging.debug("g:%s" % (g));
            for j in range(len(output[1])):
                self.__weight[1][j] += self.__step_len[1] * g * output[1][j];
            self.__bias[1] -= self.__step_len[1] * g;

            # hidden-layer error term, back-propagated through the output weights
            e = [];
            for i in range(self.__layer_nums[self.__HLI]):
                e.append(numpy.dot(g, self.__weight[1][i]) * self.activation_drive(output[1][i]));
            e = numpy.array(e);
            logging.debug("e:%s" % (e));
            for j in range(len(output[0])):
                self.__weight[0][j] += self.__step_len[0] * e * output[0][j];
            self.__bias[0] -= self.__step_len[0] * e;

            # record the mean squared-error loss over the whole training set
            l = 0;
            for i in range(len(X)):
                predictions = self.forward_pass(X[i])[2];
                l += 0.5 * numpy.sum((predictions - Y[i]) ** 2);
            l /= len(X);
            self.L.append(l);

            logging.debug("bias:%s" % (self.__bias));
            logging.debug("weight:%s" % (self.__weight));
            logging.debug("loss:%s" % (l));

        logging.info("bias:%s" % (self.__bias));
        logging.info("weight:%s" % (self.__weight));
        logging.info("L:%s" % (self.L));

    def activation(self, z):
        return (1.0 / (1.0 + numpy.exp(-z)));

    def activation_drive(self, y):
        return y * (1.0 - y);
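    # Note (added for clarity): activation_drive takes the sigmoid *output* y rather
    # than its input z.  Since s(z) = 1 / (1 + exp(-z)) and s'(z) = s(z) * (1 - s(z)),
    # the derivative can be computed directly from y = s(z) as y * (1 - y), which is
    # why train() passes the stored layer outputs into this function.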

    def forward_pass(self, x):
        data = numpy.copy(x);
        result = [];
        result.append(data);
        for layer_index in range(self.__TLN - 1):
            data = self.activation(numpy.dot(data, self.__weight[layer_index]) - self.__bias[layer_index]);
            result.append(data);
        # return the activations of all three layers as a list
        # (the layers have different sizes, so they cannot form a regular numpy array)
        return result;

    def predict(self, x):
        return self.forward_pass(x)[self.__OLI];

def main():
    logging.basicConfig(level = logging.INFO,
            format = '%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
            datefmt = '%a, %d %b %Y %H:%M:%S');

    logging.info("training begin.");
    nn = NeuralNetwork([2, 2, 1]);
    # the four XOR samples and their labels
    X = numpy.array([[0, 0], [1, 0], [1, 1], [0, 1]]);
    Y = numpy.array([0, 1, 0, 1]);
    nn.train(X, Y);

    logging.info("training end. predict begin.");
    for x in X:
        print(x, nn.predict(x));

    # plot the loss curve recorded during training
    plt.plot(nn.L)
    plt.show();

if __name__ == "__main__":
    main();

Convergence result (the loss curve plotted by the script above):
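On the last of the difficulties listed at the top (reaching a good optimum), one common workaround, which is my own addition rather than part of the original post, is random restarts: train several networks from different random initializations and keep the one whose final loss is lowest. A minimal sketch, assuming the NeuralNetwork class defined above (the helper name train_with_restarts is hypothetical):

def train_with_restarts(X, Y, restarts = 5, iter_num = 10000):
    # train several independent networks and keep the one with the lowest final loss
    best_nn = None;
    best_loss = float("inf");
    for _ in range(restarts):
        nn = NeuralNetwork([2, 2, 1], iter_num = iter_num);
        nn.train(X, Y);
        if nn.L[-1] < best_loss:
            best_nn = nn;
            best_loss = nn.L[-1];
    return best_nn;

Because each restart draws fresh weights from the same [-1, 1) range, runs that stall in a poor local minimum are simply discarded; for a problem as small as XOR, a handful of restarts is usually enough.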

That is all for this article; I hope it is helpful for your studies.


Tags: python, BP, neural network