# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------
# BP-ANN.py
# Created on: 2014-06-12 09:49:56.00000
# Description: Back-propagation neural network trained on 60 normalized
#              4-feature samples; the single output encodes the class
#              label as 0.2 / 0.5 / 0.7.
# ---------------------------------------------------------------------------
import os
import math
import time
import datetime

# Network topology.
n = 4   # neurons in the input layer
p = 6   # neurons in the hidden layer
q = 1   # neurons in the output layer

# Number of training samples.
trainsample = 60

# Input vectors: trainsample rows of n normalized features.
x = [
    [0.27500, 0.60000, 0.14085, 0.07143],
    [0.22500, 0.43333, 0.14085, 0.07143],
    [0.17500, 0.50000, 0.12676, 0.07143],
    [0.15000, 0.46667, 0.15493, 0.07143],
    [0.25000, 0.63333, 0.14085, 0.07143],
    [0.35000, 0.73333, 0.18310, 0.14286],
    [0.15000, 0.56667, 0.14085, 0.10714],
    [0.25000, 0.56667, 0.15493, 0.07143],
    [0.10000, 0.40000, 0.14085, 0.07143],
    [0.22500, 0.46667, 0.15493, 0.03571],
    [0.35000, 0.66667, 0.15493, 0.07143],
    [0.20000, 0.56667, 0.16901, 0.07143],
    [0.20000, 0.43333, 0.14085, 0.03571],
    [0.07500, 0.43333, 0.09859, 0.03571],
    [0.45000, 0.76667, 0.11268, 0.07143],
    [0.42500, 0.90000, 0.15493, 0.14286],
    [0.35000, 0.73333, 0.12676, 0.14286],
    [0.27500, 0.60000, 0.14085, 0.10714],
    [0.42500, 0.70000, 0.18310, 0.10714],
    [0.27500, 0.70000, 0.15493, 0.10714],
    [0.75000, 0.50000, 0.60563, 0.50000],
    [0.60000, 0.50000, 0.57746, 0.53571],
    [0.72500, 0.46667, 0.63380, 0.53571],
    [0.37500, 0.20000, 0.50704, 0.46429],
    [0.62500, 0.36667, 0.59155, 0.53571],
    [0.42500, 0.36667, 0.57746, 0.46429],
    [0.57500, 0.53333, 0.60563, 0.57143],
    [0.22500, 0.23333, 0.40845, 0.35714],
    [0.65000, 0.40000, 0.59155, 0.46429],
    [0.30000, 0.33333, 0.49296, 0.50000],
    [0.25000, 0.10000, 0.43662, 0.35714],
    [0.47500, 0.43333, 0.53521, 0.53571],
    [0.50000, 0.16667, 0.50704, 0.35714],
    [0.52500, 0.40000, 0.60563, 0.50000],
    [0.40000, 0.40000, 0.45070, 0.46429],
    [0.67500, 0.46667, 0.56338, 0.50000],
    [0.40000, 0.43333, 0.57746, 0.53571],
    [0.45000, 0.33333, 0.52113, 0.35714],
    [0.55000, 0.16667, 0.57746, 0.53571],
    [0.40000, 0.26667, 0.49296, 0.39286],
    [0.57500, 0.53333, 0.78873, 0.89286],
    [0.45000, 0.33333, 0.66197, 0.67857],
    [0.77500, 0.43333, 0.77465, 0.75000],
    [0.57500, 0.40000, 0.73239, 0.64286],
    [0.62500, 0.43333, 0.76056, 0.78571],
    [0.90000, 0.43333, 0.87324, 0.75000],
    [0.22500, 0.26667, 0.57746, 0.60714],
    [0.82500, 0.40000, 0.83099, 0.64286],
    [0.67500, 0.26667, 0.76056, 0.64286],
    [0.80000, 0.63333, 0.80282, 0.89286],
    [0.62500, 0.50000, 0.66197, 0.71429],
    [0.60000, 0.33333, 0.69014, 0.67857],
    [0.70000, 0.43333, 0.71831, 0.75000],
    [0.42500, 0.26667, 0.64789, 0.71429],
    [0.45000, 0.36667, 0.66197, 0.85714],
    [0.60000, 0.50000, 0.69014, 0.82143],
    [0.62500, 0.43333, 0.71831, 0.64286],
    [0.92500, 0.70000, 0.88732, 0.78571],
    [0.92500, 0.30000, 0.91549, 0.82143],
    [0.50000, 0.16667, 0.64789, 0.53571],
]

# Desired output for each training sample: the three classes are encoded
# as 0.2, 0.5 and 0.7, with 20 consecutive samples per class.
d = [0.2] * 20 + [0.5] * 20 + [0.7] * 20

# Hidden layer state: each of the p hidden neurons has one weight per
# input neuron.
hi = [0 for i in range(p)]                          # net inputs
ho = [0 for i in range(p)]                          # activations
b1 = [0.3 for i in range(p)]                        # biases
Whi = [[0.5 for i in range(n)] for j in range(p)]   # input -> hidden weights
pp = [0 for i in range(p)]                          # back-propagated deltas
rateWhi = 0.05                                      # learning rate

# Output layer state: each of the q output neurons has one weight per
# hidden neuron.
yi = [0 for i in range(q)]                          # net inputs
yo = [1 for i in range(q)]                          # activations
b2 = [0.3 for i in range(q)]                        # biases
Woh = [[0.5 for i in range(p)] for j in range(q)]   # hidden -> output weights
qq = [0 for i in range(q)]                          # back-propagated deltas
rateWoh = 0.05                                      # learning rate
# Total training error; initialized > 0.001 so the training loop below
# runs at least once.
e = 1.0


def f1(x):
    """Sigmoid activation function: 1 / (1 + exp(-x))."""
    return 1.0 / (1.0 + math.exp(-1.0 * x))


def f2(x):
    """Derivative of the sigmoid at x, i.e. f1(x) * (1 - f1(x))."""
    y = f1(x)
    return y * (1.0 - y)


def train():
    """Run one online back-propagation epoch over all training samples.

    For each sample: forward pass through the hidden and output layers,
    then gradient-descent updates of the Woh/Whi weights and b2/b1 biases
    (all module-level globals are mutated in place).

    Returns the summed squared error 0.5 * (d - yo)**2 over the epoch.
    """
    e = 0.0
    for trainIndex in range(trainsample):
        # Hidden layer: net input and activation.
        for i in range(p):
            hi[i] = 0
            for j in range(n):
                hi[i] = hi[i] + Whi[i][j] * x[trainIndex][j]
            ho[i] = f1(hi[i] + b1[i])
        # Output layer: net input and activation.
        for i in range(q):
            yi[i] = 0
            for j in range(p):
                yi[i] = yi[i] + Woh[i][j] * ho[j]
            yo[i] = f1(yi[i] + b2[i])
        # Output-layer delta (dE/d net) and hidden->output weight update.
        # NOTE(review): Woh is updated here, BEFORE the hidden deltas below
        # are computed from it — kept as in the original implementation,
        # even though textbook BP uses the pre-update weights.
        for i in range(q):
            qq[i] = (d[trainIndex] - yo[i]) * yo[i] * (1 - yo[i])
            for k in range(p):
                Woh[i][k] = Woh[i][k] + rateWoh * qq[i] * ho[k]
        # Hidden-layer delta and input->hidden weight update.
        for i in range(p):
            pp[i] = 0
            for j in range(q):
                pp[i] = pp[i] + qq[j] * Woh[j][i]
            pp[i] = pp[i] * ho[i] * (1 - ho[i])
            for k in range(n):
                Whi[i][k] = Whi[i][k] + rateWhi * pp[i] * x[trainIndex][k]
        # Accumulate the squared error for this sample.
        for i in range(q):
            e = e + 0.5 * math.pow((d[trainIndex] - yo[i]), 2)
        # Bias updates use the same deltas and learning rates as the weights.
        for i in range(q):
            b2[i] = b2[i] + rateWoh * qq[i]
        for i in range(p):
            b1[i] = b1[i] + rateWhi * pp[i]
    return e


def recognize(vv):
    """Forward-propagate the n-feature input vector vv and print the
    network's output vector yo (no weight updates)."""
    # Hidden layer: net input and activation.
    for i in range(p):
        hi[i] = 0
        for j in range(n):
            hi[i] = hi[i] + Whi[i][j] * vv[j]
        ho[i] = f1(hi[i] + b1[i])
    # Output layer: net input and activation.
    for i in range(q):
        yi[i] = 0
        for j in range(p):
            yi[i] = yi[i] + Woh[i][j] * ho[j]
        yo[i] = f1(yi[i] + b2[i])
    print('-------------------------------------------------------')
    print(yo)


# Train epoch by epoch until the error drops below 0.001, capped at
# 10000 epochs.
times = 0
while e > 0.001:
    e = train()
    times = times + 1
    print(str(times) + " " + str(e))
    if times > 10000:
        break

# Classify three unseen samples (outputs near 0.7, 0.5 and 0.2 indicate
# the three training classes respectively).
recognize([0.7, 0.5, 0.774647887, 0.821428571])
recognize([0.4, 0.433333333, 0.521126761, 0.464285714])
recognize([0.25, 0.5, 0.112676056, 0.071428571])

BP神经网络实现的更多相关文章

  1. BP神经网络原理及python实现

    [废话外传]:终于要讲神经网络了,这个让我踏进机器学习大门,让我读研,改变我人生命运的四个字!话说那么一天,我在乱点百度,看到了这样的内容: 看到这么高大上,这么牛逼的定义,怎么能不让我这个技术宅男心 ...

  2. BP神经网络

    秋招刚结束,这俩月没事就学习下斯坦福大学公开课,想学习一下深度学习(这年头不会DL,都不敢说自己懂机器学习),目前学到了神经网络部分,学习起来有点吃力,把之前学的BP(back-progagation ...

  3. 数据挖掘系列(9)——BP神经网络算法与实践

    神经网络曾经很火,有过一段低迷期,现在因为深度学习的原因继续火起来了.神经网络有很多种:前向传输网络.反向传输网络.递归神经网络.卷积神经网络等.本文介绍基本的反向传输神经网络(Backpropaga ...

  4. BP神经网络推导过程详解

    BP算法是一种最有效的多层神经网络学习方法,其主要特点是信号前向传递,而误差后向传播,通过不断调节网络权重值,使得网络的最终输出与期望输出尽可能接近,以达到训练的目的. 一.多层神经网络结构及其描述 ...

  5. 极简反传(BP)神经网络

    一.两层神经网络(感知机) import numpy as np '''极简两层反传(BP)神经网络''' # 样本 X = np.array([[0,0,1],[0,1,1],[1,0,1],[1, ...

  6.  BP神经网络

     BP神经网络基本原理 BP神经网络是一种单向传播的多层前向网络,具有三层或多层以上的神经网络结构,其中包含输入层.隐含层和输出层的三层网络应用最为普遍. 网络中的上下层之间实现全连接,而每层神经元之 ...

  7. BP神经网络学习笔记_附源代码

    BP神经网络基本原理: 误差逆传播(back propagation, BP)算法是一种计算单个权值变化引起网络性能变化的较为简单的方法.由于BP算法过程包含从输出节点开始,反向地向第一隐含层(即最接 ...

  8. 机器学习(一):梯度下降、神经网络、BP神经网络

    这几天围绕论文A Neural Probability Language Model 看了一些周边资料,如神经网络.梯度下降算法,然后顺便又延伸温习了一下线性代数.概率论以及求导.总的来说,学到不少知 ...

  9. 基于Storm 分布式BP神经网络,将神经网络做成实时分布式架构

    将神经网络做成实时分布式架构: Storm 分布式BP神经网络:    http://bbs.csdn.net/topics/390717623 流式大数据处理的三种框架:Storm,Spark和Sa ...

  10. BP神经网络算法学习

    BP(Back Propagation)网络是1986年由Rumelhart和McCelland为首的科学家小组提出,是一种按误差逆传播算法训练的多层前馈网络,是眼下应用最广泛的神经网络模型之中的一个 ...

随机推荐

  1. NASA的下一个十年(译)

    原文 MICHAEL ROSTON (New York Times) 从左起:木卫二:土卫六:经过火星的水手谷星的合成图:金星的拼接图 大多数人已经从人类第一次近距离看到冥王星的兴奋中冷静下来.下一个 ...

  2. [java] 汇率换算器实现(1)

    [java] 汇率换算器实现(1) // */ // ]]>   [java] 汇率换算器实现(1) Table of Contents 1 问题描述 2 类设计 3 初步实现 3.1 建立项目 ...

  3. JAVA之IO文件读写

    IO概述:                                                          IO(Input output)流 作用:IO流用来处理设备之间的数据传输 ...

  4. Dripicons – 精美的扁平风格的免费矢量图标字体

    Dripicons 是一个好看的免费图标集,有超过 90 个扁平风格的图标,适用在 Web 应用程序中使用,如:箭头,媒体,图表等等. 集合多种矢量格式(PSD,AI,EPS,PDF,SVG),也可以 ...

  5. 多iframe使用tab标签方式添加、删除、切换的处理实例

    紧接着上一篇随笔iframe的内容增高或缩减时设置其iframe的高度的处理方案 如果采用iframe来切换显示内容的方式来展现办公Web.那么需要解决几个问题 1.tab标签需要和显示的iframe ...

  6. C#密封类

    密封类 密封类使用sealed修饰符声明. 密封类中不可能有抽象方法[因为:抽象方法必须在抽象类中,而抽象类不能是密封的或者是静态的,也就是说abstract 和sealed不能同时修饰一个类]   ...

  7. SQL--局部变量

    取值的话:print @变量名, 变量不设置值的话,默认打印出来是:NULL, 输出变量的值:

  8. ASP.NET MVC显示HTML字符串

    一些html经HtmlEncode后,如“<span>测试数据</span>”.现需要把这些内容正常显示于asp.net mvc的视图内. 举个例子来解决与说明,先创建一个mo ...

  9. ASP.NET Core 开发-缓存(Caching)

    ASP.NET Core 缓存Caching,.NET Core 中为我们提供了Caching 的组件. 目前Caching 组件提供了三种存储方式. Memory Redis SqlServer 学 ...

  10. 使用Unity3d做异形窗口

    项目马上上线,因为之前的登录器是使用VS2010的MFC做的,在很多电脑上会提示缺失mfcXXXX.dll,中间找寻这种解决方案,最后确定将vcredist2010_x86和我的程序打包到安装包里面, ...