# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------
# BP-ANN.py
# Created on: 2014-06-12 09:49:56.00000
# Description: a minimal three-layer BP (backpropagation) neural network,
#              trained by per-sample gradient descent
# ---------------------------------------------------------------------------
import math

# n neurons in the input layer
n = 4
# p neurons in the hidden layer
p = 6
# q neurons in the output layer
q = 1

# input vectors
trainsample = 60
# 60 samples in three balanced classes of 20, features normalized to [0, 1]
x = [
    # class 1 (target 0.2)
    [0.27500,0.60000,0.14085,0.07143],[0.22500,0.43333,0.14085,0.07143],
    [0.17500,0.50000,0.12676,0.07143],[0.15000,0.46667,0.15493,0.07143],
    [0.25000,0.63333,0.14085,0.07143],[0.35000,0.73333,0.18310,0.14286],
    [0.15000,0.56667,0.14085,0.10714],[0.25000,0.56667,0.15493,0.07143],
    [0.10000,0.40000,0.14085,0.07143],[0.22500,0.46667,0.15493,0.03571],
    [0.35000,0.66667,0.15493,0.07143],[0.20000,0.56667,0.16901,0.07143],
    [0.20000,0.43333,0.14085,0.03571],[0.07500,0.43333,0.09859,0.03571],
    [0.45000,0.76667,0.11268,0.07143],[0.42500,0.90000,0.15493,0.14286],
    [0.35000,0.73333,0.12676,0.14286],[0.27500,0.60000,0.14085,0.10714],
    [0.42500,0.70000,0.18310,0.10714],[0.27500,0.70000,0.15493,0.10714],
    # class 2 (target 0.5)
    [0.75000,0.50000,0.60563,0.50000],[0.60000,0.50000,0.57746,0.53571],
    [0.72500,0.46667,0.63380,0.53571],[0.37500,0.20000,0.50704,0.46429],
    [0.62500,0.36667,0.59155,0.53571],[0.42500,0.36667,0.57746,0.46429],
    [0.57500,0.53333,0.60563,0.57143],[0.22500,0.23333,0.40845,0.35714],
    [0.65000,0.40000,0.59155,0.46429],[0.30000,0.33333,0.49296,0.50000],
    [0.25000,0.10000,0.43662,0.35714],[0.47500,0.43333,0.53521,0.53571],
    [0.50000,0.16667,0.50704,0.35714],[0.52500,0.40000,0.60563,0.50000],
    [0.40000,0.40000,0.45070,0.46429],[0.67500,0.46667,0.56338,0.50000],
    [0.40000,0.43333,0.57746,0.53571],[0.45000,0.33333,0.52113,0.35714],
    [0.55000,0.16667,0.57746,0.53571],[0.40000,0.26667,0.49296,0.39286],
    # class 3 (target 0.7)
    [0.57500,0.53333,0.78873,0.89286],[0.45000,0.33333,0.66197,0.67857],
    [0.77500,0.43333,0.77465,0.75000],[0.57500,0.40000,0.73239,0.64286],
    [0.62500,0.43333,0.76056,0.78571],[0.90000,0.43333,0.87324,0.75000],
    [0.22500,0.26667,0.57746,0.60714],[0.82500,0.40000,0.83099,0.64286],
    [0.67500,0.26667,0.76056,0.64286],[0.80000,0.63333,0.80282,0.89286],
    [0.62500,0.50000,0.66197,0.71429],[0.60000,0.33333,0.69014,0.67857],
    [0.70000,0.43333,0.71831,0.75000],[0.42500,0.26667,0.64789,0.71429],
    [0.45000,0.36667,0.66197,0.85714],[0.60000,0.50000,0.69014,0.82143],
    [0.62500,0.43333,0.71831,0.64286],[0.92500,0.70000,0.88732,0.78571],
    [0.92500,0.30000,0.91549,0.82143],[0.50000,0.16667,0.64789,0.53571]
]

# desired output per sample: 0.2, 0.5 or 0.7 encodes the class
d = [0.2] * 20 + [0.5] * 20 + [0.7] * 20

# hidden layer (p hidden neurons, each with one weight per input neuron)
hi = [0 for i in range(0, p)]      # hidden-layer net inputs
ho = [0 for i in range(0, p)]      # hidden-layer outputs
b1 = [0.3 for i in range(0, p)]    # hidden-layer biases
Whi = [[0.5 for i in range(0, n)] for j in range(p)]  # input-to-hidden weights
pp = [0 for i in range(0, p)]      # hidden-layer deltas
rateWhi = 0.05                     # learning rate for input-to-hidden weights

# output layer (q output neurons, each with one weight per hidden neuron)
yi = [0 for i in range(0, q)]      # output-layer net inputs
yo = [1 for i in range(0, q)]      # output-layer outputs
b2 = [0.3 for i in range(0, q)]    # output-layer biases
Woh = [[0.5 for i in range(0, p)] for j in range(q)]  # hidden-to-output weights
qq = [0 for i in range(0, q)]      # output-layer deltas
rateWoh = 0.05                     # learning rate for hidden-to-output weights
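
# Note: every weight starts at the same constant (0.5), so all hidden neurons
# compute identical outputs and receive identical updates and can never
# differentiate from one another. Small random initial weights are the usual
# remedy; a sketch (kept commented out so the original behavior is unchanged):
# import random
# Whi = [[random.uniform(-0.5, 0.5) for i in range(0, n)] for j in range(p)]
# Woh = [[random.uniform(-0.5, 0.5) for i in range(0, p)] for j in range(q)]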

# error function value; start above the stopping threshold
e = 1.0

def f1(x):
    # sigmoid activation
    y = 1.0 / (1.0 + math.exp(-1.0 * x))
    return y

def f2(x):
    # derivative of the sigmoid: f1(x) * (1 - f1(x))
    y = f1(x)
    y = y * (1.0 - y)
    return y
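
# Quick self-test of the derivative identity (not part of the original script):
# f1'(x) = f1(x) * (1 - f1(x)), so f2(0) must be 0.5 * 0.5 = 0.25.
assert abs(f2(0.0) - 0.25) < 1e-12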

def train():
    # one epoch: forward pass, backpropagation and weight update, sample by sample
    e = 0.0
    for trainIndex in range(0, trainsample):
        # hidden layer: net input and output
        for i in range(0, p):
            hi[i] = 0
            for j in range(0, n):
                hi[i] = hi[i] + Whi[i][j] * x[trainIndex][j]
            ho[i] = f1(hi[i] + b1[i])
        # output layer: net input and output
        for i in range(0, q):
            yi[i] = 0
            for j in range(0, p):
                yi[i] = yi[i] + Woh[i][j] * ho[j]
            yo[i] = f1(yi[i] + b2[i])
        # output-layer deltas (partial derivative of the error w.r.t. each
        # output neuron's net input) and hidden-to-output weight update
        for i in range(0, q):
            qq[i] = (d[trainIndex] - yo[i]) * yo[i] * (1 - yo[i])
            for k in range(0, p):
                Woh[i][k] = Woh[i][k] + rateWoh * qq[i] * ho[k]
        # hidden-layer deltas, back-propagated through Woh, then the
        # input-to-hidden update (note: Woh was already updated above, so the
        # new weights are used here; textbook BP uses the pre-update weights)
        for i in range(0, p):
            pp[i] = 0
            for j in range(0, q):
                pp[i] = pp[i] + qq[j] * Woh[j][i]
            pp[i] = pp[i] * ho[i] * (1 - ho[i])
            for k in range(0, n):
                Whi[i][k] = Whi[i][k] + rateWhi * pp[i] * x[trainIndex][k]
        # accumulate the squared error for this sample
        for i in range(0, q):
            e = e + 0.5 * math.pow((d[trainIndex] - yo[i]), 2)
        # bias updates
        for i in range(0, q):
            b2[i] = b2[i] + rateWoh * qq[i]
        for i in range(0, p):
            b1[i] = b1[i] + rateWhi * pp[i]
    return e
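
# Per sample, the loop above implements the standard delta-rule updates:
#   qq[i] = (d - yo[i]) * yo[i] * (1 - yo[i])                  output deltas
#   Woh[i][k] += rateWoh * qq[i] * ho[k];   b2[i] += rateWoh * qq[i]
#   pp[k] = (sum_i qq[i] * Woh[i][k]) * ho[k] * (1 - ho[k])    hidden deltas
#   Whi[k][j] += rateWhi * pp[k] * x[j];    b1[k] += rateWhi * pp[k]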

def recognize(vv):
    # forward pass only: run one input vector through the trained network
    # hidden layer: net input and output
    for i in range(0, p):
        hi[i] = 0
        for j in range(0, n):
            hi[i] = hi[i] + Whi[i][j] * vv[j]
        ho[i] = f1(hi[i] + b1[i])
    # output layer: net input and output
    for i in range(0, q):
        yi[i] = 0
        for j in range(0, p):
            yi[i] = yi[i] + Woh[i][j] * ho[j]
        yo[i] = f1(yi[i] + b2[i])
    print('-------------------------------------------------------')
    print(yo)

# train until the epoch error drops below 0.001, capped at 10000 epochs
times = 0
while e > 0.001:
    # compute each layer's inputs and outputs, then update the weights
    e = train()
    times = times + 1
    print(str(times) + " " + str(e))
    if times > 10000:
        break

# run three new vectors through the network, one near each class region
recognize([0.7, 0.5, 0.774647887, 0.821428571])
recognize([0.4, 0.433333333, 0.521126761, 0.464285714])
recognize([0.25, 0.5, 0.112676056, 0.071428571])
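
# Since each class is encoded as a single scalar target (0.2, 0.5, 0.7), a thin
# wrapper can turn the raw network output into a class decision. This helper is
# a sketch, not part of the original script; the name `classify` and the mapping
# back to the nearest target are illustrative assumptions.
def classify(vv):
    recognize(vv)              # forward pass; recognize() fills the global yo
    targets = [0.2, 0.5, 0.7]  # the class encodings used in d above
    return min(targets, key=lambda t: abs(yo[0] - t))

print(classify([0.7, 0.5, 0.774647887, 0.821428571]))  # expected: 0.7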
