# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------
# BP-ANN.py
# Created on: 2014-06-12 09:49:56.00000
# Description: three-layer back-propagation neural network trained on 60
# four-feature samples (iris-like, normalized to [0, 1]).
# NOTE(review): the scraped source had "import os" fused into this banner
# comment; os is never used, so it is intentionally omitted.
# ---------------------------------------------------------------------------
import math
import time
import datetime

# Network topology.
n = 4   # input-layer neuron count
p = 6   # hidden-layer neuron count
q = 1   # output-layer neuron count

# Training set: 60 input vectors of n=4 features each.
trainsample = 60
x = [
    [0.27500, 0.60000, 0.14085, 0.07143],
    [0.22500, 0.43333, 0.14085, 0.07143],
    [0.17500, 0.50000, 0.12676, 0.07143],
    [0.15000, 0.46667, 0.15493, 0.07143],
    [0.25000, 0.63333, 0.14085, 0.07143],
    [0.35000, 0.73333, 0.18310, 0.14286],
    [0.15000, 0.56667, 0.14085, 0.10714],
    [0.25000, 0.56667, 0.15493, 0.07143],
    [0.10000, 0.40000, 0.14085, 0.07143],
    [0.22500, 0.46667, 0.15493, 0.03571],
    [0.35000, 0.66667, 0.15493, 0.07143],
    [0.20000, 0.56667, 0.16901, 0.07143],
    [0.20000, 0.43333, 0.14085, 0.03571],
    [0.07500, 0.43333, 0.09859, 0.03571],
    [0.45000, 0.76667, 0.11268, 0.07143],
    [0.42500, 0.90000, 0.15493, 0.14286],
    [0.35000, 0.73333, 0.12676, 0.14286],
    [0.27500, 0.60000, 0.14085, 0.10714],
    [0.42500, 0.70000, 0.18310, 0.10714],
    [0.27500, 0.70000, 0.15493, 0.10714],
    [0.75000, 0.50000, 0.60563, 0.50000],
    [0.60000, 0.50000, 0.57746, 0.53571],
    [0.72500, 0.46667, 0.63380, 0.53571],
    [0.37500, 0.20000, 0.50704, 0.46429],
    [0.62500, 0.36667, 0.59155, 0.53571],
    [0.42500, 0.36667, 0.57746, 0.46429],
    [0.57500, 0.53333, 0.60563, 0.57143],
    [0.22500, 0.23333, 0.40845, 0.35714],
    [0.65000, 0.40000, 0.59155, 0.46429],
    [0.30000, 0.33333, 0.49296, 0.50000],
    [0.25000, 0.10000, 0.43662, 0.35714],
    [0.47500, 0.43333, 0.53521, 0.53571],
    [0.50000, 0.16667, 0.50704, 0.35714],
    [0.52500, 0.40000, 0.60563, 0.50000],
    [0.40000, 0.40000, 0.45070, 0.46429],
    [0.67500, 0.46667, 0.56338, 0.50000],
    [0.40000, 0.43333, 0.57746, 0.53571],
    [0.45000, 0.33333, 0.52113, 0.35714],
    [0.55000, 0.16667, 0.57746, 0.53571],
    [0.40000, 0.26667, 0.49296, 0.39286],
    [0.57500, 0.53333, 0.78873, 0.89286],
    [0.45000, 0.33333, 0.66197, 0.67857],
    [0.77500, 0.43333, 0.77465, 0.75000],
    [0.57500, 0.40000, 0.73239, 0.64286],
    [0.62500, 0.43333, 0.76056, 0.78571],
    [0.90000, 0.43333, 0.87324, 0.75000],
    [0.22500, 0.26667, 0.57746, 0.60714],
    [0.82500, 0.40000, 0.83099, 0.64286],
    [0.67500, 0.26667, 0.76056, 0.64286],
    [0.80000, 0.63333, 0.80282, 0.89286],
    [0.62500, 0.50000, 0.66197, 0.71429],
    [0.60000, 0.33333, 0.69014, 0.67857],
    [0.70000, 0.43333, 0.71831, 0.75000],
    [0.42500, 0.26667, 0.64789, 0.71429],
    [0.45000, 0.36667, 0.66197, 0.85714],
    [0.60000, 0.50000, 0.69014, 0.82143],
    [0.62500, 0.43333, 0.71831, 0.64286],
    [0.92500, 0.70000, 0.88732, 0.78571],
    # NOTE(review): 0.91549 was split across two lines in the scraped source
    # ("0.9154" / "9"); rejoined here.
    [0.92500, 0.30000, 0.91549, 0.82143],
    [0.50000, 0.16667, 0.64789, 0.53571],
]

# Desired output per sample: the three 20-sample classes are encoded as the
# scalar targets 0.2, 0.5 and 0.7.
d = [0.2] * 20 + [0.5] * 20 + [0.7] * 20

# Hidden layer: p neurons, each with one weight per input neuron.
hi = [0 for i in range(0, p)]    # hidden-layer net inputs
ho = [0 for i in range(0, p)]    # hidden-layer outputs
b1 = [0.3 for i in range(0, p)]  # hidden-layer biases
Whi = [[0.5 for i in range(0, n)] for j in range(p)]  # input->hidden weights
pp = [0 for i in range(0, p)]    # hidden-layer error terms (deltas)
rateWhi = 0.05                   # learning rate for input->hidden weights

# Output layer: q neurons, each with one weight per hidden neuron.
yi = [0 for i in range(0, q)]    # output-layer net inputs
yo = [1 for i in range(0, q)]    # output-layer outputs
b2 = [0.3 for i in range(0, q)]  # output-layer biases
Woh = [[0.5 for i in range(0, p)] for j in range(q)]  # hidden->output weights
qq = [0 for i in range(0, q)]    # output-layer error terms (deltas)
rateWoh = 0.05                   # learning rate for hidden->output weights
# Accumulated network error; the driver loop below stops once it is small.
e = 1.0


def f1(x):
    """Sigmoid activation: 1 / (1 + exp(-x))."""
    y = 1.0/(1.0+math.exp(-1.0*x))
    return y


def f2(x):
    """Derivative of the sigmoid, via f1: f1(x) * (1 - f1(x))."""
    y = f1(x)
    y = y * (1.0 - y)
    return y


def train():
    """Run one epoch of online back-propagation over all training samples.

    Updates the module-level weights (Whi, Woh) and biases (b1, b2) in
    place and returns the accumulated squared error for the epoch.
    """
    e = 0.0
    for trainIndex in range(0, trainsample):
        # Forward pass: hidden-layer net input and output.
        for i in range(0, p):
            hi[i] = 0
            for j in range(0, n):
                hi[i] = hi[i] + Whi[i][j] * x[trainIndex][j]
            ho[i] = f1(hi[i]+b1[i])
        # Forward pass: output-layer net input and output.
        for i in range(0, q):
            yi[i] = 0
            for j in range(0, p):
                yi[i] = yi[i] + Woh[i][j] * ho[j]
            yo[i] = f1(yi[i]+b2[i])
        # Output-layer deltas, then hidden->output weight update.
        for i in range(0, q):
            qq[i] = 0
            qq[i] = (d[trainIndex] - yo[i]) * yo[i] * (1 - yo[i])
            for k in range(0, p):
                Woh[i][k] = Woh[i][k] + rateWoh * qq[i] * ho[k]
        # Hidden-layer deltas, then input->hidden weight update.
        # NOTE(review): pp is computed from the ALREADY-updated Woh; textbook
        # BP would use the pre-update weights. Kept as-is to preserve the
        # original behavior.
        for i in range(0, p):
            pp[i] = 0
            for j in range(0, q):
                pp[i] = pp[i] + qq[j] * Woh[j][i]
            pp[i] = pp[i] * ho[i] * (1 - ho[i])
            for k in range(0, n):
                Whi[i][k] = Whi[i][k] + rateWhi * pp[i] * x[trainIndex][k]
        # Accumulate this sample's squared error.
        for i in range(0, q):
            e = e + 0.5 * math.pow((d[trainIndex] - yo[i]), 2)
        # Bias updates for both layers.
        for i in range(0, q):
            b2[i] = b2[i] + rateWoh * qq[i]
        for i in range(0, p):
            b1[i] = b1[i] + rateWhi * pp[i]
    return e


def recognize(vv):
    """Forward-propagate one n-feature input vector and print the output."""
    # Hidden-layer net input and output.
    for i in range(0, p):
        hi[i] = 0
        for j in range(0, n):
            hi[i] = hi[i] + Whi[i][j] * vv[j]
        ho[i] = f1(hi[i]+b1[i])
    # Output-layer net input and output.
    for i in range(0, q):
        yi[i] = 0
        for j in range(0, p):
            yi[i] = yi[i] + Woh[i][j] * ho[j]
        yo[i] = f1(yi[i]+b2[i])
    print('-------------------------------------------------------')
    print(yo)


if __name__ == "__main__":
    # Train until the epoch error drops below 0.001 or 10000 epochs pass,
    # then classify three held-out samples.
    times = 0
    while e > 0.001:
        e = train()
        times = times + 1
        print(str(times)+" " + str(e))
        if times > 10000:
            break
    recognize([0.7, 0.5, 0.774647887, 0.821428571])
    recognize([0.4,0.433333333,0.521126761,0.464285714])
    recognize([0.25,0.5,0.112676056,0.071428571])

BP神经网络实现的更多相关文章

  1. BP神经网络原理及python实现

    [废话外传]:终于要讲神经网络了,这个让我踏进机器学习大门,让我读研,改变我人生命运的四个字!话说那么一天,我在乱点百度,看到了这样的内容: 看到这么高大上,这么牛逼的定义,怎么能不让我这个技术宅男心 ...

  2. BP神经网络

    秋招刚结束,这俩月没事就学习下斯坦福大学公开课,想学习一下深度学习(这年头不会DL,都不敢说自己懂机器学习),目前学到了神经网络部分,学习起来有点吃力,把之前学的BP(back-propagation ...

  3. 数据挖掘系列(9)——BP神经网络算法与实践

    神经网络曾经很火,有过一段低迷期,现在因为深度学习的原因继续火起来了.神经网络有很多种:前向传输网络.反向传输网络.递归神经网络.卷积神经网络等.本文介绍基本的反向传输神经网络(Backpropaga ...

  4. BP神经网络推导过程详解

    BP算法是一种最有效的多层神经网络学习方法,其主要特点是信号前向传递,而误差后向传播,通过不断调节网络权重值,使得网络的最终输出与期望输出尽可能接近,以达到训练的目的. 一.多层神经网络结构及其描述 ...

  5. 极简反传(BP)神经网络

    一.两层神经网络(感知机) import numpy as np '''极简两层反传(BP)神经网络''' # 样本 X = np.array([[0,0,1],[0,1,1],[1,0,1],[1, ...

  6.  BP神经网络

     BP神经网络基本原理 BP神经网络是一种单向传播的多层前向网络,具有三层或多层以上的神经网络结构,其中包含输入层.隐含层和输出层的三层网络应用最为普遍. 网络中的上下层之间实现全连接,而每层神经元之 ...

  7. BP神经网络学习笔记_附源代码

    BP神经网络基本原理: 误差逆传播(back propagation, BP)算法是一种计算单个权值变化引起网络性能变化的较为简单的方法.由于BP算法过程包含从输出节点开始,反向地向第一隐含层(即最接 ...

  8. 机器学习(一):梯度下降、神经网络、BP神经网络

    这几天围绕论文A Neural Probability Language Model 看了一些周边资料,如神经网络.梯度下降算法,然后顺便又延伸温习了一下线性代数.概率论以及求导.总的来说,学到不少知 ...

  9. 基于Storm 分布式BP神经网络,将神经网络做成实时分布式架构

    将神经网络做成实时分布式架构: Storm 分布式BP神经网络:    http://bbs.csdn.net/topics/390717623 流式大数据处理的三种框架:Storm,Spark和Sa ...

  10. BP神经网络算法学习

    BP(Back Propagation)网络是1986年由Rumelhart和McCelland为首的科学家小组提出,是一种按误差逆传播算法训练的多层前馈网络,是眼下应用最广泛的神经网络模型之中的一个 ...

随机推荐

  1. JavaScript垃圾回收(二)——垃圾回收算法

    一.引用计数(Reference Counting)算法 Internet Explorer 8以下的DOM和BOM使用COM组件所以是引用计数来为DOM对象处理内存,引用计数的含义是跟踪记录每个值被 ...

  2. Cool!15个超炫的 CSS3 文本特效【上篇】

    每一个网页设计师都希望创建出让用户能够赏识的网站.当然,这是不可能满足每个人的口味的.幸运的是,我们有最强大的工具和资源.实际上,我们非常多的网站模板,框架,内容管理系统,先进的工具和其他的资源可以使 ...

  3. OkHttp,Retrofit 1.x - 2.x 基本使用

    Square 为广大开发者奉献了OkHttp,Retrofit1.x,Retrofit2.x,运用比较广泛,这三个工具有很多相似之处,初学者可能会有一些使用迷惑.这里来总结一下它们的一些基本使用和一些 ...

  4. SQL SERVER 内存分配及常见内存问题 DMV查询

    内存动态管理视图(DMV): 从sys.dm_os_memory_clerks开始. SELECT  [type] , SUM(virtual_memory_reserved_kb) AS [VM R ...

  5. 在Mac下配置php开发环境:Apache+php+MySql

    /private/etc/apache2/httpd.conf 一.启动Apache sudo apachectl start sudo apachectl -v   可以查看到Apache的版本信息 ...

  6. Azure ARM (11) ARM模式下,创建虚拟机并配置负载均衡器

    <Windows Azure Platform 系列文章目录> 本文内容比较多,请大家仔细阅读,谢谢! 在前几章中,我们做了准备工作: 1.创建ARM Resouce Group,叫Lei ...

  7. 语义化HTML:i、b、em和strong标签

    一.前言 在HTML4.1中i和b作为表象标签分别表示斜体和粗体样式,而强调样式与内容分离的XHTML中则出现样式效果相同的em和strong表义标签,此时我们会建议避免使用i和b标签,应该改用em和 ...

  8. Ubuntu14.04安装JDK

    下载oracle jdk包 从oracle官网下载jdk包,请选择Linux的tar包: 如果想使用命令行下载工具进行下载,可以先获得下载地址,然后运行curl进行下载: curl -L -O -H ...

  9. 一对多关系domain Model中设置使用AutoMapper时出错

    在使用AutoMapper时,把数据从VO-PO时显示如下错误,错误提示说在一对多关系中已将集合设置为EntityCollection,那么这个是为什么呢. 看下action中的代码,我们可以发现这是 ...

  10. C语言学习006:歌曲搜索

    #include <stdio.h> #include <string.h> //字符串处理库 ]={ "I left my heart in Harvard Med ...