1 Star 10 Fork 1

菩提树上的大李子/BP神经网络算法

加入 Gitee
与超过 1200万 开发者一起发现、参与优秀开源项目,私有仓库也完全免费 :)
免费加入
文件
该仓库未声明开源许可证文件(LICENSE),使用请关注具体项目描述及其代码上游依赖。
克隆/下载
示例1 4.41 KB
一键复制 编辑 原始数据 按行查看 历史
菩提树上的大李子 提交于 2023-04-12 17:51 +08:00 . add 示例1.
import numpy as np
from openpyxl import load_workbook
import matplotlib.pyplot as plt
########## read data ##########
# Load the raw drilling dataset from the Excel workbook into a 2-D array
# (one row per sheet row, one column per sheet column).
wb = load_workbook('C:\\Users\\sister\\Desktop\\钻井实验数据\\钻井数据源.xlsx')
ws = wb.active
rows = ws.max_row        # number of sheet rows
columes = ws.max_column  # number of sheet columns
# Flatten every cell value row by row, then reshape back to the sheet grid.
flat_values = [cell.value for row_cells in ws.rows for cell in row_cells]
original_data = np.array(flat_values).reshape(rows, columes)
###############################
def sigmod(x):
    """Logistic sigmoid 1 / (1 + e^-x); works elementwise on arrays.

    (Name kept as-is — callers throughout the file use `sigmod`.)
    """
    return 1.0 / (1.0 + np.exp(-x))
def de_sigmoid(x):
    """Sigmoid derivative written in terms of the sigmoid OUTPUT x: x*(1-x)."""
    return x * (1 - x)
def standrd(data):
    """Standardize each COLUMN (feature) of `data` to zero mean and unit std.

    data: 2-D array, rows = samples, columns = features.
    Returns a new array of the same shape.

    Fix vs. the original: the original normalized along axis=1 (per sample,
    across features), which mixes unrelated features (lithology code, WOB,
    RPM, flow rate, pump pressure, ROP) into one mean/std per row. Feature
    standardization for this row=sample layout must be per column (axis=0).
    Constant columns (std == 0) are mapped to 0 instead of NaN.
    """
    mean = data.mean(axis=0, keepdims=True)
    std = data.std(axis=0, keepdims=True)
    std[std == 0] = 1.0  # guard: constant columns become 0, not NaN
    return (data - mean) / std
# ----- preprocessing: encode lithology, select features, split train/test -----
data = original_data[1:]      # drop the header row
holedepth = data[:, 0]        # hole depth column
# Encode lithology as a number: sandstone ("砂岩") -> 0, mudstone -> 1.
lith_codes = [0 if value == "砂岩" else 1 for value in data[:, 1]]
data[:, 1] = lith_codes
# Feature matrix: lithology, WOB, RPM, flow rate, pump pressure.
x = data[:, 1:6].astype('float32')
# Label: rate of penetration (ROP).
y = data[:, 8].astype('float32').reshape(-1, 1)
# Standardize features and label together, then split into train/test.
data = standrd(np.hstack((x, y)))
train_x = data[:1000, :5]
train_y = sigmod(data[:1000, 5].reshape(-1, 1))  # squash labels into (0, 1)
test_x = data[1000:, :5]
test_y = sigmod(data[1000:, 5].reshape(-1, 1))
# x-axis indices for the plots below.
xlael = list(range(test_y.shape[0]))
def loss2(pred_y, true_y):
    """Elementwise squared-error loss: 0.5 * (true - pred)^2."""
    diff = true_y - pred_y
    return 0.5 * diff ** 2
def bulid_net(dim_in, list_num_hidden):
    """Build the network layer by layer.

    dim_in: number of input features.
    list_num_hidden: units per layer (hidden layers plus the output layer).
    Returns a list with one dict per layer: "w" holds the weight matrix
    (drawn uniformly from [-0.1, 0.4)), "b" the zero-initialized bias row.
    """
    layers = []
    prev_dim = dim_in
    for num_units in list_num_hidden:
        layers.append({
            "w": 0.5 * np.random.rand(prev_dim, num_units) - 0.1,
            "b": np.zeros((1, num_units)),
        })
        prev_dim = num_units
    return layers
def fead_forward(datas, layers):
    """Forward pass through the network.

    datas: batch of input rows; layers: list of {"w", "b"} dicts.
    Returns (input_layers, input_acfun, h):
      input_layers - the input fed to each layer,
      input_acfun  - each layer's pre-activation z = x @ w + b,
      h            - the final layer's sigmoid activation.
    """
    input_layers = []
    input_acfun = []
    h = datas  # activation flowing into the next layer; the data itself at first
    for layer in layers:
        inputs = h
        z = np.dot(inputs, layer["w"]) + layer["b"]
        h = sigmod(z)
        input_layers.append(inputs)
        input_acfun.append(z)
    return input_layers, input_acfun, h
# Perform one parameter-update (gradient-descent) step.
def updata_wb(datas, labs, layers1, alpha):
    """Run one batch gradient-descent step and report the resulting loss.

    datas:   input batch; labs: target batch (same row count).
    layers1: list of {"w", "b"} layer dicts, updated in place.
    alpha:   learning rate.
    Returns (layers1, err) where err is the mean squared-error loss after
    the update.

    Fixes vs. the original:
      * the sigmoid derivative is evaluated on the ACTIVATION h, i.e.
        h * (1 - h), not on the raw pre-activation z (the file's own
        de_sigmoid defines the derivative in terms of the output);
      * backprop reads the parameter `layers1`, not the global `layers`
        (the original only worked by module-level aliasing);
      * propagating the delta through a layer uses that layer's PRE-update
        weights, as standard backprop requires;
      * the bias gradient averages the deltas over the batch exactly once
        (the original divided by the batch size twice).
    """
    inputs, input_acfun, output = fead_forward(datas, layers1)
    m = labs.shape[0]
    # Output-layer delta: dL/dz = (h - y) * sigmoid'(z), and since
    # h = sigmoid(z), sigmoid'(z) = h * (1 - h).
    asf = (output - labs) * output * (1 - output)
    w_above = None  # pre-update weights of the layer above, for backprop
    for i in range(len(layers1)):
        index = -i - 1
        if index != -1:
            # inputs[index + 1] is the input to the layer above, i.e. THIS
            # layer's activation h = sigmoid(z); use it for the derivative.
            act = inputs[index + 1]
            asf = np.dot(asf, w_above.T) * act * (1 - act)
        # Snapshot the weights before updating (the update rebinds "w" to a
        # new array, so this reference keeps the pre-update values).
        w_above = layers1[index]["w"]
        grad_w = np.dot(inputs[index].T, asf) / m
        layers1[index]["w"] = w_above - alpha * grad_w
        # Bias gradient: batch-mean of the deltas, averaged exactly once.
        layers1[index]["b"] = layers1[index]["b"] - alpha * asf.mean(axis=0).reshape(1, -1)
    # Re-run the forward pass to report the post-update loss.
    inputs, input_acfun, output1 = fead_forward(datas, layers1)
    err = np.sum(loss2(output1, labs)) / m
    return layers1, err
dim_in = 5  # number of input features
# Network layout: two hidden layers (10 and 5 units) plus a single-unit
# regression output — three weight layers in total.
list_num_hidden = [10, 5, 1]
# layers: list of dicts, one per layer, holding that layer's weights "w"
# and bias "b".
layers = bulid_net(dim_in, list_num_hidden)
# NOTE(review): only ONE gradient step is taken here — presumably a training
# loop over many epochs was intended; confirm with the author.
new_layers, err = updata_wb(train_x, train_y, layers, alpha=0.01)
inputs, input_acfun, output = fead_forward(test_x, new_layers)
# Plot the predicted curve, then the ground-truth curve in a second figure.
plt.plot(xlael, output, label="pred")
plt.legend()
plt.figure()
plt.plot(xlael, test_y, label="true")
plt.legend()
plt.show()
Loading...
马建仓 AI 助手
尝试更多
代码解读
代码找茬
代码优化
Python
1
https://gitee.com/nocturnal_xing/bp-neural-network-algorithm.git
git@gitee.com:nocturnal_xing/bp-neural-network-algorithm.git
nocturnal_xing
bp-neural-network-algorithm
BP神经网络算法
master

搜索帮助