
Python: Backpropagation Algorithm

import time
import numpy as np
import xlrd
import xlwt


# Pass a neuron's summed input through the sigmoid activation
def transfer(activation):
    return 1.0 / (1.0 + np.exp(-activation))


# Derivative of the sigmoid, written in terms of the neuron's output
def transfer_derivative(output):
    return output * (1.0 - output)
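
# Quick sanity check (illustrative sketch, not part of the original script): the
# analytic derivative above, written in terms of the sigmoid's output, should match
# a finite-difference estimate of d(sigmoid)/dx at an arbitrary point.
_x = 0.7     # arbitrary test point
_eps = 1e-6
_analytic = transfer_derivative(transfer(_x))                        # out * (1 - out)
_numeric = (transfer(_x + _eps) - transfer(_x - _eps)) / (2 * _eps)
# print(_analytic, _numeric)  # the two values agree to roughly 6 decimal places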

# Read the first sheet of an Excel workbook into a NumPy array
def excel2m(path):
    data = xlrd.open_workbook(path)
    table = data.sheets()[0]
    nrows = table.nrows  # number of rows
    ncols = table.ncols  # number of columns
    datamatrix = np.zeros((nrows, ncols))
    for x in range(ncols):
        cols = table.col_values(x)
        # minVals = min(cols)
        # maxVals = max(cols)
        # normcols = (np.asarray(cols) - minVals) / (maxVals - minVals)  # min-max normalization (disabled)
        datamatrix[:, x] = np.asarray(cols)  # store the column
    return datamatrix
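
# Environment note (assumption): xlrd 2.x dropped .xlsx support, so reading the .xlsx
# label files below requires xlrd < 2.0.  As a fallback, a minimal reader based on
# openpyxl is sketched here; excel2m_xlsx is a hypothetical helper, not part of the
# original script.
def excel2m_xlsx(path):
    from openpyxl import load_workbook  # lazy import so the main script does not need openpyxl
    sheet = load_workbook(path, read_only=True).worksheets[0]
    rows = [list(row) for row in sheet.iter_rows(values_only=True)]
    return np.array(rows, dtype=float)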

# Build a row_num x column_num matrix as nested lists; every entry starts at `start`
# and is incremented by `step` (step=0 gives a constant matrix, used here to
# initialize the weights)
def makeMatrix(row_num, column_num, start=0.1, step=0):
    matrix = []
    row = []
    for i in range(row_num):
        for j in range(column_num):
            row.append(start)
            start += step
        matrix.append(row)
        row = []
    return matrix
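
# Example (illustrative): with the defaults, makeMatrix(2, 3) returns
# [[0.1, 0.1, 0.1], [0.1, 0.1, 0.1]], the same as np.full((2, 3), 0.1).tolist();
# the nested-list form is kept because the training loop indexes it as w[0][k].
assert makeMatrix(2, 3) == np.full((2, 3), 0.1).tolist()
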
# Write a 2-D NumPy array to an .xls file with xlwt
def save(data, path):
    f = xlwt.Workbook()  # create a workbook
    sheet1 = f.add_sheet(u'sheet1', cell_overwrite_ok=True)  # create a sheet
    [h, l] = data.shape  # h: number of rows, l: number of columns
    for i in range(h):
        for j in range(l):
            sheet1.write(i, j, data[i, j])
    f.save(path)

# Shared factor of the gradient: i is the label, j is the prediction.  Every partial
# derivative of the loss contains this fixed part, so it is written once as a function.
def error(i, j):
    return -(i - j) * j * (1 - j)
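
# Where that expression comes from (sketch): for squared error L = 0.5 * (t - y) ** 2
# with y = sigmoid(z), the chain rule gives dL/dz = -(t - y) * y * (1 - y), which is
# exactly error(t, y); the weight update below multiplies it by the input x_k since
# dL/dw_k = dL/dz * x_k.  A finite-difference check with illustrative values:
_t, _z = 1.0, 0.3
_eps = 1e-6
_loss = lambda z: 0.5 * (_t - transfer(z)) ** 2
_numeric_grad = (_loss(_z + _eps) - _loss(_z - _eps)) / (2 * _eps)
# error(_t, transfer(_z)) and _numeric_grad agree to roughly 6 decimal places
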
# Predict every sample in dataset and return the classification accuracy
def look(w, b, dataset, labels):
    p = 0
    for i in range(len(dataset)):
        y = np.add(np.dot(dataset[i], w), b)
        y = transfer(y)
        # pick the class (0 or 1) closer to the sigmoid output, i.e. threshold at 0.5
        if np.fabs(1 - y) >= np.fabs(0 - y):
            t = 0
        else:
            t = 1
        if labels[i][0] == t:
            p += 1
    return p / len(dataset)
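
# A vectorized equivalent of look (sketch, assuming dataset and labels are the 2-D
# NumPy arrays produced by excel2m); the per-sample loop becomes one matrix product.
def look_vectorized(w, b, dataset, labels):
    y = transfer(np.dot(dataset, w) + b)   # predictions for every sample at once
    preds = (y > 0.5).astype(int)          # same 0.5 threshold as look()
    return np.mean(preds == labels[:, 0])  # fraction of correct predictions
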
dataset = excel2m(r'train.xls')             # training features as a matrix
labels = excel2m(r'train_label.xlsx')       # training labels
test_dataset = excel2m(r'test.xls')         # test features
test_labels = excel2m(r'test_label.xlsx')   # test labels
w = makeMatrix(len(labels[0]), len(dataset[0]))  # weights: one row per output neuron
b = 0.5        # bias
l_rate = 0.05  # learning rate
starttime = time.time()
for i in range(3000):  # number of training epochs
    mis = 0
    for j in range(len(dataset)):  # update on each training sample in turn
        y = np.add(np.dot(dataset[j], w[0]), b)  # y = w * x + b
        y = transfer(y)  # sigmoid
        mis += np.fabs(labels[j][0] - y)  # accumulated error for this epoch
        for k in range(len(w[0])):  # update the weights
            w[0][k] -= l_rate * error(labels[j][0], y) * dataset[j][k]
        b -= l_rate * error(labels[j][0], y)  # update the bias
    # print('epoch', i + 1, '--', 'error:', mis)
    if (i + 1) % 1 == 0:  # report after every epoch
        endtime = time.time()
        print('-------------', endtime - starttime, '------------------')
        print('epoch', i + 1, '--', 'accuracy:', look(w[0], b, test_dataset, test_labels))
save(np.array(w), 'weight.xls')
save(np.array([[b]]), 'bias.xls')
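
# For reference, one epoch of the same update can also be written in vectorized
# (batch) form; train_epoch_batch is a sketch, not part of the original script, and
# it sums the gradients over the whole batch rather than updating per sample, so the
# learning trajectory differs slightly even though the gradient formula is the same.
def train_epoch_batch(w_vec, b_val, X, t, lr):
    y = transfer(np.dot(X, w_vec) + b_val)  # predictions for every sample at once
    delta = -(t - y) * y * (1 - y)          # error() evaluated for every sample
    w_vec = w_vec - lr * np.dot(delta, X)   # summed weight gradient
    b_val = b_val - lr * np.sum(delta)      # summed bias gradient
    return w_vec, b_val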


# Reload the saved parameters and verify the accuracy on the test set
w1 = excel2m(r'weight.xls')
b1 = excel2m(r'bias.xls')

print('accuracy:', look(w1[0], b1[0][0], test_dataset, test_labels))
