
Python 200 Curated Tips: 141-145

Operating system: macOS Sonoma 14.6.1
Python IDE: PyCharm 2024.1.4 (Community Edition)
Python version: 3.12

Previous posts:

1-5  6-10  11-20  21-30  31-40  41-50
51-60: Functions  61-70: Classes  71-80: Programming paradigms and design patterns
81-90: Python coding conventions  91-100: Common built-in modules, part 1
101-105: Built-in modules, part 2  106-110: Built-in modules, part 3
111-115: Popular third-party packages: everyday workhorses  116-120: Popular third-party packages: deep learning
121-125: Popular third-party packages: web scraping  126-130: Popular third-party packages: just for fun
131-135: Popular third-party packages: extension tools, part 1  136-140: Popular third-party packages: extension tools, part 2
P141 – A BP neural network classifier hand-written in pure numpy
Tech stack: numpy (version 1.26.4) + a BP (backpropagation) neural network model
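Before diving into the code, here is the math the backpropagate step below implements: plain gradient descent on a squared-error loss, with sigmoid activations throughout. Writing a^{(l)} for the activations of layer l (which feedforward caches) and using the identity \sigma'(z) = \sigma(z)(1 - \sigma(z)) = a(1 - a), the deltas and parameter updates are (notation mine, chosen to match the variable names in the code):

\delta^{(L)} = (a^{(L)} - y) \odot a^{(L)}(1 - a^{(L)})
\delta^{(l)} = \bigl(\delta^{(l+1)} W_{l+1}^{\top}\bigr) \odot a^{(l)}(1 - a^{(l)})
W_l \leftarrow W_l - \tfrac{\eta}{m}\,(a^{(l-1)})^{\top}\delta^{(l)}, \qquad b_l \leftarrow b_l - \tfrac{\eta}{m}\textstyle\sum_i \delta_i^{(l)}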
import numpy as np
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import OneHotEncoder

class NeuralNetwork:
    def __init__(self, input_size, hidden_layers, output_size):
        self.layers = [input_size] + hidden_layers + [output_size]
        self.weights = []
        self.biases = []

        for i in range(len(self.layers) - 1):
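            # 1/sqrt(fan_in) (Xavier-style) scaling keeps the sigmoid units out of
            # their flat saturation regions at the start of training.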
            weight = np.random.randn(self.layers[i], self.layers[i + 1]) * np.sqrt(1. / self.layers[i])
            bias = np.zeros((1, self.layers[i + 1]))
            self.weights.append(weight)
            self.biases.append(bias)

    def sigmoid(self, z):
        return 1 / (1 + np.exp(-z))

    def sigmoid_derivative(self, z):
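        # Note: the argument is the *activation* a = sigmoid(z), not the raw z,
        # so a * (1 - a) is the correct derivative; all callers pass activations.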
        return z * (1 - z)

    def feedforward(self, X):
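        # Cache every layer's activations; backpropagate() reads them back below.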
        self.a = [X]
        for w, b in zip(self.weights, self.biases):
            z = np.dot(self.a[-1], w) + b
            a = self.sigmoid(z)
            self.a.append(a)
        return self.a[-1]

    def backpropagate(self, X, y, learning_rate):
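        # One gradient-descent step on the batch-averaged squared error.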
        m = y.shape[0]
        output = self.feedforward(X)

        error = output - y
        deltas = [error * self.sigmoid_derivative(output)]

        for i in reversed(range(len(self.weights) - 1)):
            error = deltas[-1].dot(self.weights[i + 1].T)
            deltas.append(error * self.sigmoid_derivative(self.a[i + 1]))

        deltas.reverse()

        for i in range(len(self.weights)):
            self.weights[i] -= learning_rate * self.a[i].T.dot(deltas[i]) / m
            self.biases[i] -= learning_rate * np.sum(deltas[i], axis=0, keepdims=True) / m

    def train(self, X, y, epochs, learning_rate):
        for epoch in range(epochs):
            self.backpropagate(X, y, learning_rate)
            if epoch % 1000 == 0:
                loss = np.mean(np.square(y - self.feedforward(X)))
                print(f'Epoch {epoch}, Loss: {loss:.6f}')

    def predict(self, X):
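        # The predicted class is the index of the largest output activation.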
        return np.argmax(self.feedforward(X), axis=1)

# Example usage
if __name__ == "__main__":
    # Load the iris dataset and one-hot encode the labels
    iris = load_iris()
    X = iris.data
    y = OneHotEncoder(sparse_output=False).fit_transform(iris.target.reshape(-1, 1))
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

    # The snippet is truncated here in the original; the network shape below
    # (4 inputs, one hidden layer of 10 units, 3 outputs) and the training
    # hyperparameters are illustrative assumptions.
    nn = NeuralNetwork(input_size=4, hidden_layers=[10], output_size=3)
    nn.train(X_train, y_train, epochs=10000, learning_rate=0.1)

    predictions = nn.predict(X_test)
    accuracy = np.mean(predictions == np.argmax(y_test, axis=1))
    print(f'Test accuracy: {accuracy:.4f}')
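As a quick sanity check (my addition, not part of the original tip), the analytic gradients can be compared against finite differences on a tiny network. The helper below is a hypothetical sketch: because one backpropagate step taken with learning_rate=1.0 changes each weight by exactly minus its gradient, the gradient can be recovered as W_before - W_after and compared entry-wise against a central difference of the matching loss, 0.5 * sum((output - y)**2) / m.

def gradient_check(eps=1e-5):
    """Hypothetical helper: compare backprop's gradient to a finite difference."""
    rng = np.random.default_rng(0)
    X = rng.standard_normal((6, 2))
    y = np.eye(2)[rng.integers(0, 2, size=6)]   # random one-hot targets
    nn = NeuralNetwork(2, [3], 2)

    def loss():
        # The loss whose gradient backpropagate() actually descends.
        return 0.5 * np.sum((nn.feedforward(X) - y) ** 2) / X.shape[0]

    # With learning_rate=1.0 the update is W -= grad, so grad = W_before - W_after.
    w_before = [w.copy() for w in nn.weights]
    b_before = [b.copy() for b in nn.biases]
    nn.backpropagate(X, y, learning_rate=1.0)
    analytic = w_before[0][0, 0] - nn.weights[0][0, 0]
    nn.weights = w_before            # restore the parameters
    nn.biases = b_before

    # Central finite difference on the same weight entry
    nn.weights[0][0, 0] += eps
    loss_plus = loss()
    nn.weights[0][0, 0] -= 2 * eps
    loss_minus = loss()
    nn.weights[0][0, 0] += eps       # restore the entry
    numeric = (loss_plus - loss_minus) / (2 * eps)
    print(f'analytic={analytic:.8f}  numeric={numeric:.8f}')

Calling gradient_check() should print two numbers that agree to several decimal places; a large gap would indicate a bug in the delta recursion or the weight update.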