// Minimal two-layer (input -> hidden -> output) sigmoid network trained with
// stochastic gradient descent. The declarations below (layer sizes, node type,
// sigmoid) are assumed supporting context for the excerpt; adjust them if they
// are already defined elsewhere. Note: the original mixed two weight-storage
// conventions between the forward and backward passes; here every node stores
// its *incoming* weights, so both passes read and update the same arrays.
#include <cstdlib>
#include <cmath>
#include <vector>

const int nx = 2, nb = 4, ny = 1;   // input, hidden and output layer sizes (assumed)
const double eta = 0.5;             // learning rate (assumed)

struct Node {
    double val = 0.0;               // activation
    double bias = 0.0;              // bias term
    std::vector<double> weight;     // incoming weights from the previous layer
};

Node x[nx], b[nb], y[ny];           // input, hidden and output nodes
double g[ny];                       // output-layer error terms
double e[nb];                       // hidden-layer error terms

double sigmoid(double v) { return 1.0 / (1.0 + exp(-v)); }

// Sigmoid derivative expressed through the sigmoid's *output* value,
// so it is applied directly to the stored activations below.
double dsigmoid(double v) { return v * (1 - v); }

void init() { // parameter initialisation: biases and weights in [-0.05, 0.05)
    for (int i = 0; i < nb; i++) {
        b[i].val = 0;
        b[i].bias = ((rand() % 1000) - 500) / 10000.0;
        b[i].weight.resize(nx);
        for (auto &w : b[i].weight) w = ((rand() % 1000) - 500) / 10000.0;
    }
    for (int i = 0; i < ny; i++) {
        y[i].val = 0;
        y[i].bias = ((rand() % 1000) - 500) / 10000.0;
        y[i].weight.resize(nb);
        for (auto &w : y[i].weight) w = ((rand() % 1000) - 500) / 10000.0;
    }
}

void forward_propagation() { // forward pass
    for (int i = 0; i < nb; i++) {
        double sum = 0;
        for (int j = 0; j < nx; j++) sum += x[j].val * b[i].weight[j];
        b[i].val = sigmoid(sum + b[i].bias);
    }
    for (int i = 0; i < ny; i++) {
        double sum = 0;
        for (int j = 0; j < nb; j++) sum += b[j].val * y[i].weight[j];
        y[i].val = sigmoid(sum + y[i].bias);
    }
}

// Backward pass. The caller must first store the raw output error
// (target - y[i].val) in g[i]; this function converts it to a delta,
// backpropagates it to the hidden layer and applies the weight updates.
void back_propagation() {
    for (int i = 0; i < ny; i++) g[i] *= dsigmoid(y[i].val);
    for (int i = 0; i < nb; i++) {
        e[i] = 0;
        for (int j = 0; j < ny; j++) e[i] += g[j] * y[j].weight[i];
        e[i] *= dsigmoid(b[i].val);
    }
    for (int i = 0; i < ny; i++) {
        for (int j = 0; j < nb; j++) y[i].weight[j] += eta * g[i] * b[j].val;
        y[i].bias += eta * g[i];
    }
    for (int i = 0; i < nb; i++) {
        for (int j = 0; j < nx; j++) b[i].weight[j] += eta * e[i] * x[j].val;
        b[i].bias += eta * e[i];
    }
}

void train() {
    // Training code; implement as needed.
}

int main() {
    init();
    train();
    return 0;
}
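The train() stub above is left open in the original answer. Below is a minimal sketch of one possible body, training the network online on the XOR problem; the dataset, the epoch count, and the nx == 2 / ny == 1 sizes are illustrative assumptions, not part of the original. It follows the convention back_propagation() requires: g[i] must hold the raw output error before the call.

// Hypothetical example body for train(): online SGD on XOR.
// Assumes nx == 2 and ny == 1 as declared above.
void train() {
    const double inputs[4][2] = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
    const double targets[4]   = {0, 1, 1, 0};
    for (int epoch = 0; epoch < 100000; epoch++) {
        for (int s = 0; s < 4; s++) {
            // Load one sample into the input layer and run the forward pass.
            for (int j = 0; j < nx; j++) x[j].val = inputs[s][j];
            forward_propagation();
            // Seed g with the raw error; back_propagation() applies dsigmoid.
            for (int i = 0; i < ny; i++) g[i] = targets[s] - y[i].val;
            back_propagation();
        }
    }
}

With eta = 0.5 and four hidden nodes this setup usually drives the XOR error close to zero within the given epochs, though convergence depends on the random initialisation.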