设为首页 加入收藏

TOP

单隐含层神经网络公式推导及C++实现 笔记(二)
2018-03-02 06:57:00 】 浏览:996
Tags:隐含 神经网络 公式 推导 实现 笔记
// NOTE(review): this chunk was scraped from an HTML page. Template angle
// brackets appear to have been eaten by the HTML extraction — e.g.
// "template void SingleHiddenLayer ::" was presumably
// "template <typename T> void SingleHiddenLayer<T>::", and
// "std::uniform_real_distribution" presumably had a "<T>" argument.
// TODO: confirm against the original listing. Code tokens below are kept
// exactly as extracted; only comments/formatting were added.

// --- Tail of a function whose start lies outside this chunk (presumably the
// training-data setter: it reads train_num, data, labels, feature_length).
// Copies each sample's features into this->x and its label into this->y,
// then returns 0 — apparently a success code.
num); for (int i = 0; i < train_num; ++i) {
    const T* p = data + i * feature_length; // start of sample i's features in the flat input buffer
    this->x[i].resize(feature_length);
    for (int j = 0; j < feature_length; ++j) {
        this->x[i][j] = p[j]; // copy feature j of sample i
    }
    this->y[i] = labels[i]; // copy label of sample i
} return 0; }

// Resets the accumulated cost J and (re)sizes-and-zeroes every per-iteration
// training buffer:
//   dw1/db1 — gradients for the hidden layer (hidden_layer_node_num x feature_length / x 1)
//   dw2/db2 — gradients for the output layer (output_layer_node_num x hidden_layer_node_num / x 1)
//   z1/a1/dz1/da1 — per-sample hidden-layer pre-activations, activations, and their gradients (m x hidden_layer_node_num)
//   z2/a2/dz2/da2 — per-sample output-layer counterparts (m x output_layer_node_num)
// Called at the top of every training iteration (see train() below), so all
// accumulators start each pass at zero.
template void SingleHiddenLayer ::init_train_variable() {
    J = (T)0.; // cost accumulated over the current iteration

    // Hidden-layer gradient buffers, zero-filled.
    dw1.resize(this->hidden_layer_node_num);
    db1.resize(this->hidden_layer_node_num);
    for (int i = 0; i < this->hidden_layer_node_num; ++i) {
        dw1[i].resize(this->feature_length);
        for (int j = 0; j < this->feature_length; ++j) {
            dw1[i][j] = (T)0.;
        }
        db1[i] = (T)0.;
    }

    // Output-layer gradient buffers, zero-filled.
    dw2.resize(this->output_layer_node_num);
    db2.resize(this->output_layer_node_num);
    for (int i = 0; i < this->output_layer_node_num; ++i) {
        dw2[i].resize(this->hidden_layer_node_num);
        for (int j = 0; j < this->hidden_layer_node_num; ++j) {
            dw2[i][j] = (T)0.;
        }
        db2[i] = (T)0.;
    }

    // Per-sample hidden-layer forward/backward buffers (m presumably is the
    // number of training samples — set outside this chunk; confirm).
    z1.resize(this->m); a1.resize(this->m); da1.resize(this->m); dz1.resize(this->m);
    for (int i = 0; i < this->m; ++i) {
        z1[i].resize(this->hidden_layer_node_num);
        a1[i].resize(this->hidden_layer_node_num);
        dz1[i].resize(this->hidden_layer_node_num);
        da1[i].resize(this->hidden_layer_node_num);
        for (int j = 0; j < this->hidden_layer_node_num; ++j) {
            z1[i][j] = (T)0.;
            a1[i][j] = (T)0.;
            dz1[i][j] = (T)0.;
            da1[i][j] = (T)0.;
        }
    }

    // Per-sample output-layer forward/backward buffers.
    z2.resize(this->m); a2.resize(this->m); da2.resize(this->m); dz2.resize(this->m);
    for (int i = 0; i < this->m; ++i) {
        z2[i].resize(this->output_layer_node_num);
        a2[i].resize(this->output_layer_node_num);
        dz2[i].resize(this->output_layer_node_num);
        da2[i].resize(this->output_layer_node_num);
        for (int j = 0; j < this->output_layer_node_num; ++j) {
            z2[i][j] = (T)0.;
            a2[i][j] = (T)0.;
            dz2[i][j] = (T)0.;
            da2[i][j] = (T)0.;
        }
    }
}

// Allocates the weight matrices/bias vectors of both layers and initializes
// every entry with a uniform random draw from [-0.01, 0.01) — small random
// values so hidden units start asymmetric (all-zero init would make every
// hidden node compute the same gradient).
template void SingleHiddenLayer ::init_w_and_b() {
    w1.resize(this->hidden_layer_node_num); // (hidden_layer_node_num, feature_length)
    b1.resize(this->hidden_layer_node_num); // (hidden_layer_node_num, 1)
    w2.resize(this->output_layer_node_num); // (output_layer_node_num, hidden_layer_node_num)
    b2.resize(this->output_layer_node_num); // (output_layer_node_num, 1)

    // Mersenne-Twister engine seeded from the OS entropy source; init is
    // therefore non-deterministic across runs.
    std::random_device rd;
    std::mt19937 generator(rd());
    std::uniform_real_distribution distribution(-0.01, 0.01);

    // Hidden-layer weights and biases.
    for (int i = 0; i < this->hidden_layer_node_num; ++i) {
        w1[i].resize(this->feature_length);
        for (int j = 0; j < this->feature_length; ++j) {
            w1[i][j] = distribution(generator);
        }
        b1[i] = distribution(generator);
    }

    // Output-layer weights and biases.
    for (int i = 0; i < this->output_layer_node_num; ++i) {
        w2[i].resize(this->hidden_layer_node_num);
        for (int j = 0; j < this->hidden_layer_node_num; ++j) {
            w2[i][j] = distribution(generator);
        }
        b2[i] = distribution(generator);
    }
}

// --- train() — TRUNCATED: the body continues on the next page of the article
// (only the start of the forward pass is visible here). Requires exactly one
// output node. `model` is presumably the path the trained model is saved to
// later in the (unseen) remainder — confirm on the following chunk.
template int SingleHiddenLayer ::train(const std::string& model) {
    CHECK(x.size() == y.size()); // one label per sample
    CHECK(output_layer_node_num == 1); // implementation supports a single output unit only
    init_w_and_b(); // random-initialize parameters once before iterating
    for (int iter = 0; iter < this->iterations; ++iter) {
        init_train_variable(); // zero cost, gradients, and activations for this pass
        for (int i = 0; i < this->m; ++i) { // for every training sample i
            // Hidden layer forward pass for sample i.
            for (int p = 0; p < this->hidden_layer_node_num; ++p) {
                for (int q = 0; q < this->feature_length; ++q) {
                    z1[i][p] += w1[p][q] * x[i][q]; // dot(w1 row p, x(i))
                }
                z1[i][p] += b1[p]; // z[1](i)=w[1]*x(i)+b[1]
                a1[i][p] = calculate_activation_function(z1[i][p], this->hidden_layer_activation_type);
首页 上一页 1 2 3 4 5 6 下一页 尾页 2/6/6
】【打印繁体】【投稿】【收藏】 【推荐】【举报】【评论】 【关闭】 【返回顶部
上一篇c++与C const变量的区别详解 下一篇C++中的引用(代码实例)

最新文章

热门文章

Hot 文章

Python

C 语言

C++基础

大数据基础

linux编程基础

C/C++面试题目