设为首页 加入收藏

TOP

单隐含层神经网络公式推导及C++实现 笔记(四)
2018-03-02 06:57:00 】 浏览:995
Tags:隐含 神经网络 公式 推导 实现 笔记
// a[1](i)=g[1](z[1](i)) } for (int p = 0; p < this->output_layer_node_num; ++p) { for (int q = 0; q < this->hidden_layer_node_num; ++q) { z2[i][p] += w2[p][q] * a1[i][q]; } z2[i][p] += b2[p]; // z[2](i)=w[2]*a[1](i)+b[2] a2[i][p] = calculate_activation_function(z2[i][p], this->output_layer_activation_type); // a[2](i)=g[2](z[2](i)) } for (int p = 0; p < this->output_layer_node_num; ++p) { J += -(y[i] * std::log(a2[i][p]) + (1 - y[i] * std::log(1 - a2[i][p]))); // J+=-[y(i)*loga[2](i)+(1-y(i))*log(1-a[2](i))] } for (int p = 0; p < this->output_layer_node_num; ++p) { da2[i][p] = -(y[i] / a2[i][p]) + ((1. - y[i]) / (1. - a2[i][p])); // da[2](i)=-(y(i)/a[2](i))+((1-y(i))/(1.-a[2](i))) dz2[i][p] = da2[i][p] * calcuate_activation_function_derivative(z2[i][p], this->output_layer_activation_type); // dz[2](i)=da[2](i)*g[2]'(z[2](i)) } for (int p = 0; p < this->output_layer_node_num; ++p) { for (int q = 0; q < this->hidden_layer_node_num; ++q) { dw2[p][q] += dz2[i][p] * a1[i][q]; // dw[2]+=dz[2](i)*(a[1](i)^T) } db2[p] += dz2[i][p]; // db[2]+=dz[2](i) } for (int p = 0; p < this->hidden_layer_node_num; ++p) { for (int q = 0; q < this->output_layer_node_num; ++q) { da1[i][p] = w2[q][p] * dz2[i][q]; // (da[1](i)=w[2](i)^T)*dz[2](i) dz1[i][p] = da1[i][p] * calcuate_activation_function_derivative(z1[i][p], this->hidden_layer_activation_type); // dz[1](i)=da[1](i)*(g[1]'(z[1](i))) } } for (int p = 0; p < this->hidden_layer_node_num; ++p) { for (int q = 0; q < this->feature_length; ++q) { dw1[p][q] += dz1[i][p] * x[i][q]; // dw[1]+=dz[1](i)*(x(i)^T) } db1[p] += dz1[i][p]; // db[1]+=dz[1](i) } } J /= m; for (int p = 0; p < this->output_layer_node_num; ++p) { for (int q = 0; q < this->hidden_layer_node_num; ++q) { dw2[p][q] = dw2[p][q] / m; // dw[2] /=m } db2[p] = db2[p] / m; // db[2] /=m } for (int p = 0; p < this->hidden_layer_node_num; ++p) { for (int q = 0; q < this->feature_length; ++q) { dw1[p][q] = dw1[p][q] / m; // dw[1] /= m } db1[p] = db1[p] / m; // db[1] /= m } for (int 
p = 0; p < this->output_layer_node_num; ++p) { for (int q = 0; q < this->hidden_layer_node_num; ++q) { w2[p][q] = w2[p][q] - this->alpha * dw2[p][q]; // w[2]=w[2]-alpha*dw[2] } b2[p] = b2[p] - this->alpha * db2[p]; // b[2]=b[2]-alpha*db[2] } for (int p = 0; p < this->hidden_layer_node_num; ++p) { for (int q = 0; q < this->feature_length; ++q) { w1[p][q] = w1[p][q] - this->alpha * dw1[p][q]; // w[1]=w[1]-alpha*dw[1] } b1[p] = b1[p] - this->alpha * db1[p]; // b[1]=b[1]-alpha*db[1] } } CHECK(store_model(model) == 0); } template int SingleHiddenLayer ::load_model(const std::string& model) { std::ifstream file; file.open(model.c_str(), std::ios::binary); if (!file.is_open()) { fprintf(stderr, "open file fail: %s\n", model.c_str()); return -1; } file.read((char*)&this->hidden_layer_node_num, sizeof(int)); file.read((char*)&this->output_layer_node_num, sizeof(int)); int type{ -1 }; file.read((char*)&type, sizeof(int)); this->hidden_layer_activation_type = static_cast (type); file.read((char*)&type, sizeof(int)); this->output_layer_activation_type = static_cast (type); file.read((char*)&this->feature_length, sizeof(int)); this->w1.resize(this->hidden_layer_node_num); for (int i = 0; i < this->hidden_layer_node_num; ++i
首页 上一页 1 2 3 4 5 6 下一页 尾页 4/6/6
【打印繁体】【投稿】【收藏】 【推荐】【举报】【评论】 【关闭】 【返回顶部
上一篇c++与C const变量的区别详解 下一篇C++中的引用(代码实例)

最新文章

热门文章

Hot 文章

Python

C 语言

C++基础

大数据基础

linux编程基础

C/C++面试题目