Having gone through Net's initialization code and methods last time, let's now look at its forward-propagation code:
template <typename Dtype>
Dtype Net<Dtype>::ForwardFromTo(int start, int end) {
  // Run forward propagation from layer `start` through layer `end`
  CHECK_GE(start, 0);
  CHECK_LT(end, layers_.size());
  Dtype loss = 0;
  for (int i = start; i <= end; ++i) {
    for (int c = 0; c < before_forward_.size(); ++c) {
      before_forward_[c]->run(i);
    }
    // LOG(ERROR) << "Forwarding " << layer_names_[i];
    // Call each layer's Forward() and accumulate the loss it reports
    Dtype layer_loss = layers_[i]->Forward(bottom_vecs_[i], top_vecs_[i]);
    loss += layer_loss;
    if (debug_info_) { ForwardDebugInfo(i); }
    for (int c = 0; c < after_forward_.size(); ++c) {
      after_forward_[c]->run(i);
    }
  }
  // Return the accumulated loss
  return loss;
}
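Two details are worth calling out. First, layer_loss is typically nonzero only for layers whose top blobs carry a loss_weight (e.g. SoftmaxWithLoss), so loss ends up being the usual training objective. Second, every layer's Forward() is wrapped by the before_forward_ and after_forward_ callback loops. Below is a minimal sketch of how such a hook could be used, assuming the Net::Callback nested class and the add_before_forward() registration declared in net.hpp; LayerLogger and AttachLogger are made-up names for illustration:

#include <caffe/caffe.hpp>

// Hypothetical per-layer hook; assumes Net::Callback and
// add_before_forward() from net.hpp.
template <typename Dtype>
class LayerLogger : public caffe::Net<Dtype>::Callback {
 protected:
  // Net is a friend of Callback, so ForwardFromTo() can invoke this
  // protected override via before_forward_[c]->run(i).
  virtual void run(int layer) {
    LOG(INFO) << "about to forward layer #" << layer;
  }
};

// Register the hook once; afterwards every ForwardFromTo() call runs it
// before each layer's Forward().
void AttachLogger(caffe::Net<float>* net) {
  static LayerLogger<float> logger;  // must outlive the net
  net->add_before_forward(&logger);
}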
template <typename Dtype>
Dtype Net<Dtype>::ForwardFrom(int start) {
  // Run forward propagation from layer `start` through the last layer
  return ForwardFromTo(start, layers_.size() - 1);
}

template <typename Dtype>
Dtype Net<Dtype>::ForwardTo(int end) {
  // Run forward propagation from the first layer through layer `end`
  return ForwardFromTo(0, end);
}
template <typename Dtype>
const vector<Blob<Dtype>*>& Net<Dtype>::Forward(Dtype* loss) {
  // Run forward propagation over the whole network; optionally report the
  // loss and return the network's output blobs
  if (loss != NULL) {
    *loss = ForwardFromTo(0, layers_.size() - 1);
  } else {
    ForwardFromTo(0, layers_.size() - 1);
  }
  return net_output_blobs_;
}
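For completeness, here is how this overload is typically driven from user code. This is only a usage sketch: the prototxt/caffemodel file names are placeholders, and it assumes the standard Net constructor and CopyTrainedLayersFrom():

#include <vector>
#include <caffe/caffe.hpp>

int main() {
  caffe::Caffe::set_mode(caffe::Caffe::CPU);
  // "deploy.prototxt" / "weights.caffemodel" are placeholder file names.
  caffe::Net<float> net("deploy.prototxt", caffe::TEST);
  net.CopyTrainedLayersFrom("weights.caffemodel");

  float loss = 0;
  // Internally this is ForwardFromTo(0, layers_.size() - 1); the return
  // value is net_output_blobs_, i.e. the blobs no layer consumes.
  const std::vector<caffe::Blob<float>*>& out = net.Forward(&loss);
  LOG(INFO) << "loss = " << loss
            << ", first output has " << out[0]->count() << " elements";
  return 0;
}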
template <typename Dtype>
const vector<Blob<Dtype>*>& Net<Dtype>::Forward(
    const vector<Blob<Dtype>*> & bottom, Dtype* loss) {
  // Deprecated overload: take the given blobs as the net's inputs, run
  // forward propagation, and return the loss (optional) and output blobs
  LOG_EVERY_N(WARNING, 1000) << "DEPRECATED: Forward(bottom, loss) "
      << "will be removed in a future version. Use Forward(loss).";
  // Copy bottom to net bottoms (copy the input blobs into net_input_blobs_)
  for (int i = 0; i < bottom.size(); ++i) {
    net_input_blobs_[i]->CopyFrom(*bottom[i]);
  }
  return Forward(loss);
}
At this point we have a first pass over all of the forward-propagation functions, and you should be able to picture how data flows through the DAG. Next we will study the backward-propagation process.