First, a small example of using Net:

#include <vector>
#include <iostream>
#include <caffe/net.hpp>
using namespace std;
using namespace caffe;
int main()
{
std::string proto("./bambootry/deploy.prototxt");
Net<float> nn(proto,caffe::TEST);
vector<string> bn = nn.blob_names(); // names of all Blob objects in the Net
for (size_t i = 0; i < bn.size(); i++)
{
cout << "Blob #" << i << " : " << bn[i] << endl;
}
return 0;
}

Compile on Linux (bambootry is a directory created by the author):

g++ -o ./bambootry/netapp ./bambootry/net.cpp -I ./include -D CPU_ONLY \
-I ./.build_release/src/ -L ./build/lib -lcaffe -lglog -lboost_system \
-lprotobuf
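
Since the binary links against the shared libcaffe, the dynamic loader has to be able to find it at run time. Assuming the build layout used above, an invocation along these lines should work:

export LD_LIBRARY_PATH=./build/lib:$LD_LIBRARY_PATH
./bambootry/netapp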

Result:

…… (middle of output omitted)

(rest of output omitted) ……

Annotated code

NetParameter in src/caffe/proto/caffe.proto

message NetParameter {
optional string name = 1; // consider giving the network a name
// DEPRECATED. See InputParameter. The input blobs to the network.
repeated string input = 3; // names of the network's input blobs; there may be several
// DEPRECATED. See InputParameter. The shape of the input blobs.
repeated BlobShape input_shape = 8; // shape information of the input blobs
// 4D input dimensions -- deprecated. Use "input_shape" instead.
// If specified, for each input blob there should be four
// values specifying the num, channels, height and width of the input blob.
// Thus, there should be a total of (4 * #input) numbers.
repeated int32 input_dim = 4; // legacy dimension information
// Whether the network will force every layer to carry out backward operation.
// If set False, then whether to carry out backward is determined
// automatically according to the net structure and learning rates.
optional bool force_backward = 5 [default = false];
// The current "state" of the network, including the phase, level, and stage.
// Some layers may be included/excluded depending on this state and the states
// specified in the layers' include and exclude fields.
optional NetState state = 6; // current state (phase, level, and stage), which determines whether certain layers are included
// Print debugging information about results while running Net::Forward,
// Net::Backward, and Net::Update.
optional bool debug_info = 7 [default = false];
// The layers that make up the net. Each of their configurations, including
// connectivity and behavior, is specified as a LayerParameter.
repeated LayerParameter layer = 100; // ID 100 so layers are printed last.
// DEPRECATED: use 'layer' instead.
repeated V1LayerParameter layers = 2; // deprecated
}
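
To make these fields concrete, here is a minimal, hypothetical deploy.prototxt of the kind the example above loads. It uses the non-deprecated Input layer instead of the input/input_shape fields, and the layer names and shapes are purely illustrative:

name: "TinyNet"
layer {
  name: "data"
  type: "Input"
  top: "data"
  input_param { shape: { dim: 1 dim: 3 dim: 224 dim: 224 } }
}
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  convolution_param { num_output: 16 kernel_size: 3 }
}

Loading a net like this with Net<float> nn(proto, caffe::TEST) and printing blob_names() would list "data" and "conv1".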

include/caffe/net.hpp

#ifndef CAFFE_NET_HPP_
#define CAFFE_NET_HPP_

#include <map>
#include <set>
#include <string>
#include <utility>
#include <vector>

#include "caffe/blob.hpp"
#include "caffe/common.hpp"
#include "caffe/layer.hpp"
#include "caffe/proto/caffe.pb.h"

namespace caffe {

/**
* @brief Connects Layer%s together into a directed acyclic graph (DAG)
* specified by a NetParameter.
*
* TODO(dox): more thorough description.
*/
template <typename Dtype>
class Net {
public:
explicit Net(const NetParameter& param);
explicit Net(const string& param_file, Phase phase,
const int level = 0, const vector<string>* stages = NULL);
virtual ~Net() {}

/// @brief Initialize a network with a NetParameter.
void Init(const NetParameter& param); // initialize the Net from a NetParameter object

/**
* @brief Run Forward and return the result.
*
*/
// forward pass
const vector<Blob<Dtype>*>& Forward(Dtype* loss = NULL);
/// @brief DEPRECATED; use Forward() instead.
const vector<Blob<Dtype>*>& ForwardPrefilled(Dtype* loss = NULL) {
LOG_EVERY_N(WARNING, 1000) << "DEPRECATED: ForwardPrefilled() "
<< "will be removed in a future version. Use Forward().";
return Forward(loss);
}

/**
* The From and To variants of Forward and Backward operate on the
* (topological) ordering by which the net is specified. For general DAG
* networks, note that (1) computing from one layer to another might entail
* extra computation on unrelated branches, and (2) computation starting in
* the middle may be incorrect if all of the layers of a fan-in are not
* included.
*/
// variants of the forward pass
Dtype ForwardFromTo(int start, int end);
Dtype ForwardFrom(int start);
Dtype ForwardTo(int end);
/// @brief DEPRECATED; set input blobs then use Forward() instead.
// Deprecated: took input blobs and returned output blobs; set the input
// blobs and call Forward() instead.
const vector<Blob<Dtype>*>& Forward(const vector<Blob<Dtype>*>& bottom,
Dtype* loss = NULL);

/**
* @brief Zeroes out the diffs of all net parameters.
* Should be run before Backward.
*/
// Zeroes the diffs of all net parameters; should be called before Backward.
void ClearParamDiffs();

/**
* The network backward should take no input and output, since it solely
* computes the gradient w.r.t the parameters, and the data has already been
* provided during the forward pass.
*/
// The backward functions take no input or output blobs; the data was already provided during the forward pass.
void Backward();
void BackwardFromTo(int start, int end);
void BackwardFrom(int start);
void BackwardTo(int end);

/**
* @brief Reshape all layers from bottom to top.
*
* This is useful to propagate changes to layer sizes without running
* a forward pass, e.g. to compute output feature size.
*/
// Reshape all layers from bottom to top; each layer's blob sizes can be recomputed without running a forward pass.
void Reshape();
// forward pass followed by backward pass; returns the loss
Dtype ForwardBackward() {
Dtype loss;
Forward(&loss);
Backward();
return loss;
}

/// @brief Updates the network weights based on the diff values computed.
void Update(); // applies the diffs prepared by the Solver to the network weights
/**
* @brief Shares weight data of owner blobs with shared blobs.
*
* Note: this is called by Net::Init, and thus should normally not be
* called manually.
*/
void ShareWeights(); // called by Init(); should not be called manually

/**
* @brief For an already initialized net, implicitly copies (i.e., using no
* additional memory) the pre-trained layers from another Net.
*/
void ShareTrainedLayersWith(const Net* other);
// For an already initialized net, CopyTrainedLayersFrom() copies the already
// trained layers from another net parameter instance.
/**
* @brief For an already initialized net, copies the pre-trained layers from
* another Net.
*/
void CopyTrainedLayersFrom(const NetParameter& param);
void CopyTrainedLayersFrom(const string trained_filename);
void CopyTrainedLayersFromBinaryProto(const string trained_filename);
void CopyTrainedLayersFromHDF5(const string trained_filename);
/// @brief Writes the net to a proto.
void ToProto(NetParameter* param, bool write_diff = false) const;
/// @brief Writes the net to an HDF5 file.
void ToHDF5(const string& filename, bool write_diff = false) const;

/// @brief returns the network name
inline const string& name() const { return name_; }
/// @brief returns the layer names
inline const vector<string>& layer_names() const { return layer_names_; }
/// @brief returns the blob names
inline const vector<string>& blob_names() const { return blob_names_; }
/// @brief returns the blobs
inline const vector<shared_ptr<Blob<Dtype> > >& blobs() const {
return blobs_;
}
/// @brief returns the layers
inline const vector<shared_ptr<Layer<Dtype> > >& layers() const {
return layers_;
}
/// @brief returns the phase: TRAIN or TEST
inline Phase phase() const { return phase_; }
/**
* @brief returns the bottom vecs for each layer -- usually you won't
* need this unless you do per-layer checks such as gradients.
*/
inline const vector<vector<Blob<Dtype>*> >& bottom_vecs() const {
return bottom_vecs_; // per-layer inputs; needed for gradient checks, rarely otherwise
}
/**
* @brief returns the top vecs for each layer -- usually you won't
* need this unless you do per-layer checks such as gradients.
*/
inline const vector<vector<Blob<Dtype>*> >& top_vecs() const {
return top_vecs_; // per-layer outputs; needed for gradient checks, rarely otherwise
}
/// @brief returns the ids of the top blobs of layer i
inline const vector<int> & top_ids(int i) const {
CHECK_GE(i, 0) << "Invalid layer id";
CHECK_LT(i, top_id_vecs_.size()) << "Invalid layer id";
return top_id_vecs_[i];
}
/// @brief returns the ids of the bottom blobs of layer i
inline const vector<int> & bottom_ids(int i) const {
CHECK_GE(i, 0) << "Invalid layer id";
CHECK_LT(i, bottom_id_vecs_.size()) << "Invalid layer id";
return bottom_id_vecs_[i];
}
inline const vector<vector<bool> >& bottom_need_backward() const {
return bottom_need_backward_;
}
inline const vector<Dtype>& blob_loss_weights() const {
return blob_loss_weights_;
}
inline const vector<bool>& layer_need_backward() const {
return layer_need_backward_;
}
/// @brief returns the parameters
inline const vector<shared_ptr<Blob<Dtype> > >& params() const {
return params_;
}
inline const vector<Blob<Dtype>*>& learnable_params() const {
return learnable_params_;
}
/// @brief returns the learnable parameter learning rate multipliers
inline const vector<float>& params_lr() const { return params_lr_; }
inline const vector<bool>& has_params_lr() const { return has_params_lr_; }
/// @brief returns the learnable parameter decay multipliers
inline const vector<float>& params_weight_decay() const {
return params_weight_decay_;
}
inline const vector<bool>& has_params_decay() const {
return has_params_decay_;
}
const map<string, int>& param_names_index() const {
return param_names_index_;
}
inline const vector<int>& param_owners() const { return param_owners_; }
inline const vector<string>& param_display_names() const {
return param_display_names_;
}
/// @brief Input and output blob numbers
inline int num_inputs() const { return net_input_blobs_.size(); }
inline int num_outputs() const { return net_output_blobs_.size(); }
inline const vector<Blob<Dtype>*>& input_blobs() const {
return net_input_blobs_; // the net's input blobs
}
inline const vector<Blob<Dtype>*>& output_blobs() const {
return net_output_blobs_; // the net's output blobs
}
inline const vector<int>& input_blob_indices() const {
return net_input_blob_indices_; // indices of the net's input blobs
}
inline const vector<int>& output_blob_indices() const {
return net_output_blob_indices_; // indices of the net's output blobs
}
bool has_blob(const string& blob_name) const; // does the net contain a blob with this name?
const shared_ptr<Blob<Dtype> > blob_by_name(const string& blob_name) const; // if so, return it
bool has_layer(const string& layer_name) const; // does the net contain a layer with this name?
const shared_ptr<Layer<Dtype> > layer_by_name(const string& layer_name) const; // if so, return it

void set_debug_info(const bool value) { debug_info_ = value; }

// Helpers for Init. The following are used during initialization.
/**
* @brief Remove layers that the user specified should be excluded given the current
* phase, level, and stage.
*/
// filters out layers the user excluded for the current phase, level, and stage
static void FilterNet(const NetParameter& param,
NetParameter* param_filtered);
/// @brief return whether NetState state meets NetStateRule rule
// checks whether a NetState satisfies a NetStateRule
static bool StateMeetsRule(const NetState& state, const NetStateRule& rule,
const string& layer_name);

// Invoked at specific points during an iteration
class Callback {
protected:
virtual void run(int layer) = 0;

template <typename T>
friend class Net;
};
const vector<Callback*>& before_forward() const { return before_forward_; }
void add_before_forward(Callback* value) {
before_forward_.push_back(value);
}
const vector<Callback*>& after_forward() const { return after_forward_; }
void add_after_forward(Callback* value) {
after_forward_.push_back(value);
}
const vector<Callback*>& before_backward() const { return before_backward_; }
void add_before_backward(Callback* value) {
before_backward_.push_back(value);
}
const vector<Callback*>& after_backward() const { return after_backward_; }
void add_after_backward(Callback* value) {
after_backward_.push_back(value);
}

protected:
// Helpers for Init.
/// @brief Append a new top (output) blob to the net.
void AppendTop(const NetParameter& param, const int layer_id,
const int top_id, set<string>* available_blobs,
map<string, int>* blob_name_to_idx);
/// @brief Append a new bottom (input) blob to the net.
int AppendBottom(const NetParameter& param, const int layer_id,
const int bottom_id, set<string>* available_blobs,
map<string, int>* blob_name_to_idx);
/// @brief Append a new parameter (weight) blob to the net.
void AppendParam(const NetParameter& param, const int layer_id,
const int param_id);

// The following helpers display debug information.
/// @brief Helper for displaying debug info in Forward.
void ForwardDebugInfo(const int layer_id);
/// @brief Helper for displaying debug info in Backward.
void BackwardDebugInfo(const int layer_id);
/// @brief Helper for displaying debug info in Update.
void UpdateDebugInfo(const int param_id);

/// @brief The network name
string name_;
/// @brief The phase: TRAIN or TEST
Phase phase_;
/// @brief Individual layers in the net
vector<shared_ptr<Layer<Dtype> > > layers_;
vector<string> layer_names_; // layer names
map<string, int> layer_names_index_; // map from layer name to index
vector<bool> layer_need_backward_; // whether each layer needs backward computation
/// @brief the blobs storing intermediate results between the layers.
vector<shared_ptr<Blob<Dtype> > > blobs_; // the data channels between layers
vector<string> blob_names_; // blob names
map<string, int> blob_names_index_; // map from blob name to index
vector<bool> blob_need_backward_; // whether each blob needs backward computation
/// bottom_vecs stores the vectors containing the input for each layer.
/// They don't actually host the blobs (blobs_ does), so we simply store
/// pointers.
// pointers to each layer's input blobs (the data itself is owned by blobs_)
vector<vector<Blob<Dtype>*> > bottom_vecs_;
vector<vector<int> > bottom_id_vecs_;
vector<vector<bool> > bottom_need_backward_;
/// top_vecs stores the vectors containing the output for each layer
// pointers to each layer's output blobs (the data itself is owned by blobs_)
vector<vector<Blob<Dtype>*> > top_vecs_;
vector<vector<int> > top_id_vecs_;
/// Vector of weight in the loss (or objective) function of each net blob,
/// indexed by blob_id.
// each blob's weight in the global loss (objective) function
vector<Dtype> blob_loss_weights_;
vector<vector<int> > param_id_vecs_;
vector<int> param_owners_;
vector<string> param_display_names_;
vector<pair<int, int> > param_layer_indices_;
map<string, int> param_names_index_;
/// blob indices for the input and the output of the net
// indices of the net's input and output blobs
vector<int> net_input_blob_indices_;
vector<int> net_output_blob_indices_;
vector<Blob<Dtype>*> net_input_blobs_;
vector<Blob<Dtype>*> net_output_blobs_;
/// The parameters in the network.
vector<shared_ptr<Blob<Dtype> > > params_;
vector<Blob<Dtype>*> learnable_params_; // the trainable network weights
/**
* The mapping from params_ -> learnable_params_: we have
* learnable_param_ids_.size() == params_.size(),
* and learnable_params_[learnable_param_ids_[i]] == params_[i].get()
* if and only if params_[i] is an "owner"; otherwise, params_[i] is a sharer
* and learnable_params_[learnable_param_ids_[i]] gives its owner.
*/
vector<int> learnable_param_ids_;
/// the learning rate multipliers for learnable_params_
vector<float> params_lr_; // learning-rate multipliers
vector<bool> has_params_lr_;
/// the weight decay multipliers for learnable_params_
vector<float> params_weight_decay_; // weight-decay multipliers
vector<bool> has_params_decay_;
/// The bytes of memory used by this net
size_t memory_used_; // bytes of memory used by this net
/// Whether to compute and display debug info for the net.
bool debug_info_; // whether to compute and display debug info
// Callbacks
vector<Callback*> before_forward_;
vector<Callback*> after_forward_;
vector<Callback*> before_backward_;
vector<Callback*> after_backward_;

DISABLE_COPY_AND_ASSIGN(Net); // forbid copy construction and assignment
};

}  // namespace caffe

#endif  // CAFFE_NET_HPP_
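
As a sketch of how the accessors above fit together, here is a minimal example assuming the hypothetical deploy.prototxt sketched earlier; the blob name "conv1" is illustrative, not from the original post. Note that the tops of an Input layer are collected as the net's input blobs, so input_blobs() is non-empty here:

#include <iostream>
#include <string>
#include <vector>
#include <caffe/net.hpp>

using namespace caffe;

int main()
{
std::string proto("./bambootry/deploy.prototxt");
Net<float> net(proto, caffe::TEST);

// Fill the first input blob with a constant dummy value.
Blob<float>* input = net.input_blobs()[0];
float* in_data = input->mutable_cpu_data();
for (int i = 0; i < input->count(); ++i) in_data[i] = 0.5f;

// Run a forward pass, then look up a blob by name.
net.Forward();
if (net.has_blob("conv1")) { // "conv1" is a hypothetical blob name
const shared_ptr<Blob<float> > out = net.blob_by_name("conv1");
std::cout << "conv1 has " << out->count() << " elements" << std::endl;
}
return 0;
}

In a real deployment the weights would first be loaded with CopyTrainedLayersFrom() from a .caffemodel file; without that, the forward pass runs on uninitialized parameters.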

To be continued…

Based on Zhao Yongke (赵永科), "Deep Learning: 21 Days of Hands-On Caffe" (《深度学习 21天实战caffe》).
