在alexnet 訓練最後,會有全連接轉卷積過程。用來替代滑動窗口。下面函數能夠實現這個功能。
emmmm...注意使用時候,根據你的層名字,修改對應代碼。我懶得寫成通用的了,實在是沒啥意義~
而後通常別人都會提供兩個網絡,一個是全連接的,一個是卷積的,兩個對應的層名字不一樣。我不推薦那樣做,因爲你要去修改它的上下層名字,多麻煩。直接不改名字,像下面這樣,方便點。
以上~~~
/**
*將全鏈接網絡,轉換成爲卷積網絡
*/
int ChangeFCToConv(char *pszSrcSolver, char *pszDstSolver, char *pszSrcCaffeModel, char *pszDstCaffeModel)
{
caffe::NetParameter netparam;
string model = pszSrcCaffeModel;
ReadNetParamsFromBinaryFileOrDie(model, &netparam);string
caffe::LayerParameter *srcFc6 = nullptr, *srcFc7 = nullptr, *srcFc8 = nullptr;
caffe::NetParameter srcNetParam;
ReadNetParamsFromTextFileOrDie(pszSrcSolver, &srcNetParam);
for (int i = 0; i < srcNetParam.layer_size(); i++)
{
caffe::LayerParameter *xxx = srcNetParam.mutable_layer(i);
const string &name = xxx->name();
if (name == "fc7-conv")
{
srcFc6 = srcNetParam.mutable_layer(i);
}
}it
int layer_size = netparam.layer_size();
cout << "layer_size = " << layer_size << endl;
caffe::LayerParameter* layerparam = NULL;io
caffe::Net<float> net(pszSrcSolver, caffe::TEST);
net.CopyTrainedLayersFrom(netparam);
auto netBlob = net.blob_by_name("fc6");
for (int i = 0; i < layer_size; i++) {
layerparam = netparam.mutable_layer(i);
const string& layername = layerparam->name();
const string& layertype = layerparam->type();
//cout << "layertype: " << layertype << endl;
if (layername == "conv2")
{
srcFc6 = layerparam;
}table
if (layername == "fc6") {
const string& layername = layerparam->name();
cout << "layername: " << layername << endl;
const string& topname = layerparam->top(0);
cout << " " << topname << endl;
cout << " " << layerparam->type() << endl;stream
layerparam->release_inner_product_param();
caffe::BlobProto *blob0 = layerparam->mutable_blobs(0);
caffe::Blob<float> x_blob0;
x_blob0.FromProto(*blob0, true);
x_blob0.Reshape(4096, 256, 6, 6);
x_blob0.ToProto(blob0);
layerparam->set_type("Convolution");
caffe::ConvolutionParameter *conv = new caffe::ConvolutionParameter();
conv->add_kernel_size(1); //設置你須要的參數
conv->set_kernel_size(0, 6);
conv->set_num_output(4096);
layerparam->set_allocated_convolution_param(conv);model
}
if (layername == "fc7")
{
const string& layername = layerparam->name();
cout << "layername: " << layername << endl;
const string& topname = layerparam->top(0);
cout << " " << topname << endl;
cout << " " << layerparam->type() << endl;
layerparam->release_inner_product_param();
caffe::BlobProto *blob0 = layerparam->mutable_blobs(0);
caffe::Blob<float> x_blob0;
x_blob0.FromProto(*blob0, true);
x_blob0.Reshape(4096, 4096, 1, 1);
x_blob0.ToProto(blob0);
layerparam->set_type("Convolution");
caffe::ConvolutionParameter *conv = new caffe::ConvolutionParameter();
conv->add_kernel_size(1);
conv->set_kernel_size(0, 1);
conv->set_num_output(4096);
layerparam->set_allocated_convolution_param(conv);
}
if (layername == "fc8")
{
const string& layername = layerparam->name();
cout << "layername: " << layername << endl;
const string& topname = layerparam->top(0);
cout << " " << topname << endl;
cout << " " << layerparam->type() << endl;
layerparam->release_inner_product_param();
caffe::BlobProto *blob0 = layerparam->mutable_blobs(0);
caffe::Blob<float> x_blob0;
x_blob0.FromProto(*blob0, true);
x_blob0.Reshape(2, 4096, 1, 1);
x_blob0.ToProto(blob0);
layerparam->set_type("Convolution");
caffe::ConvolutionParameter *conv = new caffe::ConvolutionParameter();
conv->add_kernel_size(1);
conv->set_kernel_size(0, 1);
conv->set_num_output(1000);
layerparam->set_allocated_convolution_param(conv);
}
if (layername == "data")
{
layerparam->release_data_param();
}
}
//fstream outcaffemodel(pszDstCaffeModel,ios_base::out|ios_base::trunc|ios_base::binary);
//netparam.SerializeToOstream(&outcaffemodel);
WriteProtoToBinaryFile(netparam, pszDstCaffeModel);
//outcaffemodel.close();
google::protobuf::ShutdownProtobufLibrary();
return 0;
}