# Example convolution layer (AlexNet-style conv1: 96 filters of 11x11, stride 4).
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
# First param block applies to the filter weights.
param {
lr_mult: 1 # weight learning-rate multiplier — presumably 1 is the Caffe default, making this block optional; TODO confirm
decay_mult: 1 # weight-decay multiplier for the weights
}
# Second param block applies to the bias term.
param {
lr_mult: 2 # biases conventionally learn at 2x the base learning rate
decay_mult: 0 # no weight decay applied to biases
}
convolution_param {
num_output: 96 # number of output feature maps (filters)
kernel_size: 11 # 11x11 spatial kernels
stride: 4
}
}
# LeNet-style conv1: 20 filters of 5x5, stride 1, with explicit fillers.
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
param {
lr_mult: 1 # learning-rate multiplier for the filter weights
}
param {
lr_mult: 2 # learning-rate multiplier for the bias (conventional 2x)
}
convolution_param {
num_output: 20
kernel_size: 5
stride: 1
# Fillers control parameter *initialization* only. They matter when the net
# is trained from scratch; when weights are later loaded from a .caffemodel
# (CopyTrainedLayersFrom) the initialized values are overwritten, so for
# pure inference these blocks are harmless but unused.
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant" # presumably initializes biases to 0 (the filler default) — TODO confirm
}
}
} # NOTE(review): this closing brace for `layer` was missing in the original snippet
// --- Train a net from a user-supplied solver prototxt, then push one test
// --- image through the trained net via a MemoryDataLayer and print scores.
// Ask the user for the solver definition file at runtime.
string prototextPath = "";
cout << "\nEnter the prototext file (e.g lenet_solver-leveldb.prototxt)\n ";
getline(cin, prototextPath);
// parse solver parameters
string solver_prototxt = prototextPath;// "examples/mnist/lenet_solver-leveldb.prototxt";
caffe::SolverParameter solver_param;
caffe::ReadProtoFromTextFileOrDie(solver_prototxt, &solver_param);
// set device id and mode — hard-coded to GPU device 0
Caffe::SetDevice(0);
Caffe::set_mode(Caffe::GPU);
// solver handler
caffe::shared_ptr<caffe::Solver<float>> solver(caffe::GetSolver<float>(solver_param));
// start solver — blocks until training completes
solver->Solve();
// get a testing image and display it
// NOTE(review): `path` is not declared anywhere in this snippet — presumably
// defined earlier in the file; verify it points at a test image.
Mat img = imread(path);//(CAFFE_ROOT + "/examples/images/mnist_5.png");
cvtColor(img, img, CV_BGR2GRAY); // convert to single-channel; the MNIST-style net expects grayscale input
imshow("img", img);
waitKey(1); // 1 ms wait: renders the window without pausing execution
// Load net
// NOTE(review): this reuses the *solver* prototxt path as the net definition,
// but the commented-out default shows a test-net prototxt
// (lenet_test-memory-1.prototxt) was intended — looks like a bug; confirm.
Net<float> net(prototextPath);//(CAFFE_ROOT + "/examples/mnist/lenet_test-memory-1.prototxt");
// NOTE(review): `modelPath` is also undeclared in this snippet — confirm it
// holds the trained .caffemodel path.
string model_file = modelPath;//CAFFE_ROOT + "/examples/mnist/lenet_iter_10000.caffemodel";
net.CopyTrainedLayersFrom(model_file);
// set the patch for testing
vector<Mat> patches;
patches.push_back(img);
// push vector<Mat> to data layer
float loss = 0.0;
boost::shared_ptr<MemoryDataLayer<float> > memory_data_layer;
memory_data_layer = boost::static_pointer_cast<MemoryDataLayer<float>>(net.layer_by_name("data"));
vector<int> labels(patches.size()); // dummy labels (value-initialized to 0) — AddMatVector requires a label per image
memory_data_layer->AddMatVector(patches, labels);
// Net forward
const vector<Blob<float>*> & results = net.ForwardPrefilled(&loss);
// NOTE(review): assumes the blob of interest is output index 1 and holds 10
// class scores (MNIST digits) — depends entirely on the test prototxt's
// declared outputs; confirm index 1 isn't e.g. an accuracy/argmax blob.
float *output = results[1]->mutable_cpu_data();
// Display the output
for (int i = 0; i < 10; i++) {
printf("Probability to be Number %d is %.3f\n", i, output[i]);
}
# Equivalent pycaffe (Python) workflow, left here as placeholder pseudo-code:
# load a trained net in test mode. Replace the placeholder strings with real
# .prototxt / .caffemodel paths — note 'cafffemodel' (triple f) is a typo in
# the placeholder itself.
net = caffe.Net('prototxt', 'cafffemodel', caffe.TEST)
# fill the input blob with your data; both `...` are placeholders to replace
# (as written this assigns Ellipsis, which would fail on a numeric array).
net.blobs['myblob'].data[...] = ...
net.forward()
# access all blobs you like to view results by net.blobs['blobname']
# and the current/trained layer parameters by net.params['layername']