Hi everyone,
I'm writing a new layer (MsgPassLayer) which functions like the convolutional (conv.) layer, and I am testing it with the MNIST example. This new layer just initializes the convolutional parameters and calls the convolution forward and backward.
However, when it is run on the MNIST example, the accuracy is quite low. I don't know how to break the code down to locate the error.
Hope to get your support.
Following is the implementation of the new layer.
template <typename Dtype>
void MsgPassLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {
CHECK_EQ(4, bottom[0]->num_axes()) << "Input must have 4 axes, "
<< "corresponding to (num, channels, height, width)";
// Wire the internal conv layer directly to this layer's bottom/top blobs.
conv_bottom_vec_.clear();
conv_bottom_vec_.push_back(bottom[0]);
conv_top_vec_.clear();
conv_top_vec_.push_back(top[0]);
// Internal convolution configuration: 50 outputs, 5x5 kernel, stride 1.
LayerParameter conv_param;
ConvolutionParameter* conv = conv_param.mutable_convolution_param();
conv->set_num_output(50);
conv->set_kernel_size(5);
conv->set_stride(1);
// mutable_*_filler() lets the message own the sub-message; avoids the raw
// new + set_allocated_* pattern, which leaks if set_allocated is never hit.
conv->mutable_weight_filler()->set_type("xavier");
conv->mutable_bias_filler()->set_type("constant");
conv_layer_.reset(new ConvolutionLayer<Dtype>(conv_param));
conv_layer_->SetUp(conv_bottom_vec_, conv_top_vec_);
// CRITICAL FIX: expose the internal conv layer's learnable parameters
// (weights + bias) through this layer's blobs_. The solver only updates
// parameters it can see in blobs_; without this sharing, the conv weights
// stay frozen at their initial values and the net cannot learn — which is
// exactly the "quite low accuracy" symptom observed with MNIST.
this->blobs_.resize(conv_layer_->blobs().size());
for (int i = 0; i < conv_layer_->blobs().size(); ++i) {
  this->blobs_[i] = conv_layer_->blobs()[i];  // share, don't copy
}
}
template <typename Dtype>
// Forward pass: delegate entirely to the wrapped convolution layer.
// conv_bottom_vec_/conv_top_vec_ alias bottom[0]/top[0] (set in LayerSetUp),
// so the conv output is written straight into this layer's top blob.
void MsgPassLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {
conv_layer_->Forward(conv_bottom_vec_, conv_top_vec_);
}
template <typename Dtype>
// Reshape: propagate any shape change (e.g. a different batch size between
// train and test phases) to the wrapped conv layer so it resizes top[0] and
// its internal buffers. The original empty body left top[0] shaped only once
// at SetUp time, which breaks as soon as the input shape changes.
void MsgPassLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {
conv_layer_->Reshape(conv_bottom_vec_, conv_top_vec_);
}
template <typename Dtype>
// Backward pass: delegate gradient computation to the wrapped conv layer.
// Since conv_top_vec_/conv_bottom_vec_ alias this layer's top[0]/bottom[0],
// the diffs land directly in the outer layer's blobs; propagate_down is
// forwarded unchanged.
void MsgPassLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
conv_layer_->Backward(conv_top_vec_, propagate_down, conv_bottom_vec_);
}
// Instantiate the float/double template specializations and register the
// layer with Caffe's factory so type "MsgPass" is usable in prototxt files.
INSTANTIATE_CLASS(MsgPassLayer);
REGISTER_LAYER_CLASS(MsgPass);