# Serialize each encoded systole label into the label LMDB, one Datum per sample.
# NOTE(review): each `label` is a 600-element 0/1 encoded vector (see comment below),
# stored as a (600, 1, 1) array. A 600-vector target is NOT compatible with a
# SoftmaxWithLoss layer (which expects one integer class id per sample) — pair this
# DB with an element-wise loss such as SigmoidCrossEntropyLoss or EuclideanLoss.
with db_10img_encoded_label_systole.begin(write=True) as txn_img:
    for label in systole_encode:
        # label is an array of 600 integers; reshape to (C=600, H=1, W=1) as
        # caffe.io.array_to_datum expects a 3-D (C, H, W) array.
        datum = caffe.io.array_to_datum(np.expand_dims(np.expand_dims(label, axis=1), axis=1))
        # LMDB keys must be bytes (required on Python 3; a no-op change on Python 2).
        # Zero-padded to 10 digits so keys sort in insertion order.
        key = "{:0>10d}".format(systole_count).encode("ascii")
        txn_img.put(key, datum.SerializeToString())
        systole_count += 1
# FCN training net.
# Labels are 600-element encoded 0/1 vectors (one per sample), so the final loss
# must be element-wise. The original SoftmaxWithLoss layer fails at runtime with
#   "Check failed: outer_num_ * inner_num_ == bottom[1]->count() (1 vs. 600)"
# because it expects a single integer class label per sample. Replaced with
# SigmoidCrossEntropyLoss, which matches the 600-dim fcc6 output against the
# 600-dim label vector element by element.
name: "FCN"
force_backward: true

layer {
  name: "data"
  type: "Data"
  top: "data"
  transform_param {
    mirror: false
    crop_size: 0
    mean_value: 0
  }
  data_param {
    source: "10_img_train_from_csv/"
    batch_size: 1
    backend: LMDB
  }
}
layer {
  name: "label"
  type: "Data"
  top: "label"
  data_param {
    source: "db_10img_encoded_label_systole/"
    batch_size: 1
    backend: LMDB
  }
}

layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  param { lr_mult: 1 decay_mult: 1 }
  param { lr_mult: 2 decay_mult: 0 }
  convolution_param {
    num_output: 100
    pad: 0
    kernel_size: 5
    group: 1
    stride: 1
    weight_filler { type: "gaussian" mean: 0.0 std: 0.01 }
    bias_filler { type: "constant" value: 0.1 }
  }
}
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
}

layer {
  name: "conv2"
  type: "Convolution"
  bottom: "conv1"
  top: "conv2"
  param { lr_mult: 1 decay_mult: 1 }
  param { lr_mult: 2 decay_mult: 0 }
  convolution_param {
    num_output: 200
    pad: 0
    kernel_size: 3
    group: 1
    stride: 1
    weight_filler { type: "gaussian" mean: 0.0 std: 0.01 }
    bias_filler { type: "constant" value: 0.1 }
  }
}
layer {
  name: "relu2"
  type: "ReLU"
  bottom: "conv2"
  top: "conv2"
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}

layer {
  name: "conv3"
  type: "Convolution"
  bottom: "pool2"
  top: "conv3"
  param { lr_mult: 1 decay_mult: 1 }
  param { lr_mult: 2 decay_mult: 0 }
  convolution_param {
    num_output: 300
    pad: 0
    kernel_size: 3
    group: 1
    stride: 1
    weight_filler { type: "gaussian" mean: 0.0 std: 0.01 }
    bias_filler { type: "constant" value: 0.1 }
  }
}
layer {
  name: "relu3"
  type: "ReLU"
  bottom: "conv3"
  top: "conv3"
}

layer {
  name: "conv4"
  type: "Convolution"
  bottom: "conv3"
  top: "conv4"
  param { lr_mult: 1 decay_mult: 1 }
  param { lr_mult: 2 decay_mult: 0 }
  convolution_param {
    num_output: 300
    pad: 0
    kernel_size: 3
    group: 1
    stride: 1
    weight_filler { type: "gaussian" mean: 0.0 std: 0.01 }
    bias_filler { type: "constant" value: 0.1 }
  }
}
layer {
  name: "relu4"
  type: "ReLU"
  bottom: "conv4"
  top: "conv4"
}
layer {
  name: "pool4"
  type: "Pooling"
  bottom: "conv4"
  top: "pool4"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "drop4"
  type: "Dropout"
  bottom: "pool4"
  top: "pool4"
  dropout_param {
    dropout_ratio: 0.5
  }
}

layer {
  name: "fcc5"
  type: "InnerProduct"
  bottom: "pool4"
  top: "fcc5"
  inner_product_param {
    num_output: 1000
  }
}
layer {
  name: "relu5"
  type: "ReLU"
  bottom: "fcc5"
  top: "fcc5"
}
layer {
  name: "fcc6"
  type: "InnerProduct"
  bottom: "fcc5"
  top: "fcc6"
  inner_product_param {
    num_output: 600
  }
}

# Element-wise loss: compares the 600 logits of fcc6 against the 600-element
# 0/1 label vector. (SoftmaxWithLoss would require a single integer label and
# fails the count check — see note at top of file.)
layer {
  name: "loss"
  type: "SigmoidCrossEntropyLoss"
  bottom: "fcc6"
  bottom: "label"
  top: "loss"
}
softmax_loss_layer.cpp:42] Check failed: outer_num_ * inner_num_ == bottom[1]->count() (1 vs. 600) Number of labels must match number of predictions; e.g., if softmax axis == 1 and prediction shape is (N, C, H, W), label count (number of labels) must be N*H*W, with integer values in {0, 1, ..., C-1}.