Check failed: data_

261 views
Skip to first unread message

Ali MassahKiani

unread,
Jun 9, 2017, 6:43:58 AM6/9/17
to Caffe Users
When I try to run Caffe, an exception occurs. Caffe output:
I0609 15:11:37.034447 10992 caffe.cpp:219] Using GPUs 0
I0609
15:11:37.660609 10992 caffe.cpp:224] GPU 0: GeForce GTX 960M
I0609
15:11:38.004349 10992 common.cpp:36] System entropy source not available, using fallback algorithm to generate seed instead.
I0609
15:11:38.035588 10992 solver.cpp:44] Initializing solver from parameters:
test_iter
: 18
test_interval
: 500
base_lr
: 0.0015
display
: 500
max_iter
: 200000
lr_policy
: "step"
gamma
: 0.95
momentum
: 0.9
weight_decay
: 0.0005
stepsize
: 5000
snapshot
: 5000
snapshot_prefix
: "D:/Deep/eye/snapShot/eye"
solver_mode
: GPU
device_id
: 0
net
: "D:/Deep/eye/caffe/eye_network.prototxt"
train_state
{
  level
: 0
  stage
: ""
}
type
: "AdaDelta"
I0609
15:11:38.035588 10992 solver.cpp:87] Creating training net from net file: D:/Deep/eye/caffe/eye_network.prototxt
I0609
15:11:38.035588 10992 net.cpp:294] The NetState phase (0) differed from the phase (1) specified by a rule in layer data
I0609
15:11:38.035588 10992 net.cpp:294] The NetState phase (0) differed from the phase (1) specified by a rule in layer accuracy
I0609
15:11:38.035588 10992 net.cpp:51] Initializing net from parameters:
name
: "CaffeNet"
state
{
  phase
: TRAIN
  level
: 0
  stage
: ""
}
layer
{
  name
: "data"
  type
: "Data"
  top
: "data"
  top
: "label"
  include
{
    phase
: TRAIN
 
}
  transform_param
{
    mirror
: true
    mean_file
: "D:/Deep/eye/MeanFiles/mean.binaryproto"
 
}
  data_param
{
    source
: "D:/Deep/eye/DataBases/train"
    batch_size
: 64
    backend
: LMDB
 
}
}
layer
{
  name
: "conv1"
  type
: "Convolution"
  bottom
: "data"
  top
: "conv1"
  param
{
    lr_mult
: 1
    decay_mult
: 1
 
}
  param
{
    lr_mult
: 2
    decay_mult
: 0
 
}
  convolution_param
{
    num_output
: 96
    kernel_size
: 11
    stride
: 4
    weight_filler
{
      type
: "gaussian"
      std
: 0.01
   
}
    bias_filler
{
      type
: "constant"
      value
: 0
   
}
 
}
}
layer
{
  name
: "relu1"
  type
: "ReLU"
  bottom
: "conv1"
  top
: "conv1"
}
layer
{
  name
: "pool1"
  type
: "Pooling"
  bottom
: "conv1"
  top
: "pool1"
  pooling_param
{
    pool
: MAX
    kernel_size
: 3
    stride
: 2
 
}
}
layer
{
  name
: "norm1"
  type
: "LRN"
  bottom
: "pool1"
  top
: "norm1"
  lrn_param
{
    local_size
: 5
    alpha
: 0.0001
    beta
: 0.75
 
}
}
layer
{
  name
: "conv2"
  type
: "Convolution"
  bottom
: "norm1"
  top
: "conv2"
  param
{
    lr_mult
: 1
    decay_mult
: 1
 
}
  param
{
    lr_mult
: 2
    decay_mult
: 0
 
}
  convolution_param
{
    num_output
: 256
    pad
: 2
    kernel_size
: 5
   
group: 2
    weight_filler
{
      type
: "gaussian"
      std
: 0.01
   
}
    bias_filler
{
      type
: "constant"
      value
: 1
   
}
 
}
}
layer
{
  name
: "relu2"
  type
: "ReLU"
  bottom
: "conv2"
  top
: "conv2"
}
layer
{
  name
: "pool2"
  type
: "Pooling"
  bottom
: "conv2"
  top
: "pool2"
  pooling_param
{
    pool
: MAX
    kernel_size
: 3
    stride
: 2
 
}
}
layer
{
  name
: "norm2"
  type
: "LRN"
  bottom
: "pool2"
  top
: "norm2"
  lrn_param
{
    local_size
: 5
    alpha
: 0.0001
    beta
: 0.75
 
}
}
layer
{
  name
: "conv3"
  type
: "Convolution"
  bottom
: "norm2"
  top
: "conv3"
  param
{
    lr_mult
: 1
    decay_mult
: 1
 
}
  param
{
    lr_mult
: 2
    decay_mult
: 0
 
}
  convolution_param
{
    num_output
: 384
    pad
: 1
    kernel_size
: 3
    weight_filler
{
      type
: "gaussian"
      std
: 0.01
   
}
    bias_filler
{
      type
: "constant"
      value
: 0
   
}
 
}
}
layer
{
  name
: "relu3"
  type
: "ReLU"
  bottom
: "conv3"
  top
: "conv3"
}
layer
{
  name
: "conv4"
  type
: "Convolution"
  bottom
: "conv3"
  top
: "conv4"
  param
{
    lr_mult
: 1
    decay_mult
: 1
 
}
  param
{
    lr_mult
: 2
    decay_mult
: 0
 
}
  convolution_param
{
    num_output
: 384
    pad
: 1
    kernel_size
: 3
   
group: 2
    weight_filler
{
      type
: "gaussian"
      std
: 0.01
   
}
    bias_filler
{
      type
: "constant"
      value
: 1
   
}
 
}
}
layer
{
  name
: "relu4"
  type
: "ReLU"
  bottom
: "conv4"
  top
: "conv4"
}
layer
{
  name
: "conv5"
  type
: "Convolution"
  bottom
: "conv4"
  top
: "conv5"
  param
{
    lr_mult
: 1
    decay_mult
: 1
 
}
  param
{
    lr_mult
: 2
    decay_mult
: 0
 
}
  convolution_param
{
    num_output
: 256
    pad
: 1
    kernel_size
: 3
   
group: 2
    weight_filler
{
      type
: "gaussian"
      std
: 0.01
   
}
    bias_filler
{
      type
: "constant"
      value
: 1
   
}
 
}
}
layer
{
  name
: "relu5"
  type
: "ReLU"
  bottom
: "conv5"
  top
: "conv5"
}
layer
{
  name
: "pool5"
  type
: "Pooling"
  bottom
: "conv5"
  top
: "pool5"
  pooling_param
{
    pool
: MAX
    kernel_size
: 3
    stride
: 2
 
}
}
layer
{
  name
: "fc6"
  type
: "InnerProduct"
  bottom
: "pool5"
  top
: "fc6"
  param
{
    lr_mult
: 1
    decay_mult
: 1
 
}
  param
{
    lr_mult
: 2
    decay_mult
: 0
 
}
  inner_product_param
{
    num_output
: 4096
    weight_filler
{
      type
: "gaussian"
      std
: 0.005
   
}
    bias_filler
{
      type
: "constant"
      value
: 1
   
}
 
}
}
layer
{
  name
: "relu6"
  type
: "ReLU"
  bottom
: "fc6"
  top
: "fc6"
}
layer
{
  name
: "drop6"
  type
: "Dropout"
  bottom
: "fc6"
  top
: "fc6"
  dropout_param
{
    dropout_ratio
: 0.5
 
}
}
layer
{
  name
: "fc7"
  type
: "InnerProduct"
  bottom
: "fc6"
  top
: "fc7"
  param
{
    lr_mult
: 1
    decay_mult
: 1
 
}
  param
{
    lr_mult
: 2
    decay_mult
: 0
 
}
  inner_product_param
{
    num_output
: 4096
    weight_filler
{
      type
: "gaussian"
      std
: 0.005
   
}
    bias_filler
{
      type
: "constant"
      value
: 1
   
}
 
}
}
layer
{
  name
: "relu7"
  type
: "ReLU"
  bottom
: "fc7"
  top
: "fc7"
}
layer
{
  name
: "drop7"
  type
: "Dropout"
  bottom
: "fc7"
  top
: "fc7"
  dropout_param
{
    dropout_ratio
: 0.5
 
}
}
layer
{
  name
: "fc8"
  type
: "InnerProduct"
  bottom
: "fc7"
  top
: "fc8"
  param
{
    lr_mult
: 1
    decay_mult
: 1
 
}
  param
{
    lr_mult
: 2
    decay_mult
: 0
 
}
  inner_product_param
{
    num_output
: 2
    weight_filler
{
      type
: "gaussian"
      std
: 0.01
   
}
    bias_filler
{
      type
: "constant"
      value
: 0
   
}
 
}
}
layer
{
  name
: "loss"
  type
: "SoftmaxWithLoss"
  bottom
: "fc8"
  bottom
: "label"
  top
: "loss"
}
I0609
15:11:38.035588 10992 layer_factory.cpp:66] Creating layer data
I0609
15:11:38.035588 10992 db_lmdb.cpp:40] Opened lmdb D:/Deep/eye/DataBases/train
I0609
15:11:38.035588 10992 net.cpp:84] Creating Layer data
I0609
15:11:38.035588 10992 net.cpp:380] data -> data
I0609
15:11:38.035588 10992 net.cpp:380] data -> label
I0609
15:11:38.035588 10992 data_transformer.cpp:25] Loading mean file from: D:/Deep/eye/MeanFiles/mean.binaryproto
I0609
15:11:38.051213 10992 common.cpp:36] System entropy source not available, using fallback algorithm to generate seed instead.
I0609
15:11:38.051213 10992 data_layer.cpp:45] output data size: 64,3,24,24
I0609
15:11:38.066792 10992 net.cpp:122] Setting up data
I0609
15:11:38.066792 10992 net.cpp:129] Top shape: 64 3 24 24 (110592)
I0609
15:11:38.066792 10992 net.cpp:129] Top shape: 64 (64)
I0609
15:11:38.066792 10992 net.cpp:137] Memory required for data: 442624
I0609
15:11:38.066792 10992 layer_factory.cpp:66] Creating layer conv1
I0609
15:11:38.066792 12248 common.cpp:36] System entropy source not available, using fallback algorithm to generate seed instead.
I0609
15:11:38.066792 10992 net.cpp:84] Creating Layer conv1
I0609
15:11:38.066792 10992 net.cpp:406] conv1 <- data
I0609
15:11:38.066792 10992 net.cpp:380] conv1 -> conv1
I0609
15:11:38.066792 10992 net.cpp:122] Setting up conv1
I0609
15:11:38.082458 10992 net.cpp:129] Top shape: 64 96 4 4 (98304)
I0609
15:11:38.082458 10992 net.cpp:137] Memory required for data: 835840
I0609
15:11:38.082458 10992 layer_factory.cpp:66] Creating layer relu1
I0609
15:11:38.098103 10992 net.cpp:84] Creating Layer relu1
I0609
15:11:38.098103 10992 net.cpp:406] relu1 <- conv1
I0609
15:11:38.098103 10992 net.cpp:367] relu1 -> conv1 (in-place)
I0609
15:11:38.098103 10992 net.cpp:122] Setting up relu1
I0609
15:11:38.098103 10992 net.cpp:129] Top shape: 64 96 4 4 (98304)
I0609
15:11:38.098103 10992 net.cpp:137] Memory required for data: 1229056
I0609
15:11:38.098103 10992 layer_factory.cpp:66] Creating layer pool1
I0609
15:11:38.098103 10992 net.cpp:84] Creating Layer pool1
I0609
15:11:38.098103 10992 net.cpp:406] pool1 <- conv1
I0609
15:11:38.098103 10992 net.cpp:380] pool1 -> pool1
I0609
15:11:38.098103 10992 net.cpp:122] Setting up pool1
I0609
15:11:38.098103 10992 net.cpp:129] Top shape: 64 96 2 2 (24576)
I0609
15:11:38.098103 10992 net.cpp:137] Memory required for data: 1327360
I0609
15:11:38.098103 10992 layer_factory.cpp:66] Creating layer norm1
I0609
15:11:38.098103 10992 net.cpp:84] Creating Layer norm1
I0609
15:11:38.098103 10992 net.cpp:406] norm1 <- pool1
I0609
15:11:38.098103 10992 net.cpp:380] norm1 -> norm1
I0609
15:11:38.113674 10992 net.cpp:122] Setting up norm1
I0609
15:11:38.113674 10992 net.cpp:129] Top shape: 64 96 2 2 (24576)
I0609
15:11:38.113674 10992 net.cpp:137] Memory required for data: 1425664
I0609
15:11:38.113674 10992 layer_factory.cpp:66] Creating layer conv2
I0609
15:11:38.129328 10992 net.cpp:84] Creating Layer conv2
I0609
15:11:38.129328 10992 net.cpp:406] conv2 <- norm1
I0609
15:11:38.129328 10992 net.cpp:380] conv2 -> conv2
I0609
15:11:38.129328 10992 net.cpp:122] Setting up conv2
I0609
15:11:38.129328 10992 net.cpp:129] Top shape: 64 256 2 2 (65536)
I0609
15:11:38.129328 10992 net.cpp:137] Memory required for data: 1687808
I0609
15:11:38.129328 10992 layer_factory.cpp:66] Creating layer relu2
I0609
15:11:38.129328 10992 net.cpp:84] Creating Layer relu2
I0609
15:11:38.129328 10992 net.cpp:406] relu2 <- conv2
I0609
15:11:38.129328 10992 net.cpp:367] relu2 -> conv2 (in-place)
I0609
15:11:38.129328 10992 net.cpp:122] Setting up relu2
I0609
15:11:38.144934 10992 net.cpp:129] Top shape: 64 256 2 2 (65536)
I0609
15:11:38.144934 10992 net.cpp:137] Memory required for data: 1949952
I0609
15:11:38.144934 10992 layer_factory.cpp:66] Creating layer pool2
I0609
15:11:38.144934 10992 net.cpp:84] Creating Layer pool2
I0609
15:11:38.160554 10992 net.cpp:406] pool2 <- conv2
I0609
15:11:38.160554 10992 net.cpp:380] pool2 -> pool2
I0609
15:11:38.160554 10992 net.cpp:122] Setting up pool2
I0609
15:11:38.160554 10992 net.cpp:129] Top shape: 64 256 1 1 (16384)
I0609
15:11:38.160554 10992 net.cpp:137] Memory required for data: 2015488
I0609
15:11:38.160554 10992 layer_factory.cpp:66] Creating layer norm2
I0609
15:11:38.160554 10992 net.cpp:84] Creating Layer norm2
I0609
15:11:38.160554 10992 net.cpp:406] norm2 <- pool2
I0609
15:11:38.160554 10992 net.cpp:380] norm2 -> norm2
I0609
15:11:38.160554 10992 net.cpp:122] Setting up norm2
I0609
15:11:38.160554 10992 net.cpp:129] Top shape: 64 256 1 1 (16384)
I0609
15:11:38.176208 10992 net.cpp:137] Memory required for data: 2081024
I0609
15:11:38.176208 10992 layer_factory.cpp:66] Creating layer conv3
I0609
15:11:38.191799 10992 net.cpp:84] Creating Layer conv3
I0609
15:11:38.191799 10992 net.cpp:406] conv3 <- norm2
I0609
15:11:38.191799 10992 net.cpp:380] conv3 -> conv3
I0609
15:11:38.191799 10992 net.cpp:122] Setting up conv3
I0609
15:11:38.191799 10992 net.cpp:129] Top shape: 64 384 1 1 (24576)
I0609
15:11:38.207413 10992 net.cpp:137] Memory required for data: 2179328
I0609
15:11:38.207413 10992 layer_factory.cpp:66] Creating layer relu3
I0609
15:11:38.207413 10992 net.cpp:84] Creating Layer relu3
I0609
15:11:38.223059 10992 net.cpp:406] relu3 <- conv3
I0609
15:11:38.223059 10992 net.cpp:367] relu3 -> conv3 (in-place)
I0609
15:11:38.223059 10992 net.cpp:122] Setting up relu3
I0609
15:11:38.223059 10992 net.cpp:129] Top shape: 64 384 1 1 (24576)
I0609
15:11:38.223059 10992 net.cpp:137] Memory required for data: 2277632
I0609
15:11:38.223059 10992 layer_factory.cpp:66] Creating layer conv4
I0609
15:11:38.223059 10992 net.cpp:84] Creating Layer conv4
I0609
15:11:38.223059 10992 net.cpp:406] conv4 <- conv3
I0609
15:11:38.223059 10992 net.cpp:380] conv4 -> conv4
I0609
15:11:38.238728 10992 net.cpp:122] Setting up conv4
I0609
15:11:38.238728 10992 net.cpp:129] Top shape: 64 384 1 1 (24576)
I0609
15:11:38.238728 10992 net.cpp:137] Memory required for data: 2375936
I0609
15:11:38.238728 10992 layer_factory.cpp:66] Creating layer relu4
I0609
15:11:38.254323 10992 net.cpp:84] Creating Layer relu4
I0609
15:11:38.254323 10992 net.cpp:406] relu4 <- conv4
I0609
15:11:38.254323 10992 net.cpp:367] relu4 -> conv4 (in-place)
I0609
15:11:38.254323 10992 net.cpp:122] Setting up relu4
I0609
15:11:38.254323 10992 net.cpp:129] Top shape: 64 384 1 1 (24576)
I0609
15:11:38.254323 10992 net.cpp:137] Memory required for data: 2474240
I0609
15:11:38.254323 10992 layer_factory.cpp:66] Creating layer conv5
I0609
15:11:38.254323 10992 net.cpp:84] Creating Layer conv5
I0609
15:11:38.254323 10992 net.cpp:406] conv5 <- conv4
I0609
15:11:38.254323 10992 net.cpp:380] conv5 -> conv5
I0609
15:11:38.269917 10992 net.cpp:122] Setting up conv5
I0609
15:11:38.269917 10992 net.cpp:129] Top shape: 64 256 1 1 (16384)
I0609
15:11:38.269917 10992 net.cpp:137] Memory required for data: 2539776
I0609
15:11:38.269917 10992 layer_factory.cpp:66] Creating layer relu5
I0609
15:11:38.269917 10992 net.cpp:84] Creating Layer relu5
I0609
15:11:38.285559 10992 net.cpp:406] relu5 <- conv5
I0609
15:11:38.285559 10992 net.cpp:367] relu5 -> conv5 (in-place)
I0609
15:11:38.285559 10992 net.cpp:122] Setting up relu5
I0609
15:11:38.285559 10992 net.cpp:129] Top shape: 64 256 1 1 (16384)
I0609
15:11:38.285559 10992 net.cpp:137] Memory required for data: 2605312
I0609
15:11:38.285559 10992 layer_factory.cpp:66] Creating layer pool5
I0609
15:11:38.285559 10992 net.cpp:84] Creating Layer pool5
I0609
15:11:38.285559 10992 net.cpp:406] pool5 <- conv5
I0609
15:11:38.285559 10992 net.cpp:380] pool5 -> pool5
I0609
15:11:38.285559 10992 net.cpp:122] Setting up pool5
I0609
15:11:38.285559 10992 net.cpp:129] Top shape: 64 256 0 0 (0)
I0609
15:11:38.285559 10992 net.cpp:137] Memory required for data: 2605312
I0609
15:11:38.285559 10992 layer_factory.cpp:66] Creating layer fc6
I0609
15:11:38.301179 10992 net.cpp:84] Creating Layer fc6
I0609
15:11:38.301179 10992 net.cpp:406] fc6 <- pool5
I0609
15:11:38.301179 10992 net.cpp:380] fc6 -> fc6
F0609
15:11:38.316798 10992 blob.cpp:133] Check failed: data_
*** Check failure stack trace: ***

Reply all
Reply to author
Forward
0 new messages