DIGITS-CNN/cars/architecture-investigations/fc/1-layer/4096/caffe_output.log

I0409 21:17:28.075495 25006 upgrade_proto.cpp:1082] Attempting to upgrade input file specified using deprecated 'solver_type' field (enum)': /mnt/bigdisk/DIGITS-MAN-2/digits/jobs/20210409-204921-7cb9/solver.prototxt
I0409 21:17:28.075667 25006 upgrade_proto.cpp:1089] Successfully upgraded file specified using deprecated 'solver_type' field (enum) to 'type' field (string).
W0409 21:17:28.075675 25006 upgrade_proto.cpp:1091] Note that future Caffe releases will only support 'type' field (string) for a solver's type.
I0409 21:17:28.075744 25006 caffe.cpp:218] Using GPUs 3
I0409 21:17:28.104261 25006 caffe.cpp:223] GPU 3: GeForce GTX 1080 Ti
I0409 21:17:28.376274 25006 solver.cpp:44] Initializing solver from parameters:
test_iter: 51
test_interval: 102
base_lr: 0.01
display: 12
max_iter: 10200
lr_policy: "exp"
gamma: 0.99980193
momentum: 0.9
weight_decay: 0.0001
snapshot: 102
snapshot_prefix: "snapshot"
solver_mode: GPU
device_id: 3
net: "train_val.prototxt"
train_state {
level: 0
stage: ""
}
type: "SGD"
I0409 21:17:28.377038 25006 solver.cpp:87] Creating training net from net file: train_val.prototxt
I0409 21:17:28.377640 25006 net.cpp:294] The NetState phase (0) differed from the phase (1) specified by a rule in layer val-data
I0409 21:17:28.377660 25006 net.cpp:294] The NetState phase (0) differed from the phase (1) specified by a rule in layer accuracy
I0409 21:17:28.377806 25006 net.cpp:51] Initializing net from parameters:
state {
phase: TRAIN
level: 0
stage: ""
}
layer {
name: "train-data"
type: "Data"
top: "data"
top: "label"
include {
phase: TRAIN
}
transform_param {
mirror: true
crop_size: 227
mean_file: "/mnt/bigdisk/DIGITS-MAN-2/digits/jobs/20210407-214532-d396/mean.binaryproto"
}
data_param {
source: "/mnt/bigdisk/DIGITS-MAN-2/digits/jobs/20210407-214532-d396/train_db"
batch_size: 128
backend: LMDB
}
}
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 96
kernel_size: 11
stride: 4
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "relu1"
type: "ReLU"
bottom: "conv1"
top: "conv1"
}
layer {
name: "norm1"
type: "LRN"
bottom: "conv1"
top: "norm1"
lrn_param {
local_size: 5
alpha: 0.0001
beta: 0.75
}
}
layer {
name: "pool1"
type: "Pooling"
bottom: "norm1"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "conv2"
type: "Convolution"
bottom: "pool1"
top: "conv2"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 256
pad: 2
kernel_size: 5
group: 2
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0.1
}
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "conv2"
top: "conv2"
}
layer {
name: "norm2"
type: "LRN"
bottom: "conv2"
top: "norm2"
lrn_param {
local_size: 5
alpha: 0.0001
beta: 0.75
}
}
layer {
name: "pool2"
type: "Pooling"
bottom: "norm2"
top: "pool2"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "conv3"
type: "Convolution"
bottom: "pool2"
top: "conv3"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 384
pad: 1
kernel_size: 3
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "relu3"
type: "ReLU"
bottom: "conv3"
top: "conv3"
}
layer {
name: "conv4"
type: "Convolution"
bottom: "conv3"
top: "conv4"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 384
pad: 1
kernel_size: 3
group: 2
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0.1
}
}
}
layer {
name: "relu4"
type: "ReLU"
bottom: "conv4"
top: "conv4"
}
layer {
name: "conv5"
type: "Convolution"
bottom: "conv4"
top: "conv5"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
group: 2
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0.1
}
}
}
layer {
name: "relu5"
type: "ReLU"
bottom: "conv5"
top: "conv5"
}
layer {
name: "pool5"
type: "Pooling"
bottom: "conv5"
top: "pool5"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "fc6"
type: "InnerProduct"
bottom: "pool5"
top: "fc6"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
inner_product_param {
num_output: 4096
weight_filler {
type: "gaussian"
std: 0.005
}
bias_filler {
type: "constant"
value: 0.1
}
}
}
layer {
name: "relu6"
type: "ReLU"
bottom: "fc6"
top: "fc6"
}
layer {
name: "drop6"
type: "Dropout"
bottom: "fc6"
top: "fc6"
dropout_param {
dropout_ratio: 0.5
}
}
layer {
name: "fc8"
type: "InnerProduct"
bottom: "fc6"
top: "fc8"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
inner_product_param {
num_output: 196
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "fc8"
bottom: "label"
top: "loss"
}
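
The training net above is AlexNet trimmed to a single hidden fully-connected layer (fc6 at 4096 units, no fc7), with fc8 resized to 196 outputs for the car classes. The spatial sizes Caffe reports while setting up the layers below (55, 27, 13, 6) follow from the usual convolution/pooling arithmetic; a quick sketch, assuming the standard formula out = floor((in + 2*pad - k)/stride) + 1 (Caffe's pooling rounds up, which gives the same numbers for these kernel/stride choices):

    # Sketch: reproduce the "Top shape" spatial sizes printed during setup below.
    def out_size(in_size, kernel, stride=1, pad=0):
        return (in_size + 2 * pad - kernel) // stride + 1

    s = out_size(227, 11, stride=4)   # conv1 -> 55
    s = out_size(s, 3, stride=2)      # pool1 -> 27
    s = out_size(s, 5, pad=2)         # conv2 -> 27
    s = out_size(s, 3, stride=2)      # pool2 -> 13
    # conv3, conv4, conv5 are 3x3 / pad 1 / stride 1, so the size stays at 13
    s = out_size(s, 3, stride=2)      # pool5 -> 6
    print(s, 256 * s * s)             # 6 9216, the flattened input fc6 sees
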
I0409 21:17:28.377900 25006 layer_factory.hpp:77] Creating layer train-data
I0409 21:17:28.379285 25006 db_lmdb.cpp:35] Opened lmdb /mnt/bigdisk/DIGITS-MAN-2/digits/jobs/20210407-214532-d396/train_db
I0409 21:17:28.379494 25006 net.cpp:84] Creating Layer train-data
I0409 21:17:28.379506 25006 net.cpp:380] train-data -> data
I0409 21:17:28.379528 25006 net.cpp:380] train-data -> label
I0409 21:17:28.379540 25006 data_transformer.cpp:25] Loading mean file from: /mnt/bigdisk/DIGITS-MAN-2/digits/jobs/20210407-214532-d396/mean.binaryproto
I0409 21:17:28.384963 25006 data_layer.cpp:45] output data size: 128,3,227,227
I0409 21:17:28.508553 25006 net.cpp:122] Setting up train-data
I0409 21:17:28.508576 25006 net.cpp:129] Top shape: 128 3 227 227 (19787136)
I0409 21:17:28.508581 25006 net.cpp:129] Top shape: 128 (128)
I0409 21:17:28.508585 25006 net.cpp:137] Memory required for data: 79149056
I0409 21:17:28.508595 25006 layer_factory.hpp:77] Creating layer conv1
I0409 21:17:28.508615 25006 net.cpp:84] Creating Layer conv1
I0409 21:17:28.508622 25006 net.cpp:406] conv1 <- data
I0409 21:17:28.508635 25006 net.cpp:380] conv1 -> conv1
I0409 21:17:29.065069 25006 net.cpp:122] Setting up conv1
I0409 21:17:29.065093 25006 net.cpp:129] Top shape: 128 96 55 55 (37171200)
I0409 21:17:29.065097 25006 net.cpp:137] Memory required for data: 227833856
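
The running "Memory required for data" counter is the cumulative size of every top blob allocated so far, counted in 4-byte floats. A quick check against the two values printed above:

    # Check of the cumulative top-blob byte counts printed during setup (float32, 4 bytes/element).
    data_top  = 128 * 3 * 227 * 227   # 19787136 elements, as logged for train-data
    label_top = 128
    conv1_top = 128 * 96 * 55 * 55    # 37171200 elements, as logged for conv1
    print((data_top + label_top) * 4)              # 79149056 bytes, printed after train-data
    print((data_top + label_top + conv1_top) * 4)  # 227833856 bytes, printed after conv1
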
I0409 21:17:29.065116 25006 layer_factory.hpp:77] Creating layer relu1
I0409 21:17:29.065127 25006 net.cpp:84] Creating Layer relu1
I0409 21:17:29.065132 25006 net.cpp:406] relu1 <- conv1
I0409 21:17:29.065140 25006 net.cpp:367] relu1 -> conv1 (in-place)
I0409 21:17:29.065425 25006 net.cpp:122] Setting up relu1
I0409 21:17:29.065434 25006 net.cpp:129] Top shape: 128 96 55 55 (37171200)
I0409 21:17:29.065439 25006 net.cpp:137] Memory required for data: 376518656
I0409 21:17:29.065443 25006 layer_factory.hpp:77] Creating layer norm1
I0409 21:17:29.065452 25006 net.cpp:84] Creating Layer norm1
I0409 21:17:29.065457 25006 net.cpp:406] norm1 <- conv1
I0409 21:17:29.065464 25006 net.cpp:380] norm1 -> norm1
I0409 21:17:29.065902 25006 net.cpp:122] Setting up norm1
I0409 21:17:29.065913 25006 net.cpp:129] Top shape: 128 96 55 55 (37171200)
I0409 21:17:29.065917 25006 net.cpp:137] Memory required for data: 525203456
I0409 21:17:29.065922 25006 layer_factory.hpp:77] Creating layer pool1
I0409 21:17:29.065930 25006 net.cpp:84] Creating Layer pool1
I0409 21:17:29.065934 25006 net.cpp:406] pool1 <- norm1
I0409 21:17:29.065940 25006 net.cpp:380] pool1 -> pool1
I0409 21:17:29.066011 25006 net.cpp:122] Setting up pool1
I0409 21:17:29.066018 25006 net.cpp:129] Top shape: 128 96 27 27 (8957952)
I0409 21:17:29.066023 25006 net.cpp:137] Memory required for data: 561035264
I0409 21:17:29.066027 25006 layer_factory.hpp:77] Creating layer conv2
I0409 21:17:29.066038 25006 net.cpp:84] Creating Layer conv2
I0409 21:17:29.066042 25006 net.cpp:406] conv2 <- pool1
I0409 21:17:29.066048 25006 net.cpp:380] conv2 -> conv2
I0409 21:17:29.074885 25006 net.cpp:122] Setting up conv2
I0409 21:17:29.074903 25006 net.cpp:129] Top shape: 128 256 27 27 (23887872)
I0409 21:17:29.074906 25006 net.cpp:137] Memory required for data: 656586752
I0409 21:17:29.074918 25006 layer_factory.hpp:77] Creating layer relu2
I0409 21:17:29.074926 25006 net.cpp:84] Creating Layer relu2
I0409 21:17:29.074931 25006 net.cpp:406] relu2 <- conv2
I0409 21:17:29.074939 25006 net.cpp:367] relu2 -> conv2 (in-place)
I0409 21:17:29.076439 25006 net.cpp:122] Setting up relu2
I0409 21:17:29.076449 25006 net.cpp:129] Top shape: 128 256 27 27 (23887872)
I0409 21:17:29.076452 25006 net.cpp:137] Memory required for data: 752138240
I0409 21:17:29.076457 25006 layer_factory.hpp:77] Creating layer norm2
I0409 21:17:29.076464 25006 net.cpp:84] Creating Layer norm2
I0409 21:17:29.076468 25006 net.cpp:406] norm2 <- conv2
I0409 21:17:29.076473 25006 net.cpp:380] norm2 -> norm2
I0409 21:17:29.076766 25006 net.cpp:122] Setting up norm2
I0409 21:17:29.076773 25006 net.cpp:129] Top shape: 128 256 27 27 (23887872)
I0409 21:17:29.076777 25006 net.cpp:137] Memory required for data: 847689728
I0409 21:17:29.076781 25006 layer_factory.hpp:77] Creating layer pool2
I0409 21:17:29.076789 25006 net.cpp:84] Creating Layer pool2
I0409 21:17:29.076793 25006 net.cpp:406] pool2 <- norm2
I0409 21:17:29.076798 25006 net.cpp:380] pool2 -> pool2
I0409 21:17:29.076824 25006 net.cpp:122] Setting up pool2
I0409 21:17:29.076830 25006 net.cpp:129] Top shape: 128 256 13 13 (5537792)
I0409 21:17:29.076833 25006 net.cpp:137] Memory required for data: 869840896
I0409 21:17:29.076838 25006 layer_factory.hpp:77] Creating layer conv3
I0409 21:17:29.076846 25006 net.cpp:84] Creating Layer conv3
I0409 21:17:29.076850 25006 net.cpp:406] conv3 <- pool2
I0409 21:17:29.076855 25006 net.cpp:380] conv3 -> conv3
I0409 21:17:29.086712 25006 net.cpp:122] Setting up conv3
I0409 21:17:29.086728 25006 net.cpp:129] Top shape: 128 384 13 13 (8306688)
I0409 21:17:29.086733 25006 net.cpp:137] Memory required for data: 903067648
I0409 21:17:29.086743 25006 layer_factory.hpp:77] Creating layer relu3
I0409 21:17:29.086752 25006 net.cpp:84] Creating Layer relu3
I0409 21:17:29.086757 25006 net.cpp:406] relu3 <- conv3
I0409 21:17:29.086763 25006 net.cpp:367] relu3 -> conv3 (in-place)
I0409 21:17:29.087239 25006 net.cpp:122] Setting up relu3
I0409 21:17:29.087249 25006 net.cpp:129] Top shape: 128 384 13 13 (8306688)
I0409 21:17:29.087253 25006 net.cpp:137] Memory required for data: 936294400
I0409 21:17:29.087257 25006 layer_factory.hpp:77] Creating layer conv4
I0409 21:17:29.087270 25006 net.cpp:84] Creating Layer conv4
I0409 21:17:29.087275 25006 net.cpp:406] conv4 <- conv3
I0409 21:17:29.087281 25006 net.cpp:380] conv4 -> conv4
I0409 21:17:29.097532 25006 net.cpp:122] Setting up conv4
I0409 21:17:29.097548 25006 net.cpp:129] Top shape: 128 384 13 13 (8306688)
I0409 21:17:29.097551 25006 net.cpp:137] Memory required for data: 969521152
I0409 21:17:29.097560 25006 layer_factory.hpp:77] Creating layer relu4
I0409 21:17:29.097568 25006 net.cpp:84] Creating Layer relu4
I0409 21:17:29.097573 25006 net.cpp:406] relu4 <- conv4
I0409 21:17:29.097580 25006 net.cpp:367] relu4 -> conv4 (in-place)
I0409 21:17:29.097918 25006 net.cpp:122] Setting up relu4
I0409 21:17:29.097926 25006 net.cpp:129] Top shape: 128 384 13 13 (8306688)
I0409 21:17:29.097930 25006 net.cpp:137] Memory required for data: 1002747904
I0409 21:17:29.097935 25006 layer_factory.hpp:77] Creating layer conv5
I0409 21:17:29.097946 25006 net.cpp:84] Creating Layer conv5
I0409 21:17:29.097950 25006 net.cpp:406] conv5 <- conv4
I0409 21:17:29.097990 25006 net.cpp:380] conv5 -> conv5
I0409 21:17:29.106262 25006 net.cpp:122] Setting up conv5
I0409 21:17:29.106277 25006 net.cpp:129] Top shape: 128 256 13 13 (5537792)
I0409 21:17:29.106281 25006 net.cpp:137] Memory required for data: 1024899072
I0409 21:17:29.106294 25006 layer_factory.hpp:77] Creating layer relu5
I0409 21:17:29.106302 25006 net.cpp:84] Creating Layer relu5
I0409 21:17:29.106307 25006 net.cpp:406] relu5 <- conv5
I0409 21:17:29.106314 25006 net.cpp:367] relu5 -> conv5 (in-place)
I0409 21:17:29.106789 25006 net.cpp:122] Setting up relu5
I0409 21:17:29.106798 25006 net.cpp:129] Top shape: 128 256 13 13 (5537792)
I0409 21:17:29.106802 25006 net.cpp:137] Memory required for data: 1047050240
I0409 21:17:29.106806 25006 layer_factory.hpp:77] Creating layer pool5
I0409 21:17:29.106815 25006 net.cpp:84] Creating Layer pool5
I0409 21:17:29.106819 25006 net.cpp:406] pool5 <- conv5
I0409 21:17:29.106827 25006 net.cpp:380] pool5 -> pool5
I0409 21:17:29.106864 25006 net.cpp:122] Setting up pool5
I0409 21:17:29.106870 25006 net.cpp:129] Top shape: 128 256 6 6 (1179648)
I0409 21:17:29.106874 25006 net.cpp:137] Memory required for data: 1051768832
I0409 21:17:29.106878 25006 layer_factory.hpp:77] Creating layer fc6
I0409 21:17:29.106889 25006 net.cpp:84] Creating Layer fc6
I0409 21:17:29.106892 25006 net.cpp:406] fc6 <- pool5
I0409 21:17:29.106899 25006 net.cpp:380] fc6 -> fc6
I0409 21:17:29.460299 25006 net.cpp:122] Setting up fc6
I0409 21:17:29.460322 25006 net.cpp:129] Top shape: 128 4096 (524288)
I0409 21:17:29.460326 25006 net.cpp:137] Memory required for data: 1053865984
I0409 21:17:29.460336 25006 layer_factory.hpp:77] Creating layer relu6
I0409 21:17:29.460345 25006 net.cpp:84] Creating Layer relu6
I0409 21:17:29.460351 25006 net.cpp:406] relu6 <- fc6
I0409 21:17:29.460358 25006 net.cpp:367] relu6 -> fc6 (in-place)
I0409 21:17:29.460968 25006 net.cpp:122] Setting up relu6
I0409 21:17:29.460978 25006 net.cpp:129] Top shape: 128 4096 (524288)
I0409 21:17:29.460983 25006 net.cpp:137] Memory required for data: 1055963136
I0409 21:17:29.460988 25006 layer_factory.hpp:77] Creating layer drop6
I0409 21:17:29.460994 25006 net.cpp:84] Creating Layer drop6
I0409 21:17:29.460999 25006 net.cpp:406] drop6 <- fc6
I0409 21:17:29.461006 25006 net.cpp:367] drop6 -> fc6 (in-place)
I0409 21:17:29.461033 25006 net.cpp:122] Setting up drop6
I0409 21:17:29.461041 25006 net.cpp:129] Top shape: 128 4096 (524288)
I0409 21:17:29.461045 25006 net.cpp:137] Memory required for data: 1058060288
I0409 21:17:29.461050 25006 layer_factory.hpp:77] Creating layer fc8
I0409 21:17:29.461057 25006 net.cpp:84] Creating Layer fc8
I0409 21:17:29.461061 25006 net.cpp:406] fc8 <- fc6
I0409 21:17:29.461067 25006 net.cpp:380] fc8 -> fc8
I0409 21:17:29.468709 25006 net.cpp:122] Setting up fc8
I0409 21:17:29.468721 25006 net.cpp:129] Top shape: 128 196 (25088)
I0409 21:17:29.468725 25006 net.cpp:137] Memory required for data: 1058160640
I0409 21:17:29.468734 25006 layer_factory.hpp:77] Creating layer loss
I0409 21:17:29.468740 25006 net.cpp:84] Creating Layer loss
I0409 21:17:29.468745 25006 net.cpp:406] loss <- fc8
I0409 21:17:29.468751 25006 net.cpp:406] loss <- label
I0409 21:17:29.468758 25006 net.cpp:380] loss -> loss
I0409 21:17:29.468767 25006 layer_factory.hpp:77] Creating layer loss
I0409 21:17:29.469372 25006 net.cpp:122] Setting up loss
I0409 21:17:29.469380 25006 net.cpp:129] Top shape: (1)
I0409 21:17:29.469384 25006 net.cpp:132] with loss weight 1
I0409 21:17:29.469403 25006 net.cpp:137] Memory required for data: 1058160644
I0409 21:17:29.469408 25006 net.cpp:198] loss needs backward computation.
I0409 21:17:29.469415 25006 net.cpp:198] fc8 needs backward computation.
I0409 21:17:29.469419 25006 net.cpp:198] drop6 needs backward computation.
I0409 21:17:29.469424 25006 net.cpp:198] relu6 needs backward computation.
I0409 21:17:29.469429 25006 net.cpp:198] fc6 needs backward computation.
I0409 21:17:29.469432 25006 net.cpp:198] pool5 needs backward computation.
I0409 21:17:29.469436 25006 net.cpp:198] relu5 needs backward computation.
I0409 21:17:29.469460 25006 net.cpp:198] conv5 needs backward computation.
I0409 21:17:29.469463 25006 net.cpp:198] relu4 needs backward computation.
I0409 21:17:29.469467 25006 net.cpp:198] conv4 needs backward computation.
I0409 21:17:29.469472 25006 net.cpp:198] relu3 needs backward computation.
I0409 21:17:29.469475 25006 net.cpp:198] conv3 needs backward computation.
I0409 21:17:29.469480 25006 net.cpp:198] pool2 needs backward computation.
I0409 21:17:29.469485 25006 net.cpp:198] norm2 needs backward computation.
I0409 21:17:29.469488 25006 net.cpp:198] relu2 needs backward computation.
I0409 21:17:29.469492 25006 net.cpp:198] conv2 needs backward computation.
I0409 21:17:29.469496 25006 net.cpp:198] pool1 needs backward computation.
I0409 21:17:29.469501 25006 net.cpp:198] norm1 needs backward computation.
I0409 21:17:29.469506 25006 net.cpp:198] relu1 needs backward computation.
I0409 21:17:29.469509 25006 net.cpp:198] conv1 needs backward computation.
I0409 21:17:29.469513 25006 net.cpp:200] train-data does not need backward computation.
I0409 21:17:29.469517 25006 net.cpp:242] This network produces output loss
I0409 21:17:29.469530 25006 net.cpp:255] Network initialization done.
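
By the end of setup the training net's top blobs total roughly 1.06 GB (1,058,160,644 bytes above) at batch size 128, before weights, gradients, and cuDNN workspaces are counted. The learnable parameters themselves are dominated by fc6; a rough count from the layer definitions above (not printed by Caffe, so treat this as an estimate):

    # Rough parameter count (weights + biases) for the net defined above.
    conv  = 96 * 3 * 11 * 11 + 96              # conv1
    conv += 256 * (96 // 2) * 5 * 5 + 256      # conv2 (group: 2 halves the input channels)
    conv += 384 * 256 * 3 * 3 + 384            # conv3
    conv += 384 * (384 // 2) * 3 * 3 + 384     # conv4 (group: 2)
    conv += 256 * (384 // 2) * 3 * 3 + 256     # conv5 (group: 2)
    fc6   = 256 * 6 * 6 * 4096 + 4096          # 9216 inputs from pool5
    fc8   = 4096 * 196 + 196
    print(conv, fc6, fc8, conv + fc6 + fc8)    # ~2.33M, ~37.8M, ~0.80M, ~40.9M in total
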
I0409 21:17:29.470003 25006 solver.cpp:172] Creating test net (#0) specified by net file: train_val.prototxt
I0409 21:17:29.470031 25006 net.cpp:294] The NetState phase (1) differed from the phase (0) specified by a rule in layer train-data
I0409 21:17:29.470160 25006 net.cpp:51] Initializing net from parameters:
state {
phase: TEST
}
layer {
name: "val-data"
type: "Data"
top: "data"
top: "label"
include {
phase: TEST
}
transform_param {
crop_size: 227
mean_file: "/mnt/bigdisk/DIGITS-MAN-2/digits/jobs/20210407-214532-d396/mean.binaryproto"
}
data_param {
source: "/mnt/bigdisk/DIGITS-MAN-2/digits/jobs/20210407-214532-d396/val_db"
batch_size: 32
backend: LMDB
}
}
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 96
kernel_size: 11
stride: 4
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "relu1"
type: "ReLU"
bottom: "conv1"
top: "conv1"
}
layer {
name: "norm1"
type: "LRN"
bottom: "conv1"
top: "norm1"
lrn_param {
local_size: 5
alpha: 0.0001
beta: 0.75
}
}
layer {
name: "pool1"
type: "Pooling"
bottom: "norm1"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "conv2"
type: "Convolution"
bottom: "pool1"
top: "conv2"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 256
pad: 2
kernel_size: 5
group: 2
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0.1
}
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "conv2"
top: "conv2"
}
layer {
name: "norm2"
type: "LRN"
bottom: "conv2"
top: "norm2"
lrn_param {
local_size: 5
alpha: 0.0001
beta: 0.75
}
}
layer {
name: "pool2"
type: "Pooling"
bottom: "norm2"
top: "pool2"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "conv3"
type: "Convolution"
bottom: "pool2"
top: "conv3"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 384
pad: 1
kernel_size: 3
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "relu3"
type: "ReLU"
bottom: "conv3"
top: "conv3"
}
layer {
name: "conv4"
type: "Convolution"
bottom: "conv3"
top: "conv4"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 384
pad: 1
kernel_size: 3
group: 2
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0.1
}
}
}
layer {
name: "relu4"
type: "ReLU"
bottom: "conv4"
top: "conv4"
}
layer {
name: "conv5"
type: "Convolution"
bottom: "conv4"
top: "conv5"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
group: 2
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0.1
}
}
}
layer {
name: "relu5"
type: "ReLU"
bottom: "conv5"
top: "conv5"
}
layer {
name: "pool5"
type: "Pooling"
bottom: "conv5"
top: "pool5"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "fc6"
type: "InnerProduct"
bottom: "pool5"
top: "fc6"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
inner_product_param {
num_output: 4096
weight_filler {
type: "gaussian"
std: 0.005
}
bias_filler {
type: "constant"
value: 0.1
}
}
}
layer {
name: "relu6"
type: "ReLU"
bottom: "fc6"
top: "fc6"
}
layer {
name: "drop6"
type: "Dropout"
bottom: "fc6"
top: "fc6"
dropout_param {
dropout_ratio: 0.5
}
}
layer {
name: "fc8"
type: "InnerProduct"
bottom: "fc6"
top: "fc8"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
inner_product_param {
num_output: 196
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "accuracy"
type: "Accuracy"
bottom: "fc8"
bottom: "label"
top: "accuracy"
include {
phase: TEST
}
}
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "fc8"
bottom: "label"
top: "loss"
}
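
The TEST-phase net above differs from the training net only in its data layer (val_db, batch size 32, no mirroring) and the added Accuracy layer. Combined with test_iter: 51 from the solver, each "Testing net (#0)" pass below evaluates a fixed slice of the validation set:

    # Images seen per test pass, from the solver's test_iter and the val-data batch size above.
    test_iter, val_batch = 51, 32
    print(test_iter * val_batch)      # 1632 validation images per test pass
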
I0409 21:17:29.470247 25006 layer_factory.hpp:77] Creating layer val-data
I0409 21:17:29.471750 25006 db_lmdb.cpp:35] Opened lmdb /mnt/bigdisk/DIGITS-MAN-2/digits/jobs/20210407-214532-d396/val_db
I0409 21:17:29.471953 25006 net.cpp:84] Creating Layer val-data
I0409 21:17:29.471963 25006 net.cpp:380] val-data -> data
I0409 21:17:29.471972 25006 net.cpp:380] val-data -> label
I0409 21:17:29.471980 25006 data_transformer.cpp:25] Loading mean file from: /mnt/bigdisk/DIGITS-MAN-2/digits/jobs/20210407-214532-d396/mean.binaryproto
I0409 21:17:29.475826 25006 data_layer.cpp:45] output data size: 32,3,227,227
I0409 21:17:29.507874 25006 net.cpp:122] Setting up val-data
I0409 21:17:29.507897 25006 net.cpp:129] Top shape: 32 3 227 227 (4946784)
I0409 21:17:29.507902 25006 net.cpp:129] Top shape: 32 (32)
I0409 21:17:29.507906 25006 net.cpp:137] Memory required for data: 19787264
I0409 21:17:29.507913 25006 layer_factory.hpp:77] Creating layer label_val-data_1_split
I0409 21:17:29.507926 25006 net.cpp:84] Creating Layer label_val-data_1_split
I0409 21:17:29.507932 25006 net.cpp:406] label_val-data_1_split <- label
I0409 21:17:29.507939 25006 net.cpp:380] label_val-data_1_split -> label_val-data_1_split_0
I0409 21:17:29.507949 25006 net.cpp:380] label_val-data_1_split -> label_val-data_1_split_1
I0409 21:17:29.508000 25006 net.cpp:122] Setting up label_val-data_1_split
I0409 21:17:29.508006 25006 net.cpp:129] Top shape: 32 (32)
I0409 21:17:29.508010 25006 net.cpp:129] Top shape: 32 (32)
I0409 21:17:29.508014 25006 net.cpp:137] Memory required for data: 19787520
I0409 21:17:29.508018 25006 layer_factory.hpp:77] Creating layer conv1
I0409 21:17:29.508029 25006 net.cpp:84] Creating Layer conv1
I0409 21:17:29.508034 25006 net.cpp:406] conv1 <- data
I0409 21:17:29.508040 25006 net.cpp:380] conv1 -> conv1
I0409 21:17:29.510104 25006 net.cpp:122] Setting up conv1
I0409 21:17:29.510116 25006 net.cpp:129] Top shape: 32 96 55 55 (9292800)
I0409 21:17:29.510121 25006 net.cpp:137] Memory required for data: 56958720
I0409 21:17:29.510133 25006 layer_factory.hpp:77] Creating layer relu1
I0409 21:17:29.510139 25006 net.cpp:84] Creating Layer relu1
I0409 21:17:29.510164 25006 net.cpp:406] relu1 <- conv1
I0409 21:17:29.510169 25006 net.cpp:367] relu1 -> conv1 (in-place)
I0409 21:17:29.510610 25006 net.cpp:122] Setting up relu1
I0409 21:17:29.510620 25006 net.cpp:129] Top shape: 32 96 55 55 (9292800)
I0409 21:17:29.510624 25006 net.cpp:137] Memory required for data: 94129920
I0409 21:17:29.510628 25006 layer_factory.hpp:77] Creating layer norm1
I0409 21:17:29.510637 25006 net.cpp:84] Creating Layer norm1
I0409 21:17:29.510643 25006 net.cpp:406] norm1 <- conv1
I0409 21:17:29.510648 25006 net.cpp:380] norm1 -> norm1
I0409 21:17:29.510948 25006 net.cpp:122] Setting up norm1
I0409 21:17:29.510957 25006 net.cpp:129] Top shape: 32 96 55 55 (9292800)
I0409 21:17:29.510962 25006 net.cpp:137] Memory required for data: 131301120
I0409 21:17:29.510967 25006 layer_factory.hpp:77] Creating layer pool1
I0409 21:17:29.510973 25006 net.cpp:84] Creating Layer pool1
I0409 21:17:29.510978 25006 net.cpp:406] pool1 <- norm1
I0409 21:17:29.510983 25006 net.cpp:380] pool1 -> pool1
I0409 21:17:29.511010 25006 net.cpp:122] Setting up pool1
I0409 21:17:29.511016 25006 net.cpp:129] Top shape: 32 96 27 27 (2239488)
I0409 21:17:29.511020 25006 net.cpp:137] Memory required for data: 140259072
I0409 21:17:29.511024 25006 layer_factory.hpp:77] Creating layer conv2
I0409 21:17:29.511032 25006 net.cpp:84] Creating Layer conv2
I0409 21:17:29.511037 25006 net.cpp:406] conv2 <- pool1
I0409 21:17:29.511042 25006 net.cpp:380] conv2 -> conv2
I0409 21:17:29.519682 25006 net.cpp:122] Setting up conv2
I0409 21:17:29.519701 25006 net.cpp:129] Top shape: 32 256 27 27 (5971968)
I0409 21:17:29.519704 25006 net.cpp:137] Memory required for data: 164146944
I0409 21:17:29.519716 25006 layer_factory.hpp:77] Creating layer relu2
I0409 21:17:29.519724 25006 net.cpp:84] Creating Layer relu2
I0409 21:17:29.519729 25006 net.cpp:406] relu2 <- conv2
I0409 21:17:29.519739 25006 net.cpp:367] relu2 -> conv2 (in-place)
I0409 21:17:29.520244 25006 net.cpp:122] Setting up relu2
I0409 21:17:29.520256 25006 net.cpp:129] Top shape: 32 256 27 27 (5971968)
I0409 21:17:29.520259 25006 net.cpp:137] Memory required for data: 188034816
I0409 21:17:29.520264 25006 layer_factory.hpp:77] Creating layer norm2
I0409 21:17:29.520274 25006 net.cpp:84] Creating Layer norm2
I0409 21:17:29.520279 25006 net.cpp:406] norm2 <- conv2
I0409 21:17:29.520285 25006 net.cpp:380] norm2 -> norm2
I0409 21:17:29.520788 25006 net.cpp:122] Setting up norm2
I0409 21:17:29.520798 25006 net.cpp:129] Top shape: 32 256 27 27 (5971968)
I0409 21:17:29.520803 25006 net.cpp:137] Memory required for data: 211922688
I0409 21:17:29.520808 25006 layer_factory.hpp:77] Creating layer pool2
I0409 21:17:29.520817 25006 net.cpp:84] Creating Layer pool2
I0409 21:17:29.520821 25006 net.cpp:406] pool2 <- norm2
I0409 21:17:29.520826 25006 net.cpp:380] pool2 -> pool2
I0409 21:17:29.520859 25006 net.cpp:122] Setting up pool2
I0409 21:17:29.520865 25006 net.cpp:129] Top shape: 32 256 13 13 (1384448)
I0409 21:17:29.520869 25006 net.cpp:137] Memory required for data: 217460480
I0409 21:17:29.520874 25006 layer_factory.hpp:77] Creating layer conv3
I0409 21:17:29.520885 25006 net.cpp:84] Creating Layer conv3
I0409 21:17:29.520889 25006 net.cpp:406] conv3 <- pool2
I0409 21:17:29.520895 25006 net.cpp:380] conv3 -> conv3
I0409 21:17:29.533210 25006 net.cpp:122] Setting up conv3
I0409 21:17:29.533227 25006 net.cpp:129] Top shape: 32 384 13 13 (2076672)
I0409 21:17:29.533231 25006 net.cpp:137] Memory required for data: 225767168
I0409 21:17:29.533243 25006 layer_factory.hpp:77] Creating layer relu3
I0409 21:17:29.533252 25006 net.cpp:84] Creating Layer relu3
I0409 21:17:29.533257 25006 net.cpp:406] relu3 <- conv3
I0409 21:17:29.533265 25006 net.cpp:367] relu3 -> conv3 (in-place)
I0409 21:17:29.534776 25006 net.cpp:122] Setting up relu3
I0409 21:17:29.534790 25006 net.cpp:129] Top shape: 32 384 13 13 (2076672)
I0409 21:17:29.534793 25006 net.cpp:137] Memory required for data: 234073856
I0409 21:17:29.534798 25006 layer_factory.hpp:77] Creating layer conv4
I0409 21:17:29.534827 25006 net.cpp:84] Creating Layer conv4
I0409 21:17:29.534832 25006 net.cpp:406] conv4 <- conv3
I0409 21:17:29.534839 25006 net.cpp:380] conv4 -> conv4
I0409 21:17:29.551090 25006 net.cpp:122] Setting up conv4
I0409 21:17:29.551107 25006 net.cpp:129] Top shape: 32 384 13 13 (2076672)
I0409 21:17:29.551111 25006 net.cpp:137] Memory required for data: 242380544
I0409 21:17:29.551120 25006 layer_factory.hpp:77] Creating layer relu4
I0409 21:17:29.551131 25006 net.cpp:84] Creating Layer relu4
I0409 21:17:29.551134 25006 net.cpp:406] relu4 <- conv4
I0409 21:17:29.551141 25006 net.cpp:367] relu4 -> conv4 (in-place)
I0409 21:17:29.551626 25006 net.cpp:122] Setting up relu4
I0409 21:17:29.551637 25006 net.cpp:129] Top shape: 32 384 13 13 (2076672)
I0409 21:17:29.551641 25006 net.cpp:137] Memory required for data: 250687232
I0409 21:17:29.551645 25006 layer_factory.hpp:77] Creating layer conv5
I0409 21:17:29.551657 25006 net.cpp:84] Creating Layer conv5
I0409 21:17:29.551661 25006 net.cpp:406] conv5 <- conv4
I0409 21:17:29.551668 25006 net.cpp:380] conv5 -> conv5
I0409 21:17:29.560024 25006 net.cpp:122] Setting up conv5
I0409 21:17:29.560041 25006 net.cpp:129] Top shape: 32 256 13 13 (1384448)
I0409 21:17:29.560046 25006 net.cpp:137] Memory required for data: 256225024
I0409 21:17:29.560060 25006 layer_factory.hpp:77] Creating layer relu5
I0409 21:17:29.560067 25006 net.cpp:84] Creating Layer relu5
I0409 21:17:29.560072 25006 net.cpp:406] relu5 <- conv5
I0409 21:17:29.560079 25006 net.cpp:367] relu5 -> conv5 (in-place)
I0409 21:17:29.560573 25006 net.cpp:122] Setting up relu5
I0409 21:17:29.560583 25006 net.cpp:129] Top shape: 32 256 13 13 (1384448)
I0409 21:17:29.560587 25006 net.cpp:137] Memory required for data: 261762816
I0409 21:17:29.560591 25006 layer_factory.hpp:77] Creating layer pool5
I0409 21:17:29.560602 25006 net.cpp:84] Creating Layer pool5
I0409 21:17:29.560607 25006 net.cpp:406] pool5 <- conv5
I0409 21:17:29.560613 25006 net.cpp:380] pool5 -> pool5
I0409 21:17:29.560654 25006 net.cpp:122] Setting up pool5
I0409 21:17:29.560660 25006 net.cpp:129] Top shape: 32 256 6 6 (294912)
I0409 21:17:29.560664 25006 net.cpp:137] Memory required for data: 262942464
I0409 21:17:29.560668 25006 layer_factory.hpp:77] Creating layer fc6
I0409 21:17:29.560679 25006 net.cpp:84] Creating Layer fc6
I0409 21:17:29.560683 25006 net.cpp:406] fc6 <- pool5
I0409 21:17:29.560688 25006 net.cpp:380] fc6 -> fc6
I0409 21:17:29.914144 25006 net.cpp:122] Setting up fc6
I0409 21:17:29.914165 25006 net.cpp:129] Top shape: 32 4096 (131072)
I0409 21:17:29.914170 25006 net.cpp:137] Memory required for data: 263466752
I0409 21:17:29.914180 25006 layer_factory.hpp:77] Creating layer relu6
I0409 21:17:29.914188 25006 net.cpp:84] Creating Layer relu6
I0409 21:17:29.914194 25006 net.cpp:406] relu6 <- fc6
I0409 21:17:29.914202 25006 net.cpp:367] relu6 -> fc6 (in-place)
I0409 21:17:29.914839 25006 net.cpp:122] Setting up relu6
I0409 21:17:29.914849 25006 net.cpp:129] Top shape: 32 4096 (131072)
I0409 21:17:29.914852 25006 net.cpp:137] Memory required for data: 263991040
I0409 21:17:29.914856 25006 layer_factory.hpp:77] Creating layer drop6
I0409 21:17:29.914863 25006 net.cpp:84] Creating Layer drop6
I0409 21:17:29.914868 25006 net.cpp:406] drop6 <- fc6
I0409 21:17:29.914873 25006 net.cpp:367] drop6 -> fc6 (in-place)
I0409 21:17:29.914898 25006 net.cpp:122] Setting up drop6
I0409 21:17:29.914903 25006 net.cpp:129] Top shape: 32 4096 (131072)
I0409 21:17:29.914907 25006 net.cpp:137] Memory required for data: 264515328
I0409 21:17:29.914911 25006 layer_factory.hpp:77] Creating layer fc8
I0409 21:17:29.914919 25006 net.cpp:84] Creating Layer fc8
I0409 21:17:29.914923 25006 net.cpp:406] fc8 <- fc6
I0409 21:17:29.914929 25006 net.cpp:380] fc8 -> fc8
I0409 21:17:29.922585 25006 net.cpp:122] Setting up fc8
I0409 21:17:29.922597 25006 net.cpp:129] Top shape: 32 196 (6272)
I0409 21:17:29.922602 25006 net.cpp:137] Memory required for data: 264540416
I0409 21:17:29.922610 25006 layer_factory.hpp:77] Creating layer fc8_fc8_0_split
I0409 21:17:29.922622 25006 net.cpp:84] Creating Layer fc8_fc8_0_split
I0409 21:17:29.922646 25006 net.cpp:406] fc8_fc8_0_split <- fc8
I0409 21:17:29.922653 25006 net.cpp:380] fc8_fc8_0_split -> fc8_fc8_0_split_0
I0409 21:17:29.922667 25006 net.cpp:380] fc8_fc8_0_split -> fc8_fc8_0_split_1
I0409 21:17:29.922701 25006 net.cpp:122] Setting up fc8_fc8_0_split
I0409 21:17:29.922708 25006 net.cpp:129] Top shape: 32 196 (6272)
I0409 21:17:29.922713 25006 net.cpp:129] Top shape: 32 196 (6272)
I0409 21:17:29.922716 25006 net.cpp:137] Memory required for data: 264590592
I0409 21:17:29.922721 25006 layer_factory.hpp:77] Creating layer accuracy
I0409 21:17:29.922729 25006 net.cpp:84] Creating Layer accuracy
I0409 21:17:29.922732 25006 net.cpp:406] accuracy <- fc8_fc8_0_split_0
I0409 21:17:29.922739 25006 net.cpp:406] accuracy <- label_val-data_1_split_0
I0409 21:17:29.922744 25006 net.cpp:380] accuracy -> accuracy
I0409 21:17:29.922753 25006 net.cpp:122] Setting up accuracy
I0409 21:17:29.922757 25006 net.cpp:129] Top shape: (1)
I0409 21:17:29.922760 25006 net.cpp:137] Memory required for data: 264590596
I0409 21:17:29.922765 25006 layer_factory.hpp:77] Creating layer loss
I0409 21:17:29.922770 25006 net.cpp:84] Creating Layer loss
I0409 21:17:29.922775 25006 net.cpp:406] loss <- fc8_fc8_0_split_1
I0409 21:17:29.922780 25006 net.cpp:406] loss <- label_val-data_1_split_1
I0409 21:17:29.922785 25006 net.cpp:380] loss -> loss
I0409 21:17:29.922791 25006 layer_factory.hpp:77] Creating layer loss
I0409 21:17:29.923693 25006 net.cpp:122] Setting up loss
I0409 21:17:29.923703 25006 net.cpp:129] Top shape: (1)
I0409 21:17:29.923707 25006 net.cpp:132] with loss weight 1
I0409 21:17:29.923719 25006 net.cpp:137] Memory required for data: 264590600
I0409 21:17:29.923723 25006 net.cpp:198] loss needs backward computation.
I0409 21:17:29.923729 25006 net.cpp:200] accuracy does not need backward computation.
I0409 21:17:29.923735 25006 net.cpp:198] fc8_fc8_0_split needs backward computation.
I0409 21:17:29.923739 25006 net.cpp:198] fc8 needs backward computation.
I0409 21:17:29.923743 25006 net.cpp:198] drop6 needs backward computation.
I0409 21:17:29.923748 25006 net.cpp:198] relu6 needs backward computation.
I0409 21:17:29.923750 25006 net.cpp:198] fc6 needs backward computation.
I0409 21:17:29.923754 25006 net.cpp:198] pool5 needs backward computation.
I0409 21:17:29.923758 25006 net.cpp:198] relu5 needs backward computation.
I0409 21:17:29.923763 25006 net.cpp:198] conv5 needs backward computation.
I0409 21:17:29.923766 25006 net.cpp:198] relu4 needs backward computation.
I0409 21:17:29.923770 25006 net.cpp:198] conv4 needs backward computation.
I0409 21:17:29.923774 25006 net.cpp:198] relu3 needs backward computation.
I0409 21:17:29.923779 25006 net.cpp:198] conv3 needs backward computation.
I0409 21:17:29.923782 25006 net.cpp:198] pool2 needs backward computation.
I0409 21:17:29.923786 25006 net.cpp:198] norm2 needs backward computation.
I0409 21:17:29.923790 25006 net.cpp:198] relu2 needs backward computation.
I0409 21:17:29.923794 25006 net.cpp:198] conv2 needs backward computation.
I0409 21:17:29.923797 25006 net.cpp:198] pool1 needs backward computation.
I0409 21:17:29.923801 25006 net.cpp:198] norm1 needs backward computation.
I0409 21:17:29.923805 25006 net.cpp:198] relu1 needs backward computation.
I0409 21:17:29.923808 25006 net.cpp:198] conv1 needs backward computation.
I0409 21:17:29.923812 25006 net.cpp:200] label_val-data_1_split does not need backward computation.
I0409 21:17:29.923817 25006 net.cpp:200] val-data does not need backward computation.
I0409 21:17:29.923821 25006 net.cpp:242] This network produces output accuracy
I0409 21:17:29.923825 25006 net.cpp:242] This network produces output loss
I0409 21:17:29.923841 25006 net.cpp:255] Network initialization done.
I0409 21:17:29.923903 25006 solver.cpp:56] Solver scaffolding done.
I0409 21:17:29.924280 25006 caffe.cpp:248] Starting Optimization
I0409 21:17:29.924289 25006 solver.cpp:272] Solving
I0409 21:17:29.924293 25006 solver.cpp:273] Learning Rate Policy: exp
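
The loop below tests and snapshots every 102 iterations, which at batch size 128 is 13,056 training images per interval, and the 10,200-iteration run is 100 such intervals (presumably one epoch each, given how DIGITS usually sizes these intervals to the training set):

    # Cadence of the training loop below, from the solver settings.
    max_iter, interval, train_batch = 10200, 102, 128
    print(interval * train_batch)     # 13056 training images per test/snapshot interval
    print(max_iter // interval)       # 100 intervals over the whole run
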
I0409 21:17:29.930464 25006 solver.cpp:330] Iteration 0, Testing net (#0)
I0409 21:17:29.930490 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:17:29.997550 25006 blocking_queue.cpp:49] Waiting for data
I0409 21:17:34.237562 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:17:34.282348 25006 solver.cpp:397] Test net output #0: accuracy = 0.00551471
I0409 21:17:34.282400 25006 solver.cpp:397] Test net output #1: loss = 5.28591 (* 1 = 5.28591 loss)
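
At iteration 0 the untrained net is at chance level: the softmax loss starts near ln(196), and the reported accuracy corresponds to 9 correct answers out of the 1,632 images in a test pass, close to the 1/196 chance rate. A quick check:

    import math
    num_classes = 196
    print(math.log(num_classes))      # ~5.278, close to the initial loss of ~5.29
    print(1 / num_classes)            # ~0.0051 chance accuracy
    print(9 / (51 * 32))              # 0.00551470..., the exact accuracy printed above
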
I0409 21:17:34.377739 25006 solver.cpp:218] Iteration 0 (-1.637e-40 iter/s, 4.45322s/12 iters), loss = 5.3038
I0409 21:17:34.379252 25006 solver.cpp:237] Train net output #0: loss = 5.3038 (* 1 = 5.3038 loss)
I0409 21:17:34.379274 25006 sgd_solver.cpp:105] Iteration 0, lr = 0.01
I0409 21:17:38.371312 25006 solver.cpp:218] Iteration 12 (3.00611 iter/s, 3.99187s/12 iters), loss = 5.28731
I0409 21:17:38.371366 25006 solver.cpp:237] Train net output #0: loss = 5.28731 (* 1 = 5.28731 loss)
I0409 21:17:38.371378 25006 sgd_solver.cpp:105] Iteration 12, lr = 0.00997626
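
The throughput figures in these lines are simply the 12-iteration display interval divided by wall-clock time; the much lower values at iterations 0, 108, 204, ... include the snapshot and test passes inside the same interval. For the iteration-12 line above:

    # iter/s is display iterations / elapsed seconds; images/s follows from the batch size.
    iters, secs, batch = 12, 3.99187, 128   # from the iteration-12 line above
    print(iters / secs)               # ~3.006 iter/s, as printed
    print(iters / secs * batch)       # ~385 training images/s
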
I0409 21:17:43.462726 25006 solver.cpp:218] Iteration 24 (2.35704 iter/s, 5.09114s/12 iters), loss = 5.29124
I0409 21:17:43.462769 25006 solver.cpp:237] Train net output #0: loss = 5.29124 (* 1 = 5.29124 loss)
I0409 21:17:43.462777 25006 sgd_solver.cpp:105] Iteration 24, lr = 0.00995257
I0409 21:17:48.424854 25006 solver.cpp:218] Iteration 36 (2.41845 iter/s, 4.96186s/12 iters), loss = 5.28815
I0409 21:17:48.424904 25006 solver.cpp:237] Train net output #0: loss = 5.28815 (* 1 = 5.28815 loss)
I0409 21:17:48.424914 25006 sgd_solver.cpp:105] Iteration 36, lr = 0.00992894
I0409 21:17:53.458158 25006 solver.cpp:218] Iteration 48 (2.38425 iter/s, 5.03302s/12 iters), loss = 5.30385
I0409 21:17:53.458215 25006 solver.cpp:237] Train net output #0: loss = 5.30385 (* 1 = 5.30385 loss)
I0409 21:17:53.458228 25006 sgd_solver.cpp:105] Iteration 48, lr = 0.00990537
I0409 21:17:58.474432 25006 solver.cpp:218] Iteration 60 (2.39234 iter/s, 5.016s/12 iters), loss = 5.28852
I0409 21:17:58.474584 25006 solver.cpp:237] Train net output #0: loss = 5.28852 (* 1 = 5.28852 loss)
I0409 21:17:58.474594 25006 sgd_solver.cpp:105] Iteration 60, lr = 0.00988185
I0409 21:18:03.497730 25006 solver.cpp:218] Iteration 72 (2.38905 iter/s, 5.02292s/12 iters), loss = 5.30241
I0409 21:18:03.497788 25006 solver.cpp:237] Train net output #0: loss = 5.30241 (* 1 = 5.30241 loss)
I0409 21:18:03.497800 25006 sgd_solver.cpp:105] Iteration 72, lr = 0.00985839
I0409 21:18:08.472985 25006 solver.cpp:218] Iteration 84 (2.41207 iter/s, 4.97498s/12 iters), loss = 5.2953
I0409 21:18:08.473026 25006 solver.cpp:237] Train net output #0: loss = 5.2953 (* 1 = 5.2953 loss)
I0409 21:18:08.473035 25006 sgd_solver.cpp:105] Iteration 84, lr = 0.00983498
I0409 21:18:13.493826 25006 solver.cpp:218] Iteration 96 (2.39016 iter/s, 5.02058s/12 iters), loss = 5.30609
I0409 21:18:13.493871 25006 solver.cpp:237] Train net output #0: loss = 5.30609 (* 1 = 5.30609 loss)
I0409 21:18:13.493882 25006 sgd_solver.cpp:105] Iteration 96, lr = 0.00981163
I0409 21:18:15.223186 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:18:15.530791 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_102.caffemodel
I0409 21:18:20.382115 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_102.solverstate
I0409 21:18:23.715229 25006 solver.cpp:330] Iteration 102, Testing net (#0)
I0409 21:18:23.715257 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:18:28.141080 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:18:28.217748 25006 solver.cpp:397] Test net output #0: accuracy = 0.00551471
I0409 21:18:28.217798 25006 solver.cpp:397] Test net output #1: loss = 5.28701 (* 1 = 5.28701 loss)
I0409 21:18:30.090631 25006 solver.cpp:218] Iteration 108 (0.723063 iter/s, 16.5961s/12 iters), loss = 5.30611
I0409 21:18:30.090734 25006 solver.cpp:237] Train net output #0: loss = 5.30611 (* 1 = 5.30611 loss)
I0409 21:18:30.090744 25006 sgd_solver.cpp:105] Iteration 108, lr = 0.00978834
I0409 21:18:35.071504 25006 solver.cpp:218] Iteration 120 (2.40937 iter/s, 4.98055s/12 iters), loss = 5.28359
I0409 21:18:35.071544 25006 solver.cpp:237] Train net output #0: loss = 5.28359 (* 1 = 5.28359 loss)
I0409 21:18:35.071552 25006 sgd_solver.cpp:105] Iteration 120, lr = 0.0097651
I0409 21:18:40.070344 25006 solver.cpp:218] Iteration 132 (2.40068 iter/s, 4.99858s/12 iters), loss = 5.22434
I0409 21:18:40.070389 25006 solver.cpp:237] Train net output #0: loss = 5.22434 (* 1 = 5.22434 loss)
I0409 21:18:40.070399 25006 sgd_solver.cpp:105] Iteration 132, lr = 0.00974192
I0409 21:18:45.114707 25006 solver.cpp:218] Iteration 144 (2.37902 iter/s, 5.04409s/12 iters), loss = 5.29219
I0409 21:18:45.114755 25006 solver.cpp:237] Train net output #0: loss = 5.29219 (* 1 = 5.29219 loss)
I0409 21:18:45.114765 25006 sgd_solver.cpp:105] Iteration 144, lr = 0.00971879
I0409 21:18:50.134524 25006 solver.cpp:218] Iteration 156 (2.39066 iter/s, 5.01954s/12 iters), loss = 5.2347
I0409 21:18:50.134582 25006 solver.cpp:237] Train net output #0: loss = 5.2347 (* 1 = 5.2347 loss)
I0409 21:18:50.134593 25006 sgd_solver.cpp:105] Iteration 156, lr = 0.00969571
I0409 21:18:55.153033 25006 solver.cpp:218] Iteration 168 (2.39128 iter/s, 5.01823s/12 iters), loss = 5.1669
I0409 21:18:55.153088 25006 solver.cpp:237] Train net output #0: loss = 5.1669 (* 1 = 5.1669 loss)
I0409 21:18:55.153101 25006 sgd_solver.cpp:105] Iteration 168, lr = 0.00967269
I0409 21:19:00.195706 25006 solver.cpp:218] Iteration 180 (2.37982 iter/s, 5.04239s/12 iters), loss = 5.16586
I0409 21:19:00.195804 25006 solver.cpp:237] Train net output #0: loss = 5.16586 (* 1 = 5.16586 loss)
I0409 21:19:00.195819 25006 sgd_solver.cpp:105] Iteration 180, lr = 0.00964973
I0409 21:19:05.193747 25006 solver.cpp:218] Iteration 192 (2.40109 iter/s, 4.99773s/12 iters), loss = 5.25947
I0409 21:19:05.193799 25006 solver.cpp:237] Train net output #0: loss = 5.25947 (* 1 = 5.25947 loss)
I0409 21:19:05.193811 25006 sgd_solver.cpp:105] Iteration 192, lr = 0.00962682
I0409 21:19:09.068967 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:19:09.741791 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_204.caffemodel
I0409 21:19:15.739841 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_204.solverstate
I0409 21:19:24.487247 25006 solver.cpp:330] Iteration 204, Testing net (#0)
I0409 21:19:24.487275 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:19:28.906582 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:19:29.029314 25006 solver.cpp:397] Test net output #0: accuracy = 0.00919118
I0409 21:19:29.029363 25006 solver.cpp:397] Test net output #1: loss = 5.19273 (* 1 = 5.19273 loss)
I0409 21:19:29.118275 25006 solver.cpp:218] Iteration 204 (0.501599 iter/s, 23.9235s/12 iters), loss = 5.10781
I0409 21:19:29.118324 25006 solver.cpp:237] Train net output #0: loss = 5.10781 (* 1 = 5.10781 loss)
I0409 21:19:29.118335 25006 sgd_solver.cpp:105] Iteration 204, lr = 0.00960396
I0409 21:19:33.838584 25006 solver.cpp:218] Iteration 216 (2.54235 iter/s, 4.72005s/12 iters), loss = 5.16499
I0409 21:19:33.838665 25006 solver.cpp:237] Train net output #0: loss = 5.16499 (* 1 = 5.16499 loss)
I0409 21:19:33.838677 25006 sgd_solver.cpp:105] Iteration 216, lr = 0.00958116
I0409 21:19:38.812316 25006 solver.cpp:218] Iteration 228 (2.41282 iter/s, 4.97344s/12 iters), loss = 5.18474
I0409 21:19:38.812355 25006 solver.cpp:237] Train net output #0: loss = 5.18474 (* 1 = 5.18474 loss)
I0409 21:19:38.812363 25006 sgd_solver.cpp:105] Iteration 228, lr = 0.00955841
I0409 21:19:43.920428 25006 solver.cpp:218] Iteration 240 (2.34933 iter/s, 5.10784s/12 iters), loss = 5.1667
I0409 21:19:43.920482 25006 solver.cpp:237] Train net output #0: loss = 5.1667 (* 1 = 5.1667 loss)
I0409 21:19:43.920493 25006 sgd_solver.cpp:105] Iteration 240, lr = 0.00953572
I0409 21:19:48.952095 25006 solver.cpp:218] Iteration 252 (2.38503 iter/s, 5.03139s/12 iters), loss = 5.1129
I0409 21:19:48.952136 25006 solver.cpp:237] Train net output #0: loss = 5.1129 (* 1 = 5.1129 loss)
I0409 21:19:48.952148 25006 sgd_solver.cpp:105] Iteration 252, lr = 0.00951308
I0409 21:19:54.020730 25006 solver.cpp:218] Iteration 264 (2.36763 iter/s, 5.06837s/12 iters), loss = 5.14949
I0409 21:19:54.020781 25006 solver.cpp:237] Train net output #0: loss = 5.14949 (* 1 = 5.14949 loss)
I0409 21:19:54.020792 25006 sgd_solver.cpp:105] Iteration 264, lr = 0.00949049
I0409 21:19:58.984257 25006 solver.cpp:218] Iteration 276 (2.41777 iter/s, 4.96326s/12 iters), loss = 5.13445
I0409 21:19:58.984305 25006 solver.cpp:237] Train net output #0: loss = 5.13445 (* 1 = 5.13445 loss)
I0409 21:19:58.984314 25006 sgd_solver.cpp:105] Iteration 276, lr = 0.00946796
I0409 21:20:04.000931 25006 solver.cpp:218] Iteration 288 (2.39215 iter/s, 5.0164s/12 iters), loss = 4.98099
I0409 21:20:04.001055 25006 solver.cpp:237] Train net output #0: loss = 4.98099 (* 1 = 4.98099 loss)
I0409 21:20:04.001065 25006 sgd_solver.cpp:105] Iteration 288, lr = 0.00944548
I0409 21:20:09.186936 25006 solver.cpp:218] Iteration 300 (2.31408 iter/s, 5.18566s/12 iters), loss = 5.17011
I0409 21:20:09.186985 25006 solver.cpp:237] Train net output #0: loss = 5.17011 (* 1 = 5.17011 loss)
I0409 21:20:09.186996 25006 sgd_solver.cpp:105] Iteration 300, lr = 0.00942305
I0409 21:20:10.181282 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:20:11.231920 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_306.caffemodel
I0409 21:20:15.009464 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_306.solverstate
I0409 21:20:17.611407 25006 solver.cpp:330] Iteration 306, Testing net (#0)
I0409 21:20:17.611439 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:20:21.920807 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:20:22.078517 25006 solver.cpp:397] Test net output #0: accuracy = 0.0134804
I0409 21:20:22.078563 25006 solver.cpp:397] Test net output #1: loss = 5.11373 (* 1 = 5.11373 loss)
I0409 21:20:24.073999 25006 solver.cpp:218] Iteration 312 (0.806107 iter/s, 14.8864s/12 iters), loss = 5.02511
I0409 21:20:24.074060 25006 solver.cpp:237] Train net output #0: loss = 5.02511 (* 1 = 5.02511 loss)
I0409 21:20:24.074079 25006 sgd_solver.cpp:105] Iteration 312, lr = 0.00940068
I0409 21:20:29.207756 25006 solver.cpp:218] Iteration 324 (2.3376 iter/s, 5.13347s/12 iters), loss = 5.0856
I0409 21:20:29.207803 25006 solver.cpp:237] Train net output #0: loss = 5.0856 (* 1 = 5.0856 loss)
I0409 21:20:29.207810 25006 sgd_solver.cpp:105] Iteration 324, lr = 0.00937836
I0409 21:20:34.194939 25006 solver.cpp:218] Iteration 336 (2.4063 iter/s, 4.98691s/12 iters), loss = 5.0651
I0409 21:20:34.195005 25006 solver.cpp:237] Train net output #0: loss = 5.0651 (* 1 = 5.0651 loss)
I0409 21:20:34.195016 25006 sgd_solver.cpp:105] Iteration 336, lr = 0.0093561
I0409 21:20:39.184195 25006 solver.cpp:218] Iteration 348 (2.4053 iter/s, 4.98898s/12 iters), loss = 5.05028
I0409 21:20:39.184239 25006 solver.cpp:237] Train net output #0: loss = 5.05028 (* 1 = 5.05028 loss)
I0409 21:20:39.184249 25006 sgd_solver.cpp:105] Iteration 348, lr = 0.00933388
I0409 21:20:44.154775 25006 solver.cpp:218] Iteration 360 (2.41432 iter/s, 4.97034s/12 iters), loss = 5.06451
I0409 21:20:44.154829 25006 solver.cpp:237] Train net output #0: loss = 5.06451 (* 1 = 5.06451 loss)
I0409 21:20:44.154841 25006 sgd_solver.cpp:105] Iteration 360, lr = 0.00931172
I0409 21:20:49.112210 25006 solver.cpp:218] Iteration 372 (2.42072 iter/s, 4.9572s/12 iters), loss = 5.11131
I0409 21:20:49.112249 25006 solver.cpp:237] Train net output #0: loss = 5.11131 (* 1 = 5.11131 loss)
I0409 21:20:49.112257 25006 sgd_solver.cpp:105] Iteration 372, lr = 0.00928961
I0409 21:20:54.107661 25006 solver.cpp:218] Iteration 384 (2.4023 iter/s, 4.99522s/12 iters), loss = 5.09796
I0409 21:20:54.107702 25006 solver.cpp:237] Train net output #0: loss = 5.09796 (* 1 = 5.09796 loss)
I0409 21:20:54.107710 25006 sgd_solver.cpp:105] Iteration 384, lr = 0.00926756
I0409 21:20:59.100016 25006 solver.cpp:218] Iteration 396 (2.40379 iter/s, 4.99212s/12 iters), loss = 5.01324
I0409 21:20:59.100059 25006 solver.cpp:237] Train net output #0: loss = 5.01324 (* 1 = 5.01324 loss)
I0409 21:20:59.100067 25006 sgd_solver.cpp:105] Iteration 396, lr = 0.00924556
I0409 21:21:02.231523 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:21:03.636430 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_408.caffemodel
I0409 21:21:07.470283 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_408.solverstate
I0409 21:21:10.665769 25006 solver.cpp:330] Iteration 408, Testing net (#0)
I0409 21:21:10.665797 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:21:14.863863 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:21:15.072158 25006 solver.cpp:397] Test net output #0: accuracy = 0.0196078
I0409 21:21:15.072204 25006 solver.cpp:397] Test net output #1: loss = 5.07848 (* 1 = 5.07848 loss)
I0409 21:21:15.161053 25006 solver.cpp:218] Iteration 408 (0.747179 iter/s, 16.0604s/12 iters), loss = 5.10097
I0409 21:21:15.161098 25006 solver.cpp:237] Train net output #0: loss = 5.10097 (* 1 = 5.10097 loss)
I0409 21:21:15.161109 25006 sgd_solver.cpp:105] Iteration 408, lr = 0.00922361
I0409 21:21:19.578132 25006 solver.cpp:218] Iteration 420 (2.71686 iter/s, 4.41686s/12 iters), loss = 5.14602
I0409 21:21:19.578184 25006 solver.cpp:237] Train net output #0: loss = 5.14602 (* 1 = 5.14602 loss)
I0409 21:21:19.578197 25006 sgd_solver.cpp:105] Iteration 420, lr = 0.00920171
I0409 21:21:24.684644 25006 solver.cpp:218] Iteration 432 (2.35006 iter/s, 5.10626s/12 iters), loss = 5.0782
I0409 21:21:24.684693 25006 solver.cpp:237] Train net output #0: loss = 5.0782 (* 1 = 5.0782 loss)
I0409 21:21:24.684705 25006 sgd_solver.cpp:105] Iteration 432, lr = 0.00917986
I0409 21:21:29.654585 25006 solver.cpp:218] Iteration 444 (2.41463 iter/s, 4.9697s/12 iters), loss = 4.97767
I0409 21:21:29.654639 25006 solver.cpp:237] Train net output #0: loss = 4.97767 (* 1 = 4.97767 loss)
I0409 21:21:29.654650 25006 sgd_solver.cpp:105] Iteration 444, lr = 0.00915807
I0409 21:21:34.658872 25006 solver.cpp:218] Iteration 456 (2.39807 iter/s, 5.00403s/12 iters), loss = 4.98293
I0409 21:21:34.658927 25006 solver.cpp:237] Train net output #0: loss = 4.98293 (* 1 = 4.98293 loss)
I0409 21:21:34.658938 25006 sgd_solver.cpp:105] Iteration 456, lr = 0.00913632
I0409 21:21:39.627049 25006 solver.cpp:218] Iteration 468 (2.41549 iter/s, 4.96793s/12 iters), loss = 5.0248
I0409 21:21:39.627141 25006 solver.cpp:237] Train net output #0: loss = 5.0248 (* 1 = 5.0248 loss)
I0409 21:21:39.627151 25006 sgd_solver.cpp:105] Iteration 468, lr = 0.00911463
I0409 21:21:44.580940 25006 solver.cpp:218] Iteration 480 (2.42248 iter/s, 4.9536s/12 iters), loss = 5.03027
I0409 21:21:44.580992 25006 solver.cpp:237] Train net output #0: loss = 5.03027 (* 1 = 5.03027 loss)
I0409 21:21:44.581004 25006 sgd_solver.cpp:105] Iteration 480, lr = 0.00909299
I0409 21:21:49.635665 25006 solver.cpp:218] Iteration 492 (2.37414 iter/s, 5.05447s/12 iters), loss = 5.02364
I0409 21:21:49.635717 25006 solver.cpp:237] Train net output #0: loss = 5.02364 (* 1 = 5.02364 loss)
I0409 21:21:49.635727 25006 sgd_solver.cpp:105] Iteration 492, lr = 0.0090714
I0409 21:21:54.648432 25006 solver.cpp:218] Iteration 504 (2.39401 iter/s, 5.01252s/12 iters), loss = 5.06106
I0409 21:21:54.648479 25006 solver.cpp:237] Train net output #0: loss = 5.06106 (* 1 = 5.06106 loss)
I0409 21:21:54.648492 25006 sgd_solver.cpp:105] Iteration 504, lr = 0.00904986
I0409 21:21:54.895253 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:21:56.677695 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_510.caffemodel
I0409 21:22:07.885254 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_510.solverstate
I0409 21:22:12.253885 25006 solver.cpp:330] Iteration 510, Testing net (#0)
I0409 21:22:12.254002 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:22:16.481945 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:22:16.719792 25006 solver.cpp:397] Test net output #0: accuracy = 0.0196078
I0409 21:22:16.719841 25006 solver.cpp:397] Test net output #1: loss = 5.03301 (* 1 = 5.03301 loss)
I0409 21:22:18.569911 25006 solver.cpp:218] Iteration 516 (0.501661 iter/s, 23.9205s/12 iters), loss = 4.89244
I0409 21:22:18.569977 25006 solver.cpp:237] Train net output #0: loss = 4.89244 (* 1 = 4.89244 loss)
I0409 21:22:18.569989 25006 sgd_solver.cpp:105] Iteration 516, lr = 0.00902838
I0409 21:22:23.862229 25006 solver.cpp:218] Iteration 528 (2.26755 iter/s, 5.29207s/12 iters), loss = 5.01498
I0409 21:22:23.862268 25006 solver.cpp:237] Train net output #0: loss = 5.01498 (* 1 = 5.01498 loss)
I0409 21:22:23.862278 25006 sgd_solver.cpp:105] Iteration 528, lr = 0.00900694
I0409 21:22:28.854858 25006 solver.cpp:218] Iteration 540 (2.40366 iter/s, 4.99239s/12 iters), loss = 4.90202
I0409 21:22:28.854908 25006 solver.cpp:237] Train net output #0: loss = 4.90202 (* 1 = 4.90202 loss)
I0409 21:22:28.854920 25006 sgd_solver.cpp:105] Iteration 540, lr = 0.00898556
I0409 21:22:33.862499 25006 solver.cpp:218] Iteration 552 (2.39646 iter/s, 5.00739s/12 iters), loss = 5.05056
I0409 21:22:33.862552 25006 solver.cpp:237] Train net output #0: loss = 5.05056 (* 1 = 5.05056 loss)
I0409 21:22:33.862563 25006 sgd_solver.cpp:105] Iteration 552, lr = 0.00896423
I0409 21:22:38.867776 25006 solver.cpp:218] Iteration 564 (2.39759 iter/s, 5.00503s/12 iters), loss = 4.88719
I0409 21:22:38.867820 25006 solver.cpp:237] Train net output #0: loss = 4.88719 (* 1 = 4.88719 loss)
I0409 21:22:38.867830 25006 sgd_solver.cpp:105] Iteration 564, lr = 0.00894294
I0409 21:22:43.844957 25006 solver.cpp:218] Iteration 576 (2.41112 iter/s, 4.97693s/12 iters), loss = 4.99071
I0409 21:22:43.854032 25006 solver.cpp:237] Train net output #0: loss = 4.99071 (* 1 = 4.99071 loss)
I0409 21:22:43.854043 25006 sgd_solver.cpp:105] Iteration 576, lr = 0.00892171
I0409 21:22:48.830994 25006 solver.cpp:218] Iteration 588 (2.41121 iter/s, 4.97676s/12 iters), loss = 4.81656
I0409 21:22:48.831046 25006 solver.cpp:237] Train net output #0: loss = 4.81656 (* 1 = 4.81656 loss)
I0409 21:22:48.831058 25006 sgd_solver.cpp:105] Iteration 588, lr = 0.00890053
I0409 21:22:53.806129 25006 solver.cpp:218] Iteration 600 (2.41212 iter/s, 4.97489s/12 iters), loss = 4.79233
I0409 21:22:53.806174 25006 solver.cpp:237] Train net output #0: loss = 4.79233 (* 1 = 4.79233 loss)
I0409 21:22:53.806182 25006 sgd_solver.cpp:105] Iteration 600, lr = 0.0088794
I0409 21:22:56.214568 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:22:58.504938 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_612.caffemodel
I0409 21:23:05.161427 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_612.solverstate
I0409 21:23:11.507010 25006 solver.cpp:330] Iteration 612, Testing net (#0)
I0409 21:23:11.507047 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:23:15.660861 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:23:15.945657 25006 solver.cpp:397] Test net output #0: accuracy = 0.0269608
I0409 21:23:15.945690 25006 solver.cpp:397] Test net output #1: loss = 4.94624 (* 1 = 4.94624 loss)
I0409 21:23:16.034648 25006 solver.cpp:218] Iteration 612 (0.539868 iter/s, 22.2276s/12 iters), loss = 4.77508
I0409 21:23:16.034690 25006 solver.cpp:237] Train net output #0: loss = 4.77508 (* 1 = 4.77508 loss)
I0409 21:23:16.034701 25006 sgd_solver.cpp:105] Iteration 612, lr = 0.00885831
I0409 21:23:20.572325 25006 solver.cpp:218] Iteration 624 (2.64466 iter/s, 4.53744s/12 iters), loss = 4.88702
I0409 21:23:20.572377 25006 solver.cpp:237] Train net output #0: loss = 4.88702 (* 1 = 4.88702 loss)
I0409 21:23:20.572387 25006 sgd_solver.cpp:105] Iteration 624, lr = 0.00883728
I0409 21:23:25.640328 25006 solver.cpp:218] Iteration 636 (2.36792 iter/s, 5.06775s/12 iters), loss = 4.75024
I0409 21:23:25.640379 25006 solver.cpp:237] Train net output #0: loss = 4.75024 (* 1 = 4.75024 loss)
I0409 21:23:25.640391 25006 sgd_solver.cpp:105] Iteration 636, lr = 0.0088163
I0409 21:23:30.626956 25006 solver.cpp:218] Iteration 648 (2.40656 iter/s, 4.98638s/12 iters), loss = 4.94059
I0409 21:23:30.626997 25006 solver.cpp:237] Train net output #0: loss = 4.94059 (* 1 = 4.94059 loss)
I0409 21:23:30.627007 25006 sgd_solver.cpp:105] Iteration 648, lr = 0.00879537
I0409 21:23:35.865610 25006 solver.cpp:218] Iteration 660 (2.29078 iter/s, 5.2384s/12 iters), loss = 4.85685
I0409 21:23:35.865664 25006 solver.cpp:237] Train net output #0: loss = 4.85685 (* 1 = 4.85685 loss)
I0409 21:23:35.865676 25006 sgd_solver.cpp:105] Iteration 660, lr = 0.00877449
I0409 21:23:40.747090 25006 solver.cpp:218] Iteration 672 (2.4584 iter/s, 4.88123s/12 iters), loss = 4.73154
I0409 21:23:40.747134 25006 solver.cpp:237] Train net output #0: loss = 4.73154 (* 1 = 4.73154 loss)
I0409 21:23:40.747144 25006 sgd_solver.cpp:105] Iteration 672, lr = 0.00875366
I0409 21:23:45.632825 25006 solver.cpp:218] Iteration 684 (2.45625 iter/s, 4.88549s/12 iters), loss = 4.79228
I0409 21:23:45.632869 25006 solver.cpp:237] Train net output #0: loss = 4.79228 (* 1 = 4.79228 loss)
I0409 21:23:45.632877 25006 sgd_solver.cpp:105] Iteration 684, lr = 0.00873287
I0409 21:23:46.403911 25006 blocking_queue.cpp:49] Waiting for data
I0409 21:23:50.549552 25006 solver.cpp:218] Iteration 696 (2.44077 iter/s, 4.91648s/12 iters), loss = 4.68116
I0409 21:23:50.549597 25006 solver.cpp:237] Train net output #0: loss = 4.68116 (* 1 = 4.68116 loss)
I0409 21:23:50.549605 25006 sgd_solver.cpp:105] Iteration 696, lr = 0.00871214
I0409 21:23:55.280043 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:23:55.676234 25006 solver.cpp:218] Iteration 708 (2.34081 iter/s, 5.12642s/12 iters), loss = 4.95
I0409 21:23:55.676297 25006 solver.cpp:237] Train net output #0: loss = 4.95 (* 1 = 4.95 loss)
I0409 21:23:55.676312 25006 sgd_solver.cpp:105] Iteration 708, lr = 0.00869145
I0409 21:23:57.673856 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_714.caffemodel
I0409 21:24:01.157616 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_714.solverstate
I0409 21:24:04.717136 25006 solver.cpp:330] Iteration 714, Testing net (#0)
I0409 21:24:04.717170 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:24:08.762495 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:24:09.082912 25006 solver.cpp:397] Test net output #0: accuracy = 0.03125
I0409 21:24:09.082965 25006 solver.cpp:397] Test net output #1: loss = 4.94555 (* 1 = 4.94555 loss)
I0409 21:24:10.846974 25006 solver.cpp:218] Iteration 720 (0.79103 iter/s, 15.1701s/12 iters), loss = 4.83311
I0409 21:24:10.847043 25006 solver.cpp:237] Train net output #0: loss = 4.83311 (* 1 = 4.83311 loss)
I0409 21:24:10.847059 25006 sgd_solver.cpp:105] Iteration 720, lr = 0.00867082
I0409 21:24:15.877148 25006 solver.cpp:218] Iteration 732 (2.38573 iter/s, 5.0299s/12 iters), loss = 4.58125
I0409 21:24:15.877199 25006 solver.cpp:237] Train net output #0: loss = 4.58125 (* 1 = 4.58125 loss)
I0409 21:24:15.877210 25006 sgd_solver.cpp:105] Iteration 732, lr = 0.00865023
I0409 21:24:20.950593 25006 solver.cpp:218] Iteration 744 (2.36538 iter/s, 5.07319s/12 iters), loss = 4.73605
I0409 21:24:20.950718 25006 solver.cpp:237] Train net output #0: loss = 4.73605 (* 1 = 4.73605 loss)
I0409 21:24:20.950736 25006 sgd_solver.cpp:105] Iteration 744, lr = 0.0086297
I0409 21:24:26.297324 25006 solver.cpp:218] Iteration 756 (2.2445 iter/s, 5.3464s/12 iters), loss = 4.778
I0409 21:24:26.297374 25006 solver.cpp:237] Train net output #0: loss = 4.778 (* 1 = 4.778 loss)
I0409 21:24:26.297384 25006 sgd_solver.cpp:105] Iteration 756, lr = 0.00860921
I0409 21:24:31.326455 25006 solver.cpp:218] Iteration 768 (2.38623 iter/s, 5.02886s/12 iters), loss = 4.76997
I0409 21:24:31.326575 25006 solver.cpp:237] Train net output #0: loss = 4.76997 (* 1 = 4.76997 loss)
I0409 21:24:31.326588 25006 sgd_solver.cpp:105] Iteration 768, lr = 0.00858877
I0409 21:24:36.386443 25006 solver.cpp:218] Iteration 780 (2.37169 iter/s, 5.05969s/12 iters), loss = 4.77363
I0409 21:24:36.386502 25006 solver.cpp:237] Train net output #0: loss = 4.77363 (* 1 = 4.77363 loss)
I0409 21:24:36.386514 25006 sgd_solver.cpp:105] Iteration 780, lr = 0.00856838
I0409 21:24:41.627835 25006 solver.cpp:218] Iteration 792 (2.28959 iter/s, 5.24112s/12 iters), loss = 4.69495
I0409 21:24:41.627883 25006 solver.cpp:237] Train net output #0: loss = 4.69495 (* 1 = 4.69495 loss)
I0409 21:24:41.627892 25006 sgd_solver.cpp:105] Iteration 792, lr = 0.00854803
I0409 21:24:46.707801 25006 solver.cpp:218] Iteration 804 (2.36234 iter/s, 5.07971s/12 iters), loss = 4.66756
I0409 21:24:46.707849 25006 solver.cpp:237] Train net output #0: loss = 4.66756 (* 1 = 4.66756 loss)
I0409 21:24:46.707859 25006 sgd_solver.cpp:105] Iteration 804, lr = 0.00852774
I0409 21:24:48.478039 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:24:51.286521 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_816.caffemodel
I0409 21:24:53.519134 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_816.solverstate
I0409 21:24:55.565881 25006 solver.cpp:330] Iteration 816, Testing net (#0)
I0409 21:24:55.565917 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:24:59.800426 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:25:00.164482 25006 solver.cpp:397] Test net output #0: accuracy = 0.0471814
I0409 21:25:00.164515 25006 solver.cpp:397] Test net output #1: loss = 4.8725 (* 1 = 4.8725 loss)
I0409 21:25:00.250975 25006 solver.cpp:218] Iteration 816 (0.886093 iter/s, 13.5426s/12 iters), loss = 4.68849
I0409 21:25:00.251022 25006 solver.cpp:237] Train net output #0: loss = 4.68849 (* 1 = 4.68849 loss)
I0409 21:25:00.251031 25006 sgd_solver.cpp:105] Iteration 816, lr = 0.00850749
I0409 21:25:04.500370 25006 solver.cpp:218] Iteration 828 (2.82408 iter/s, 4.24917s/12 iters), loss = 4.78497
I0409 21:25:04.500432 25006 solver.cpp:237] Train net output #0: loss = 4.78497 (* 1 = 4.78497 loss)
I0409 21:25:04.500447 25006 sgd_solver.cpp:105] Iteration 828, lr = 0.00848729
I0409 21:25:09.601084 25006 solver.cpp:218] Iteration 840 (2.35273 iter/s, 5.10045s/12 iters), loss = 4.5612
I0409 21:25:09.601125 25006 solver.cpp:237] Train net output #0: loss = 4.5612 (* 1 = 4.5612 loss)
I0409 21:25:09.601135 25006 sgd_solver.cpp:105] Iteration 840, lr = 0.00846714
I0409 21:25:14.597492 25006 solver.cpp:218] Iteration 852 (2.40185 iter/s, 4.99616s/12 iters), loss = 4.73394
I0409 21:25:14.597537 25006 solver.cpp:237] Train net output #0: loss = 4.73394 (* 1 = 4.73394 loss)
I0409 21:25:14.597548 25006 sgd_solver.cpp:105] Iteration 852, lr = 0.00844704
I0409 21:25:19.608204 25006 solver.cpp:218] Iteration 864 (2.39499 iter/s, 5.01046s/12 iters), loss = 4.52087
I0409 21:25:19.608258 25006 solver.cpp:237] Train net output #0: loss = 4.52087 (* 1 = 4.52087 loss)
I0409 21:25:19.608268 25006 sgd_solver.cpp:105] Iteration 864, lr = 0.00842698
I0409 21:25:24.583804 25006 solver.cpp:218] Iteration 876 (2.41189 iter/s, 4.97535s/12 iters), loss = 4.51339
I0409 21:25:24.583889 25006 solver.cpp:237] Train net output #0: loss = 4.51339 (* 1 = 4.51339 loss)
I0409 21:25:24.583899 25006 sgd_solver.cpp:105] Iteration 876, lr = 0.00840698
I0409 21:25:29.701161 25006 solver.cpp:218] Iteration 888 (2.3451 iter/s, 5.11706s/12 iters), loss = 4.57289
I0409 21:25:29.701215 25006 solver.cpp:237] Train net output #0: loss = 4.57289 (* 1 = 4.57289 loss)
I0409 21:25:29.701227 25006 sgd_solver.cpp:105] Iteration 888, lr = 0.00838702
I0409 21:25:35.154742 25006 solver.cpp:218] Iteration 900 (2.2005 iter/s, 5.4533s/12 iters), loss = 4.65776
I0409 21:25:35.154796 25006 solver.cpp:237] Train net output #0: loss = 4.65776 (* 1 = 4.65776 loss)
I0409 21:25:35.154806 25006 sgd_solver.cpp:105] Iteration 900, lr = 0.0083671
I0409 21:25:39.086783 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:25:40.186697 25006 solver.cpp:218] Iteration 912 (2.38488 iter/s, 5.0317s/12 iters), loss = 4.35051
I0409 21:25:40.186741 25006 solver.cpp:237] Train net output #0: loss = 4.35051 (* 1 = 4.35051 loss)
I0409 21:25:40.186753 25006 sgd_solver.cpp:105] Iteration 912, lr = 0.00834724
I0409 21:25:42.269886 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_918.caffemodel
I0409 21:25:46.154075 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_918.solverstate
I0409 21:25:49.362663 25006 solver.cpp:330] Iteration 918, Testing net (#0)
I0409 21:25:49.362691 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:25:53.552366 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:25:53.954138 25006 solver.cpp:397] Test net output #0: accuracy = 0.0459559
I0409 21:25:53.954178 25006 solver.cpp:397] Test net output #1: loss = 4.74693 (* 1 = 4.74693 loss)
I0409 21:25:55.798359 25006 solver.cpp:218] Iteration 924 (0.768689 iter/s, 15.611s/12 iters), loss = 4.40773
I0409 21:25:55.798506 25006 solver.cpp:237] Train net output #0: loss = 4.40773 (* 1 = 4.40773 loss)
I0409 21:25:55.798519 25006 sgd_solver.cpp:105] Iteration 924, lr = 0.00832742
I0409 21:26:00.852022 25006 solver.cpp:218] Iteration 936 (2.37468 iter/s, 5.05331s/12 iters), loss = 4.55299
I0409 21:26:00.852066 25006 solver.cpp:237] Train net output #0: loss = 4.55299 (* 1 = 4.55299 loss)
I0409 21:26:00.852075 25006 sgd_solver.cpp:105] Iteration 936, lr = 0.00830765
I0409 21:26:05.929105 25006 solver.cpp:218] Iteration 948 (2.36368 iter/s, 5.07683s/12 iters), loss = 4.50283
I0409 21:26:05.929157 25006 solver.cpp:237] Train net output #0: loss = 4.50283 (* 1 = 4.50283 loss)
I0409 21:26:05.929167 25006 sgd_solver.cpp:105] Iteration 948, lr = 0.00828793
I0409 21:26:10.948823 25006 solver.cpp:218] Iteration 960 (2.39069 iter/s, 5.01946s/12 iters), loss = 4.27794
I0409 21:26:10.948873 25006 solver.cpp:237] Train net output #0: loss = 4.27794 (* 1 = 4.27794 loss)
I0409 21:26:10.948882 25006 sgd_solver.cpp:105] Iteration 960, lr = 0.00826825
I0409 21:26:15.966064 25006 solver.cpp:218] Iteration 972 (2.39187 iter/s, 5.01699s/12 iters), loss = 4.28839
I0409 21:26:15.966101 25006 solver.cpp:237] Train net output #0: loss = 4.28839 (* 1 = 4.28839 loss)
I0409 21:26:15.966110 25006 sgd_solver.cpp:105] Iteration 972, lr = 0.00824862
I0409 21:26:21.012773 25006 solver.cpp:218] Iteration 984 (2.37791 iter/s, 5.04646s/12 iters), loss = 4.25459
I0409 21:26:21.012826 25006 solver.cpp:237] Train net output #0: loss = 4.25459 (* 1 = 4.25459 loss)
I0409 21:26:21.012840 25006 sgd_solver.cpp:105] Iteration 984, lr = 0.00822903
I0409 21:26:26.031873 25006 solver.cpp:218] Iteration 996 (2.39099 iter/s, 5.01884s/12 iters), loss = 4.27398
I0409 21:26:26.031987 25006 solver.cpp:237] Train net output #0: loss = 4.27398 (* 1 = 4.27398 loss)
I0409 21:26:26.032001 25006 sgd_solver.cpp:105] Iteration 996, lr = 0.0082095
I0409 21:26:31.133199 25006 solver.cpp:218] Iteration 1008 (2.35248 iter/s, 5.101s/12 iters), loss = 4.4137
I0409 21:26:31.133246 25006 solver.cpp:237] Train net output #0: loss = 4.4137 (* 1 = 4.4137 loss)
I0409 21:26:31.133255 25006 sgd_solver.cpp:105] Iteration 1008, lr = 0.00819001
I0409 21:26:32.169040 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:26:35.654772 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_1020.caffemodel
I0409 21:26:37.848278 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_1020.solverstate
I0409 21:26:39.943382 25006 solver.cpp:330] Iteration 1020, Testing net (#0)
I0409 21:26:39.943406 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:26:44.017765 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:26:44.488155 25006 solver.cpp:397] Test net output #0: accuracy = 0.060049
I0409 21:26:44.488205 25006 solver.cpp:397] Test net output #1: loss = 4.6743 (* 1 = 4.6743 loss)
I0409 21:26:44.577378 25006 solver.cpp:218] Iteration 1020 (0.892618 iter/s, 13.4436s/12 iters), loss = 4.16501
I0409 21:26:44.577426 25006 solver.cpp:237] Train net output #0: loss = 4.16501 (* 1 = 4.16501 loss)
I0409 21:26:44.577437 25006 sgd_solver.cpp:105] Iteration 1020, lr = 0.00817056
I0409 21:26:48.961488 25006 solver.cpp:218] Iteration 1032 (2.73731 iter/s, 4.38387s/12 iters), loss = 4.34199
I0409 21:26:48.961539 25006 solver.cpp:237] Train net output #0: loss = 4.34199 (* 1 = 4.34199 loss)
I0409 21:26:48.961550 25006 sgd_solver.cpp:105] Iteration 1032, lr = 0.00815116
I0409 21:26:54.147981 25006 solver.cpp:218] Iteration 1044 (2.31382 iter/s, 5.18623s/12 iters), loss = 4.32965
I0409 21:26:54.148020 25006 solver.cpp:237] Train net output #0: loss = 4.32965 (* 1 = 4.32965 loss)
I0409 21:26:54.148030 25006 sgd_solver.cpp:105] Iteration 1044, lr = 0.00813181
I0409 21:26:59.353721 25006 solver.cpp:218] Iteration 1056 (2.30527 iter/s, 5.20547s/12 iters), loss = 4.43256
I0409 21:26:59.353859 25006 solver.cpp:237] Train net output #0: loss = 4.43256 (* 1 = 4.43256 loss)
I0409 21:26:59.353871 25006 sgd_solver.cpp:105] Iteration 1056, lr = 0.0081125
I0409 21:27:04.704378 25006 solver.cpp:218] Iteration 1068 (2.24286 iter/s, 5.3503s/12 iters), loss = 4.36487
I0409 21:27:04.704421 25006 solver.cpp:237] Train net output #0: loss = 4.36487 (* 1 = 4.36487 loss)
I0409 21:27:04.704428 25006 sgd_solver.cpp:105] Iteration 1068, lr = 0.00809324
I0409 21:27:09.935338 25006 solver.cpp:218] Iteration 1080 (2.29414 iter/s, 5.23071s/12 iters), loss = 4.19463
I0409 21:27:09.935372 25006 solver.cpp:237] Train net output #0: loss = 4.19463 (* 1 = 4.19463 loss)
I0409 21:27:09.935380 25006 sgd_solver.cpp:105] Iteration 1080, lr = 0.00807403
I0409 21:27:15.104935 25006 solver.cpp:218] Iteration 1092 (2.32138 iter/s, 5.16934s/12 iters), loss = 4.32168
I0409 21:27:15.104986 25006 solver.cpp:237] Train net output #0: loss = 4.32168 (* 1 = 4.32168 loss)
I0409 21:27:15.104997 25006 sgd_solver.cpp:105] Iteration 1092, lr = 0.00805486
I0409 21:27:20.091859 25006 solver.cpp:218] Iteration 1104 (2.40642 iter/s, 4.98667s/12 iters), loss = 4.33616
I0409 21:27:20.091903 25006 solver.cpp:237] Train net output #0: loss = 4.33616 (* 1 = 4.33616 loss)
I0409 21:27:20.091914 25006 sgd_solver.cpp:105] Iteration 1104, lr = 0.00803573
I0409 21:27:23.190696 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:27:25.016727 25006 solver.cpp:218] Iteration 1116 (2.43674 iter/s, 4.92462s/12 iters), loss = 4.53764
I0409 21:27:25.016779 25006 solver.cpp:237] Train net output #0: loss = 4.53764 (* 1 = 4.53764 loss)
I0409 21:27:25.016790 25006 sgd_solver.cpp:105] Iteration 1116, lr = 0.00801666
I0409 21:27:27.060977 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_1122.caffemodel
I0409 21:27:29.410876 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_1122.solverstate
I0409 21:27:31.068636 25006 solver.cpp:330] Iteration 1122, Testing net (#0)
I0409 21:27:31.068666 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:27:35.165748 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:27:35.651257 25006 solver.cpp:397] Test net output #0: accuracy = 0.0729167
I0409 21:27:35.651293 25006 solver.cpp:397] Test net output #1: loss = 4.52998 (* 1 = 4.52998 loss)
I0409 21:27:37.563272 25006 solver.cpp:218] Iteration 1128 (0.956481 iter/s, 12.546s/12 iters), loss = 4.27021
I0409 21:27:37.563330 25006 solver.cpp:237] Train net output #0: loss = 4.27021 (* 1 = 4.27021 loss)
I0409 21:27:37.563342 25006 sgd_solver.cpp:105] Iteration 1128, lr = 0.00799762
I0409 21:27:42.694082 25006 solver.cpp:218] Iteration 1140 (2.33893 iter/s, 5.13054s/12 iters), loss = 4.36463
I0409 21:27:42.694128 25006 solver.cpp:237] Train net output #0: loss = 4.36463 (* 1 = 4.36463 loss)
I0409 21:27:42.694136 25006 sgd_solver.cpp:105] Iteration 1140, lr = 0.00797863
I0409 21:27:47.673067 25006 solver.cpp:218] Iteration 1152 (2.41025 iter/s, 4.97873s/12 iters), loss = 4.1321
I0409 21:27:47.673110 25006 solver.cpp:237] Train net output #0: loss = 4.1321 (* 1 = 4.1321 loss)
I0409 21:27:47.673118 25006 sgd_solver.cpp:105] Iteration 1152, lr = 0.00795969
I0409 21:27:52.686916 25006 solver.cpp:218] Iteration 1164 (2.39349 iter/s, 5.01359s/12 iters), loss = 4.05049
I0409 21:27:52.686964 25006 solver.cpp:237] Train net output #0: loss = 4.05049 (* 1 = 4.05049 loss)
I0409 21:27:52.686973 25006 sgd_solver.cpp:105] Iteration 1164, lr = 0.00794079
I0409 21:27:57.714699 25006 solver.cpp:218] Iteration 1176 (2.38686 iter/s, 5.02753s/12 iters), loss = 4.05463
I0409 21:27:57.714736 25006 solver.cpp:237] Train net output #0: loss = 4.05463 (* 1 = 4.05463 loss)
I0409 21:27:57.714746 25006 sgd_solver.cpp:105] Iteration 1176, lr = 0.00792194
I0409 21:28:02.747599 25006 solver.cpp:218] Iteration 1188 (2.38443 iter/s, 5.03265s/12 iters), loss = 4.08867
I0409 21:28:02.747745 25006 solver.cpp:237] Train net output #0: loss = 4.08867 (* 1 = 4.08867 loss)
I0409 21:28:02.747758 25006 sgd_solver.cpp:105] Iteration 1188, lr = 0.00790313
I0409 21:28:07.745945 25006 solver.cpp:218] Iteration 1200 (2.40096 iter/s, 4.998s/12 iters), loss = 4.2992
I0409 21:28:07.745997 25006 solver.cpp:237] Train net output #0: loss = 4.2992 (* 1 = 4.2992 loss)
I0409 21:28:07.746006 25006 sgd_solver.cpp:105] Iteration 1200, lr = 0.00788437
I0409 21:28:12.655800 25006 solver.cpp:218] Iteration 1212 (2.44419 iter/s, 4.9096s/12 iters), loss = 4.27938
I0409 21:28:12.655848 25006 solver.cpp:237] Train net output #0: loss = 4.27938 (* 1 = 4.27938 loss)
I0409 21:28:12.655859 25006 sgd_solver.cpp:105] Iteration 1212, lr = 0.00786565
I0409 21:28:12.933436 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:28:17.094396 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_1224.caffemodel
I0409 21:28:20.601311 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_1224.solverstate
I0409 21:28:23.621901 25006 solver.cpp:330] Iteration 1224, Testing net (#0)
I0409 21:28:23.621937 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:28:27.945014 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:28:28.688989 25006 solver.cpp:397] Test net output #0: accuracy = 0.0845588
I0409 21:28:28.689028 25006 solver.cpp:397] Test net output #1: loss = 4.47033 (* 1 = 4.47033 loss)
I0409 21:28:28.777987 25006 solver.cpp:218] Iteration 1224 (0.74435 iter/s, 16.1215s/12 iters), loss = 4.13212
I0409 21:28:28.778048 25006 solver.cpp:237] Train net output #0: loss = 4.13212 (* 1 = 4.13212 loss)
I0409 21:28:28.778057 25006 sgd_solver.cpp:105] Iteration 1224, lr = 0.00784697
I0409 21:28:33.790025 25006 solver.cpp:218] Iteration 1236 (2.39437 iter/s, 5.01176s/12 iters), loss = 4.18828
I0409 21:28:33.797467 25006 solver.cpp:237] Train net output #0: loss = 4.18828 (* 1 = 4.18828 loss)
I0409 21:28:33.797484 25006 sgd_solver.cpp:105] Iteration 1236, lr = 0.00782834
I0409 21:28:38.843673 25006 solver.cpp:218] Iteration 1248 (2.37812 iter/s, 5.04601s/12 iters), loss = 3.97864
I0409 21:28:38.843720 25006 solver.cpp:237] Train net output #0: loss = 3.97864 (* 1 = 3.97864 loss)
I0409 21:28:38.843729 25006 sgd_solver.cpp:105] Iteration 1248, lr = 0.00780976
I0409 21:28:44.528458 25006 solver.cpp:218] Iteration 1260 (2.11101 iter/s, 5.68449s/12 iters), loss = 4.24158
I0409 21:28:44.528532 25006 solver.cpp:237] Train net output #0: loss = 4.24158 (* 1 = 4.24158 loss)
I0409 21:28:44.528544 25006 sgd_solver.cpp:105] Iteration 1260, lr = 0.00779122
I0409 21:28:49.565876 25006 solver.cpp:218] Iteration 1272 (2.38231 iter/s, 5.03713s/12 iters), loss = 4.05181
I0409 21:28:49.565938 25006 solver.cpp:237] Train net output #0: loss = 4.05181 (* 1 = 4.05181 loss)
I0409 21:28:49.565951 25006 sgd_solver.cpp:105] Iteration 1272, lr = 0.00777272
I0409 21:28:54.656388 25006 solver.cpp:218] Iteration 1284 (2.35745 iter/s, 5.09024s/12 iters), loss = 4.02075
I0409 21:28:54.656435 25006 solver.cpp:237] Train net output #0: loss = 4.02075 (* 1 = 4.02075 loss)
I0409 21:28:54.656443 25006 sgd_solver.cpp:105] Iteration 1284, lr = 0.00775426
I0409 21:29:00.031342 25006 solver.cpp:218] Iteration 1296 (2.23269 iter/s, 5.37467s/12 iters), loss = 3.66074
I0409 21:29:00.031402 25006 solver.cpp:237] Train net output #0: loss = 3.66074 (* 1 = 3.66074 loss)
I0409 21:29:00.031412 25006 sgd_solver.cpp:105] Iteration 1296, lr = 0.00773585
I0409 21:29:05.802429 25006 solver.cpp:218] Iteration 1308 (2.07944 iter/s, 5.77079s/12 iters), loss = 4.20688
I0409 21:29:05.802556 25006 solver.cpp:237] Train net output #0: loss = 4.20688 (* 1 = 4.20688 loss)
I0409 21:29:05.802565 25006 sgd_solver.cpp:105] Iteration 1308, lr = 0.00771749
I0409 21:29:09.092674 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:29:11.566179 25006 solver.cpp:218] Iteration 1320 (2.08211 iter/s, 5.76338s/12 iters), loss = 3.77385
I0409 21:29:11.566233 25006 solver.cpp:237] Train net output #0: loss = 3.77385 (* 1 = 3.77385 loss)
I0409 21:29:11.566243 25006 sgd_solver.cpp:105] Iteration 1320, lr = 0.00769916
I0409 21:29:13.595446 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_1326.caffemodel
I0409 21:29:19.955922 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_1326.solverstate
I0409 21:29:26.343186 25006 solver.cpp:330] Iteration 1326, Testing net (#0)
I0409 21:29:26.343207 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:29:31.835232 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:29:32.422439 25006 solver.cpp:397] Test net output #0: accuracy = 0.101103
I0409 21:29:32.422475 25006 solver.cpp:397] Test net output #1: loss = 4.30909 (* 1 = 4.30909 loss)
I0409 21:29:34.284646 25006 solver.cpp:218] Iteration 1332 (0.528227 iter/s, 22.7175s/12 iters), loss = 3.87092
I0409 21:29:34.284700 25006 solver.cpp:237] Train net output #0: loss = 3.87092 (* 1 = 3.87092 loss)
I0409 21:29:34.284710 25006 sgd_solver.cpp:105] Iteration 1332, lr = 0.00768088
I0409 21:29:40.590667 25006 solver.cpp:218] Iteration 1344 (1.90304 iter/s, 6.3057s/12 iters), loss = 3.93979
I0409 21:29:40.590797 25006 solver.cpp:237] Train net output #0: loss = 3.93979 (* 1 = 3.93979 loss)
I0409 21:29:40.590811 25006 sgd_solver.cpp:105] Iteration 1344, lr = 0.00766265
I0409 21:29:46.599822 25006 solver.cpp:218] Iteration 1356 (1.99708 iter/s, 6.00877s/12 iters), loss = 3.71933
I0409 21:29:46.599867 25006 solver.cpp:237] Train net output #0: loss = 3.71933 (* 1 = 3.71933 loss)
I0409 21:29:46.599875 25006 sgd_solver.cpp:105] Iteration 1356, lr = 0.00764446
I0409 21:29:51.791576 25006 solver.cpp:218] Iteration 1368 (2.31148 iter/s, 5.19149s/12 iters), loss = 3.99624
I0409 21:29:51.797927 25006 solver.cpp:237] Train net output #0: loss = 3.99624 (* 1 = 3.99624 loss)
I0409 21:29:51.797984 25006 sgd_solver.cpp:105] Iteration 1368, lr = 0.00762631
I0409 21:29:53.093566 25006 blocking_queue.cpp:49] Waiting for data
I0409 21:29:56.878633 25006 solver.cpp:218] Iteration 1380 (2.36197 iter/s, 5.08051s/12 iters), loss = 3.58786
I0409 21:29:56.878696 25006 solver.cpp:237] Train net output #0: loss = 3.58786 (* 1 = 3.58786 loss)
I0409 21:29:56.878708 25006 sgd_solver.cpp:105] Iteration 1380, lr = 0.0076082
I0409 21:30:01.838579 25006 solver.cpp:218] Iteration 1392 (2.41952 iter/s, 4.95967s/12 iters), loss = 4.06363
I0409 21:30:01.838641 25006 solver.cpp:237] Train net output #0: loss = 4.06363 (* 1 = 4.06363 loss)
I0409 21:30:01.838654 25006 sgd_solver.cpp:105] Iteration 1392, lr = 0.00759014
I0409 21:30:06.911239 25006 solver.cpp:218] Iteration 1404 (2.36575 iter/s, 5.07239s/12 iters), loss = 4.02848
I0409 21:30:06.911280 25006 solver.cpp:237] Train net output #0: loss = 4.02848 (* 1 = 4.02848 loss)
I0409 21:30:06.911288 25006 sgd_solver.cpp:105] Iteration 1404, lr = 0.00757212
I0409 21:30:12.384800 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:30:12.841610 25006 solver.cpp:218] Iteration 1416 (2.02358 iter/s, 5.93008s/12 iters), loss = 3.99209
I0409 21:30:12.841655 25006 solver.cpp:237] Train net output #0: loss = 3.99209 (* 1 = 3.99209 loss)
I0409 21:30:12.841665 25006 sgd_solver.cpp:105] Iteration 1416, lr = 0.00755414
I0409 21:30:18.596410 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_1428.caffemodel
I0409 21:30:22.636262 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_1428.solverstate
I0409 21:30:27.248139 25006 solver.cpp:330] Iteration 1428, Testing net (#0)
I0409 21:30:27.248167 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:30:31.255426 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:30:31.863409 25006 solver.cpp:397] Test net output #0: accuracy = 0.0882353
I0409 21:30:31.863445 25006 solver.cpp:397] Test net output #1: loss = 4.35143 (* 1 = 4.35143 loss)
I0409 21:30:31.952636 25006 solver.cpp:218] Iteration 1428 (0.627937 iter/s, 19.1102s/12 iters), loss = 3.93497
I0409 21:30:31.952689 25006 solver.cpp:237] Train net output #0: loss = 3.93497 (* 1 = 3.93497 loss)
I0409 21:30:31.952700 25006 sgd_solver.cpp:105] Iteration 1428, lr = 0.0075362
I0409 21:30:36.468180 25006 solver.cpp:218] Iteration 1440 (2.65763 iter/s, 4.5153s/12 iters), loss = 3.68452
I0409 21:30:36.468225 25006 solver.cpp:237] Train net output #0: loss = 3.68452 (* 1 = 3.68452 loss)
I0409 21:30:36.468233 25006 sgd_solver.cpp:105] Iteration 1440, lr = 0.00751831
I0409 21:30:42.046445 25006 solver.cpp:218] Iteration 1452 (2.15132 iter/s, 5.57798s/12 iters), loss = 3.90676
I0409 21:30:42.046504 25006 solver.cpp:237] Train net output #0: loss = 3.90676 (* 1 = 3.90676 loss)
I0409 21:30:42.046515 25006 sgd_solver.cpp:105] Iteration 1452, lr = 0.00750046
I0409 21:30:48.266369 25006 solver.cpp:218] Iteration 1464 (1.92938 iter/s, 6.2196s/12 iters), loss = 3.82121
I0409 21:30:48.266455 25006 solver.cpp:237] Train net output #0: loss = 3.82121 (* 1 = 3.82121 loss)
I0409 21:30:48.266464 25006 sgd_solver.cpp:105] Iteration 1464, lr = 0.00748265
I0409 21:30:54.120478 25006 solver.cpp:218] Iteration 1476 (2.04996 iter/s, 5.85377s/12 iters), loss = 3.58564
I0409 21:30:54.120533 25006 solver.cpp:237] Train net output #0: loss = 3.58564 (* 1 = 3.58564 loss)
I0409 21:30:54.120544 25006 sgd_solver.cpp:105] Iteration 1476, lr = 0.00746489
I0409 21:30:59.307680 25006 solver.cpp:218] Iteration 1488 (2.31351 iter/s, 5.18692s/12 iters), loss = 3.50932
I0409 21:30:59.307744 25006 solver.cpp:237] Train net output #0: loss = 3.50932 (* 1 = 3.50932 loss)
I0409 21:30:59.307756 25006 sgd_solver.cpp:105] Iteration 1488, lr = 0.00744716
I0409 21:31:04.483104 25006 solver.cpp:218] Iteration 1500 (2.31878 iter/s, 5.17514s/12 iters), loss = 3.43114
I0409 21:31:04.483150 25006 solver.cpp:237] Train net output #0: loss = 3.43114 (* 1 = 3.43114 loss)
I0409 21:31:04.483158 25006 sgd_solver.cpp:105] Iteration 1500, lr = 0.00742948
I0409 21:31:10.803268 25006 solver.cpp:218] Iteration 1512 (1.89878 iter/s, 6.31985s/12 iters), loss = 3.71788
I0409 21:31:10.803324 25006 solver.cpp:237] Train net output #0: loss = 3.71788 (* 1 = 3.71788 loss)
I0409 21:31:10.803335 25006 sgd_solver.cpp:105] Iteration 1512, lr = 0.00741184
I0409 21:31:12.872323 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:31:16.666132 25006 solver.cpp:218] Iteration 1524 (2.04689 iter/s, 5.86256s/12 iters), loss = 3.87494
I0409 21:31:16.666186 25006 solver.cpp:237] Train net output #0: loss = 3.87494 (* 1 = 3.87494 loss)
I0409 21:31:16.666198 25006 sgd_solver.cpp:105] Iteration 1524, lr = 0.00739425
I0409 21:31:19.079679 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_1530.caffemodel
I0409 21:31:21.371680 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_1530.solverstate
I0409 21:31:23.010061 25006 solver.cpp:330] Iteration 1530, Testing net (#0)
I0409 21:31:23.010079 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:31:26.892192 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:31:27.760623 25006 solver.cpp:397] Test net output #0: accuracy = 0.10723
I0409 21:31:27.760661 25006 solver.cpp:397] Test net output #1: loss = 4.33156 (* 1 = 4.33156 loss)
I0409 21:31:30.124750 25006 solver.cpp:218] Iteration 1536 (0.891662 iter/s, 13.458s/12 iters), loss = 3.67054
I0409 21:31:30.124809 25006 solver.cpp:237] Train net output #0: loss = 3.67054 (* 1 = 3.67054 loss)
I0409 21:31:30.124819 25006 sgd_solver.cpp:105] Iteration 1536, lr = 0.00737669
I0409 21:31:35.331647 25006 solver.cpp:218] Iteration 1548 (2.30476 iter/s, 5.20662s/12 iters), loss = 3.2288
I0409 21:31:35.331701 25006 solver.cpp:237] Train net output #0: loss = 3.2288 (* 1 = 3.2288 loss)
I0409 21:31:35.331712 25006 sgd_solver.cpp:105] Iteration 1548, lr = 0.00735918
I0409 21:31:41.220052 25006 solver.cpp:218] Iteration 1560 (2.03801 iter/s, 5.8881s/12 iters), loss = 3.7594
I0409 21:31:41.220108 25006 solver.cpp:237] Train net output #0: loss = 3.7594 (* 1 = 3.7594 loss)
I0409 21:31:41.220120 25006 sgd_solver.cpp:105] Iteration 1560, lr = 0.00734171
I0409 21:31:46.823110 25006 solver.cpp:218] Iteration 1572 (2.1418 iter/s, 5.60276s/12 iters), loss = 3.64094
I0409 21:31:46.823184 25006 solver.cpp:237] Train net output #0: loss = 3.64094 (* 1 = 3.64094 loss)
I0409 21:31:46.823199 25006 sgd_solver.cpp:105] Iteration 1572, lr = 0.00732427
I0409 21:31:52.025689 25006 solver.cpp:218] Iteration 1584 (2.30668 iter/s, 5.20228s/12 iters), loss = 3.46392
I0409 21:31:52.025804 25006 solver.cpp:237] Train net output #0: loss = 3.46392 (* 1 = 3.46392 loss)
I0409 21:31:52.025815 25006 sgd_solver.cpp:105] Iteration 1584, lr = 0.00730688
I0409 21:31:57.511380 25006 solver.cpp:218] Iteration 1596 (2.18765 iter/s, 5.48535s/12 iters), loss = 3.64296
I0409 21:31:57.511432 25006 solver.cpp:237] Train net output #0: loss = 3.64296 (* 1 = 3.64296 loss)
I0409 21:31:57.511445 25006 sgd_solver.cpp:105] Iteration 1596, lr = 0.00728954
I0409 21:32:02.897712 25006 solver.cpp:218] Iteration 1608 (2.22798 iter/s, 5.38604s/12 iters), loss = 3.72975
I0409 21:32:02.897773 25006 solver.cpp:237] Train net output #0: loss = 3.72975 (* 1 = 3.72975 loss)
I0409 21:32:02.897783 25006 sgd_solver.cpp:105] Iteration 1608, lr = 0.00727223
I0409 21:32:07.755193 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:32:08.832834 25006 solver.cpp:218] Iteration 1620 (2.02197 iter/s, 5.93481s/12 iters), loss = 3.5997
I0409 21:32:08.832893 25006 solver.cpp:237] Train net output #0: loss = 3.5997 (* 1 = 3.5997 loss)
I0409 21:32:08.832906 25006 sgd_solver.cpp:105] Iteration 1620, lr = 0.00725496
I0409 21:32:13.599884 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_1632.caffemodel
I0409 21:32:17.602421 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_1632.solverstate
I0409 21:32:20.237185 25006 solver.cpp:330] Iteration 1632, Testing net (#0)
I0409 21:32:20.237211 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:32:24.444134 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:32:25.208942 25006 solver.cpp:397] Test net output #0: accuracy = 0.0894608
I0409 21:32:25.208968 25006 solver.cpp:397] Test net output #1: loss = 4.49101 (* 1 = 4.49101 loss)
I0409 21:32:25.297920 25006 solver.cpp:218] Iteration 1632 (0.728847 iter/s, 16.4644s/12 iters), loss = 3.71572
I0409 21:32:25.298007 25006 solver.cpp:237] Train net output #0: loss = 3.71572 (* 1 = 3.71572 loss)
I0409 21:32:25.298017 25006 sgd_solver.cpp:105] Iteration 1632, lr = 0.00723774
I0409 21:32:29.984935 25006 solver.cpp:218] Iteration 1644 (2.56042 iter/s, 4.68672s/12 iters), loss = 3.2682
I0409 21:32:29.984990 25006 solver.cpp:237] Train net output #0: loss = 3.2682 (* 1 = 3.2682 loss)
I0409 21:32:29.985002 25006 sgd_solver.cpp:105] Iteration 1644, lr = 0.00722056
I0409 21:32:35.038713 25006 solver.cpp:218] Iteration 1656 (2.37459 iter/s, 5.05351s/12 iters), loss = 3.53512
I0409 21:32:35.038771 25006 solver.cpp:237] Train net output #0: loss = 3.53512 (* 1 = 3.53512 loss)
I0409 21:32:35.038782 25006 sgd_solver.cpp:105] Iteration 1656, lr = 0.00720341
I0409 21:32:40.334326 25006 solver.cpp:218] Iteration 1668 (2.26615 iter/s, 5.29533s/12 iters), loss = 3.76652
I0409 21:32:40.334376 25006 solver.cpp:237] Train net output #0: loss = 3.76652 (* 1 = 3.76652 loss)
I0409 21:32:40.334386 25006 sgd_solver.cpp:105] Iteration 1668, lr = 0.00718631
I0409 21:32:45.639379 25006 solver.cpp:218] Iteration 1680 (2.26211 iter/s, 5.30477s/12 iters), loss = 3.29459
I0409 21:32:45.639431 25006 solver.cpp:237] Train net output #0: loss = 3.29459 (* 1 = 3.29459 loss)
I0409 21:32:45.639441 25006 sgd_solver.cpp:105] Iteration 1680, lr = 0.00716925
I0409 21:32:50.666028 25006 solver.cpp:218] Iteration 1692 (2.3874 iter/s, 5.02638s/12 iters), loss = 3.28027
I0409 21:32:50.666074 25006 solver.cpp:237] Train net output #0: loss = 3.28027 (* 1 = 3.28027 loss)
I0409 21:32:50.666083 25006 sgd_solver.cpp:105] Iteration 1692, lr = 0.00715223
I0409 21:32:55.760746 25006 solver.cpp:218] Iteration 1704 (2.3555 iter/s, 5.09445s/12 iters), loss = 3.11126
I0409 21:32:55.760901 25006 solver.cpp:237] Train net output #0: loss = 3.11126 (* 1 = 3.11126 loss)
I0409 21:32:55.760913 25006 sgd_solver.cpp:105] Iteration 1704, lr = 0.00713525
I0409 21:33:00.704869 25006 solver.cpp:218] Iteration 1716 (2.4273 iter/s, 4.94376s/12 iters), loss = 3.5058
I0409 21:33:00.704931 25006 solver.cpp:237] Train net output #0: loss = 3.5058 (* 1 = 3.5058 loss)
I0409 21:33:00.704942 25006 sgd_solver.cpp:105] Iteration 1716, lr = 0.00711831
I0409 21:33:01.724776 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:33:05.710712 25006 solver.cpp:218] Iteration 1728 (2.39733 iter/s, 5.00556s/12 iters), loss = 3.37006
I0409 21:33:05.710778 25006 solver.cpp:237] Train net output #0: loss = 3.37006 (* 1 = 3.37006 loss)
I0409 21:33:05.710791 25006 sgd_solver.cpp:105] Iteration 1728, lr = 0.00710141
I0409 21:33:07.842133 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_1734.caffemodel
I0409 21:33:13.588816 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_1734.solverstate
I0409 21:33:20.358548 25006 solver.cpp:330] Iteration 1734, Testing net (#0)
I0409 21:33:20.358577 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:33:24.352867 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:33:25.079516 25006 solver.cpp:397] Test net output #0: accuracy = 0.113358
I0409 21:33:25.079569 25006 solver.cpp:397] Test net output #1: loss = 4.40207 (* 1 = 4.40207 loss)
I0409 21:33:26.829187 25006 solver.cpp:218] Iteration 1740 (0.568248 iter/s, 21.1176s/12 iters), loss = 3.43716
I0409 21:33:26.829308 25006 solver.cpp:237] Train net output #0: loss = 3.43716 (* 1 = 3.43716 loss)
I0409 21:33:26.829319 25006 sgd_solver.cpp:105] Iteration 1740, lr = 0.00708455
I0409 21:33:31.994119 25006 solver.cpp:218] Iteration 1752 (2.32352 iter/s, 5.16459s/12 iters), loss = 3.46479
I0409 21:33:31.994177 25006 solver.cpp:237] Train net output #0: loss = 3.46479 (* 1 = 3.46479 loss)
I0409 21:33:31.994189 25006 sgd_solver.cpp:105] Iteration 1752, lr = 0.00706773
I0409 21:33:37.371042 25006 solver.cpp:218] Iteration 1764 (2.23188 iter/s, 5.37663s/12 iters), loss = 3.50104
I0409 21:33:37.371098 25006 solver.cpp:237] Train net output #0: loss = 3.50104 (* 1 = 3.50104 loss)
I0409 21:33:37.371109 25006 sgd_solver.cpp:105] Iteration 1764, lr = 0.00705094
I0409 21:33:42.717789 25006 solver.cpp:218] Iteration 1776 (2.24448 iter/s, 5.34646s/12 iters), loss = 3.46835
I0409 21:33:42.717854 25006 solver.cpp:237] Train net output #0: loss = 3.46835 (* 1 = 3.46835 loss)
I0409 21:33:42.717865 25006 sgd_solver.cpp:105] Iteration 1776, lr = 0.0070342
I0409 21:33:47.960762 25006 solver.cpp:218] Iteration 1788 (2.2889 iter/s, 5.24268s/12 iters), loss = 3.4651
I0409 21:33:47.960820 25006 solver.cpp:237] Train net output #0: loss = 3.4651 (* 1 = 3.4651 loss)
I0409 21:33:47.960832 25006 sgd_solver.cpp:105] Iteration 1788, lr = 0.0070175
I0409 21:33:52.975778 25006 solver.cpp:218] Iteration 1800 (2.39294 iter/s, 5.01474s/12 iters), loss = 3.24479
I0409 21:33:52.975824 25006 solver.cpp:237] Train net output #0: loss = 3.24479 (* 1 = 3.24479 loss)
I0409 21:33:52.975834 25006 sgd_solver.cpp:105] Iteration 1800, lr = 0.00700084
I0409 21:33:58.002357 25006 solver.cpp:218] Iteration 1812 (2.38743 iter/s, 5.02632s/12 iters), loss = 3.38127
I0409 21:33:58.002496 25006 solver.cpp:237] Train net output #0: loss = 3.38127 (* 1 = 3.38127 loss)
I0409 21:33:58.002507 25006 sgd_solver.cpp:105] Iteration 1812, lr = 0.00698422
I0409 21:34:01.192665 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:34:03.023075 25006 solver.cpp:218] Iteration 1824 (2.39027 iter/s, 5.02036s/12 iters), loss = 3.48398
I0409 21:34:03.023147 25006 solver.cpp:237] Train net output #0: loss = 3.48398 (* 1 = 3.48398 loss)
I0409 21:34:03.023164 25006 sgd_solver.cpp:105] Iteration 1824, lr = 0.00696764
I0409 21:34:07.627578 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_1836.caffemodel
I0409 21:34:12.531332 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_1836.solverstate
I0409 21:34:19.009907 25006 solver.cpp:330] Iteration 1836, Testing net (#0)
I0409 21:34:19.009932 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:34:22.775338 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:34:23.632833 25006 solver.cpp:397] Test net output #0: accuracy = 0.110907
I0409 21:34:23.632869 25006 solver.cpp:397] Test net output #1: loss = 4.3884 (* 1 = 4.3884 loss)
I0409 21:34:23.721688 25006 solver.cpp:218] Iteration 1836 (0.579774 iter/s, 20.6977s/12 iters), loss = 3.27522
I0409 21:34:23.721745 25006 solver.cpp:237] Train net output #0: loss = 3.27522 (* 1 = 3.27522 loss)
I0409 21:34:23.721755 25006 sgd_solver.cpp:105] Iteration 1836, lr = 0.0069511
I0409 21:34:28.036805 25006 solver.cpp:218] Iteration 1848 (2.78108 iter/s, 4.31488s/12 iters), loss = 3.13049
I0409 21:34:28.036908 25006 solver.cpp:237] Train net output #0: loss = 3.13049 (* 1 = 3.13049 loss)
I0409 21:34:28.036921 25006 sgd_solver.cpp:105] Iteration 1848, lr = 0.00693459
I0409 21:34:33.097826 25006 solver.cpp:218] Iteration 1860 (2.37121 iter/s, 5.0607s/12 iters), loss = 3.40327
I0409 21:34:33.097882 25006 solver.cpp:237] Train net output #0: loss = 3.40327 (* 1 = 3.40327 loss)
I0409 21:34:33.097894 25006 sgd_solver.cpp:105] Iteration 1860, lr = 0.00691813
I0409 21:34:38.129664 25006 solver.cpp:218] Iteration 1872 (2.38494 iter/s, 5.03157s/12 iters), loss = 3.26138
I0409 21:34:38.129725 25006 solver.cpp:237] Train net output #0: loss = 3.26138 (* 1 = 3.26138 loss)
I0409 21:34:38.129738 25006 sgd_solver.cpp:105] Iteration 1872, lr = 0.0069017
I0409 21:34:43.576594 25006 solver.cpp:218] Iteration 1884 (2.2032 iter/s, 5.44663s/12 iters), loss = 3.12134
I0409 21:34:43.576651 25006 solver.cpp:237] Train net output #0: loss = 3.12134 (* 1 = 3.12134 loss)
I0409 21:34:43.576665 25006 sgd_solver.cpp:105] Iteration 1884, lr = 0.00688532
I0409 21:34:48.979856 25006 solver.cpp:218] Iteration 1896 (2.221 iter/s, 5.40298s/12 iters), loss = 3.56738
I0409 21:34:48.979905 25006 solver.cpp:237] Train net output #0: loss = 3.56738 (* 1 = 3.56738 loss)
I0409 21:34:48.979915 25006 sgd_solver.cpp:105] Iteration 1896, lr = 0.00686897
I0409 21:34:54.484954 25006 solver.cpp:218] Iteration 1908 (2.17991 iter/s, 5.50481s/12 iters), loss = 3.11073
I0409 21:34:54.485023 25006 solver.cpp:237] Train net output #0: loss = 3.11073 (* 1 = 3.11073 loss)
I0409 21:34:54.485038 25006 sgd_solver.cpp:105] Iteration 1908, lr = 0.00685266
I0409 21:35:00.232789 25006 solver.cpp:218] Iteration 1920 (2.08786 iter/s, 5.74752s/12 iters), loss = 3.30792
I0409 21:35:00.232935 25006 solver.cpp:237] Train net output #0: loss = 3.30792 (* 1 = 3.30792 loss)
I0409 21:35:00.232946 25006 sgd_solver.cpp:105] Iteration 1920, lr = 0.00683639
I0409 21:35:00.595160 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:35:05.934077 25006 solver.cpp:218] Iteration 1932 (2.10493 iter/s, 5.7009s/12 iters), loss = 3.01322
I0409 21:35:05.934123 25006 solver.cpp:237] Train net output #0: loss = 3.01322 (* 1 = 3.01322 loss)
I0409 21:35:05.934132 25006 sgd_solver.cpp:105] Iteration 1932, lr = 0.00682016
I0409 21:35:08.010699 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_1938.caffemodel
I0409 21:35:12.253506 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_1938.solverstate
I0409 21:35:15.259183 25006 solver.cpp:330] Iteration 1938, Testing net (#0)
I0409 21:35:15.259207 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:35:18.967417 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:35:19.788769 25006 solver.cpp:397] Test net output #0: accuracy = 0.119485
I0409 21:35:19.788803 25006 solver.cpp:397] Test net output #1: loss = 4.36223 (* 1 = 4.36223 loss)
I0409 21:35:21.464457 25006 solver.cpp:218] Iteration 1944 (0.772713 iter/s, 15.5297s/12 iters), loss = 3.51128
I0409 21:35:21.464519 25006 solver.cpp:237] Train net output #0: loss = 3.51128 (* 1 = 3.51128 loss)
I0409 21:35:21.464530 25006 sgd_solver.cpp:105] Iteration 1944, lr = 0.00680397
I0409 21:35:27.566282 25006 solver.cpp:218] Iteration 1956 (1.96673 iter/s, 6.1015s/12 iters), loss = 3.22943
I0409 21:35:27.566330 25006 solver.cpp:237] Train net output #0: loss = 3.22943 (* 1 = 3.22943 loss)
I0409 21:35:27.566339 25006 sgd_solver.cpp:105] Iteration 1956, lr = 0.00678782
I0409 21:35:32.670856 25006 solver.cpp:218] Iteration 1968 (2.35096 iter/s, 5.1043s/12 iters), loss = 3.24513
I0409 21:35:32.670979 25006 solver.cpp:237] Train net output #0: loss = 3.24513 (* 1 = 3.24513 loss)
I0409 21:35:32.670992 25006 sgd_solver.cpp:105] Iteration 1968, lr = 0.0067717
I0409 21:35:37.623172 25006 solver.cpp:218] Iteration 1980 (2.42327 iter/s, 4.95198s/12 iters), loss = 3.29928
I0409 21:35:37.623229 25006 solver.cpp:237] Train net output #0: loss = 3.29928 (* 1 = 3.29928 loss)
I0409 21:35:37.623242 25006 sgd_solver.cpp:105] Iteration 1980, lr = 0.00675562
I0409 21:35:42.508764 25006 solver.cpp:218] Iteration 1992 (2.45634 iter/s, 4.88532s/12 iters), loss = 3.28852
I0409 21:35:42.508827 25006 solver.cpp:237] Train net output #0: loss = 3.28852 (* 1 = 3.28852 loss)
I0409 21:35:42.508839 25006 sgd_solver.cpp:105] Iteration 1992, lr = 0.00673958
I0409 21:35:47.424708 25006 solver.cpp:218] Iteration 2004 (2.44117 iter/s, 4.91567s/12 iters), loss = 2.73021
I0409 21:35:47.424763 25006 solver.cpp:237] Train net output #0: loss = 2.73021 (* 1 = 2.73021 loss)
I0409 21:35:47.424775 25006 sgd_solver.cpp:105] Iteration 2004, lr = 0.00672358
I0409 21:35:52.466662 25006 solver.cpp:218] Iteration 2016 (2.38016 iter/s, 5.04168s/12 iters), loss = 2.94967
I0409 21:35:52.466722 25006 solver.cpp:237] Train net output #0: loss = 2.94967 (* 1 = 2.94967 loss)
I0409 21:35:52.466734 25006 sgd_solver.cpp:105] Iteration 2016, lr = 0.00670762
I0409 21:35:55.024003 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:35:57.697502 25006 solver.cpp:218] Iteration 2028 (2.29421 iter/s, 5.23056s/12 iters), loss = 2.79593
I0409 21:35:57.697546 25006 solver.cpp:237] Train net output #0: loss = 2.79593 (* 1 = 2.79593 loss)
I0409 21:35:57.697554 25006 sgd_solver.cpp:105] Iteration 2028, lr = 0.00669169
I0409 21:36:02.287061 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_2040.caffemodel
I0409 21:36:04.620045 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_2040.solverstate
I0409 21:36:06.243783 25006 solver.cpp:330] Iteration 2040, Testing net (#0)
I0409 21:36:06.243809 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:36:10.326371 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:36:11.344272 25006 solver.cpp:397] Test net output #0: accuracy = 0.114583
I0409 21:36:11.344321 25006 solver.cpp:397] Test net output #1: loss = 4.36755 (* 1 = 4.36755 loss)
I0409 21:36:11.433136 25006 solver.cpp:218] Iteration 2040 (0.873679 iter/s, 13.735s/12 iters), loss = 3.05069
I0409 21:36:11.433193 25006 solver.cpp:237] Train net output #0: loss = 3.05069 (* 1 = 3.05069 loss)
I0409 21:36:11.433204 25006 sgd_solver.cpp:105] Iteration 2040, lr = 0.00667581
I0409 21:36:15.864697 25006 solver.cpp:218] Iteration 2052 (2.708 iter/s, 4.43131s/12 iters), loss = 3.21344
I0409 21:36:15.864746 25006 solver.cpp:237] Train net output #0: loss = 3.21344 (* 1 = 3.21344 loss)
I0409 21:36:15.864758 25006 sgd_solver.cpp:105] Iteration 2052, lr = 0.00665996
I0409 21:36:17.830278 25006 blocking_queue.cpp:49] Waiting for data
I0409 21:36:21.536412 25006 solver.cpp:218] Iteration 2064 (2.11587 iter/s, 5.67142s/12 iters), loss = 3.27499
I0409 21:36:21.536468 25006 solver.cpp:237] Train net output #0: loss = 3.27499 (* 1 = 3.27499 loss)
I0409 21:36:21.536478 25006 sgd_solver.cpp:105] Iteration 2064, lr = 0.00664414
I0409 21:36:26.485289 25006 solver.cpp:218] Iteration 2076 (2.42493 iter/s, 4.9486s/12 iters), loss = 3.50933
I0409 21:36:26.485357 25006 solver.cpp:237] Train net output #0: loss = 3.50933 (* 1 = 3.50933 loss)
I0409 21:36:26.485369 25006 sgd_solver.cpp:105] Iteration 2076, lr = 0.00662837
I0409 21:36:31.500241 25006 solver.cpp:218] Iteration 2088 (2.39298 iter/s, 5.01466s/12 iters), loss = 2.6304
I0409 21:36:31.500288 25006 solver.cpp:237] Train net output #0: loss = 2.6304 (* 1 = 2.6304 loss)
I0409 21:36:31.500296 25006 sgd_solver.cpp:105] Iteration 2088, lr = 0.00661263
I0409 21:36:37.188875 25006 solver.cpp:218] Iteration 2100 (2.10958 iter/s, 5.68834s/12 iters), loss = 2.86408
I0409 21:36:37.189004 25006 solver.cpp:237] Train net output #0: loss = 2.86408 (* 1 = 2.86408 loss)
I0409 21:36:37.189016 25006 sgd_solver.cpp:105] Iteration 2100, lr = 0.00659693
I0409 21:36:42.376811 25006 solver.cpp:218] Iteration 2112 (2.31321 iter/s, 5.18759s/12 iters), loss = 2.7302
I0409 21:36:42.376871 25006 solver.cpp:237] Train net output #0: loss = 2.7302 (* 1 = 2.7302 loss)
I0409 21:36:42.376883 25006 sgd_solver.cpp:105] Iteration 2112, lr = 0.00658127
I0409 21:36:46.986902 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:36:47.303961 25006 solver.cpp:218] Iteration 2124 (2.43562 iter/s, 4.92688s/12 iters), loss = 2.55969
I0409 21:36:47.304009 25006 solver.cpp:237] Train net output #0: loss = 2.55969 (* 1 = 2.55969 loss)
I0409 21:36:47.304020 25006 sgd_solver.cpp:105] Iteration 2124, lr = 0.00656564
I0409 21:36:52.182902 25006 solver.cpp:218] Iteration 2136 (2.45968 iter/s, 4.87868s/12 iters), loss = 2.84002
I0409 21:36:52.182953 25006 solver.cpp:237] Train net output #0: loss = 2.84002 (* 1 = 2.84002 loss)
I0409 21:36:52.182965 25006 sgd_solver.cpp:105] Iteration 2136, lr = 0.00655006
I0409 21:36:54.174468 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_2142.caffemodel
I0409 21:36:59.479171 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_2142.solverstate
I0409 21:37:05.274111 25006 solver.cpp:330] Iteration 2142, Testing net (#0)
I0409 21:37:05.274140 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:37:08.988505 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:37:09.856876 25006 solver.cpp:397] Test net output #0: accuracy = 0.118873
I0409 21:37:09.856925 25006 solver.cpp:397] Test net output #1: loss = 4.36789 (* 1 = 4.36789 loss)
I0409 21:37:11.785879 25006 solver.cpp:218] Iteration 2148 (0.612179 iter/s, 19.6021s/12 iters), loss = 2.94284
I0409 21:37:11.785936 25006 solver.cpp:237] Train net output #0: loss = 2.94284 (* 1 = 2.94284 loss)
I0409 21:37:11.785949 25006 sgd_solver.cpp:105] Iteration 2148, lr = 0.00653451
I0409 21:37:16.794996 25006 solver.cpp:218] Iteration 2160 (2.39576 iter/s, 5.00885s/12 iters), loss = 3.08723
I0409 21:37:16.795037 25006 solver.cpp:237] Train net output #0: loss = 3.08723 (* 1 = 3.08723 loss)
I0409 21:37:16.795047 25006 sgd_solver.cpp:105] Iteration 2160, lr = 0.00651899
I0409 21:37:21.921603 25006 solver.cpp:218] Iteration 2172 (2.34085 iter/s, 5.12634s/12 iters), loss = 2.7585
I0409 21:37:21.921648 25006 solver.cpp:237] Train net output #0: loss = 2.7585 (* 1 = 2.7585 loss)
I0409 21:37:21.921658 25006 sgd_solver.cpp:105] Iteration 2172, lr = 0.00650351
I0409 21:37:27.139258 25006 solver.cpp:218] Iteration 2184 (2.3 iter/s, 5.21739s/12 iters), loss = 2.65871
I0409 21:37:27.139303 25006 solver.cpp:237] Train net output #0: loss = 2.65871 (* 1 = 2.65871 loss)
I0409 21:37:27.139314 25006 sgd_solver.cpp:105] Iteration 2184, lr = 0.00648807
I0409 21:37:32.133899 25006 solver.cpp:218] Iteration 2196 (2.4027 iter/s, 4.99438s/12 iters), loss = 2.49281
I0409 21:37:32.133975 25006 solver.cpp:237] Train net output #0: loss = 2.49281 (* 1 = 2.49281 loss)
I0409 21:37:32.133987 25006 sgd_solver.cpp:105] Iteration 2196, lr = 0.00647267
I0409 21:37:37.188793 25006 solver.cpp:218] Iteration 2208 (2.37406 iter/s, 5.05462s/12 iters), loss = 2.86457
I0409 21:37:37.188841 25006 solver.cpp:237] Train net output #0: loss = 2.86457 (* 1 = 2.86457 loss)
I0409 21:37:37.188850 25006 sgd_solver.cpp:105] Iteration 2208, lr = 0.0064573
I0409 21:37:42.225847 25006 solver.cpp:218] Iteration 2220 (2.38247 iter/s, 5.03679s/12 iters), loss = 2.49127
I0409 21:37:42.225982 25006 solver.cpp:237] Train net output #0: loss = 2.49127 (* 1 = 2.49127 loss)
I0409 21:37:42.225993 25006 sgd_solver.cpp:105] Iteration 2220, lr = 0.00644197
I0409 21:37:44.009912 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:37:47.149420 25006 solver.cpp:218] Iteration 2232 (2.43741 iter/s, 4.92325s/12 iters), loss = 2.78724
I0409 21:37:47.149463 25006 solver.cpp:237] Train net output #0: loss = 2.78724 (* 1 = 2.78724 loss)
I0409 21:37:47.149474 25006 sgd_solver.cpp:105] Iteration 2232, lr = 0.00642668
I0409 21:37:51.776082 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_2244.caffemodel
I0409 21:37:54.511947 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_2244.solverstate
I0409 21:37:57.702059 25006 solver.cpp:330] Iteration 2244, Testing net (#0)
I0409 21:37:57.702082 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:38:01.267374 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:38:02.179826 25006 solver.cpp:397] Test net output #0: accuracy = 0.133578
I0409 21:38:02.179872 25006 solver.cpp:397] Test net output #1: loss = 4.4019 (* 1 = 4.4019 loss)
I0409 21:38:02.268905 25006 solver.cpp:218] Iteration 2244 (0.793713 iter/s, 15.1188s/12 iters), loss = 3.04716
I0409 21:38:02.268954 25006 solver.cpp:237] Train net output #0: loss = 3.04716 (* 1 = 3.04716 loss)
I0409 21:38:02.268966 25006 sgd_solver.cpp:105] Iteration 2244, lr = 0.00641142
I0409 21:38:06.635717 25006 solver.cpp:218] Iteration 2256 (2.74815 iter/s, 4.36657s/12 iters), loss = 2.66853
I0409 21:38:06.635761 25006 solver.cpp:237] Train net output #0: loss = 2.66853 (* 1 = 2.66853 loss)
I0409 21:38:06.635771 25006 sgd_solver.cpp:105] Iteration 2256, lr = 0.0063962
I0409 21:38:11.588853 25006 solver.cpp:218] Iteration 2268 (2.42283 iter/s, 4.95288s/12 iters), loss = 2.4371
I0409 21:38:11.588901 25006 solver.cpp:237] Train net output #0: loss = 2.4371 (* 1 = 2.4371 loss)
I0409 21:38:11.588914 25006 sgd_solver.cpp:105] Iteration 2268, lr = 0.00638101
I0409 21:38:16.537294 25006 solver.cpp:218] Iteration 2280 (2.42514 iter/s, 4.94818s/12 iters), loss = 2.9901
I0409 21:38:16.537420 25006 solver.cpp:237] Train net output #0: loss = 2.9901 (* 1 = 2.9901 loss)
I0409 21:38:16.537432 25006 sgd_solver.cpp:105] Iteration 2280, lr = 0.00636586
I0409 21:38:21.460868 25006 solver.cpp:218] Iteration 2292 (2.43742 iter/s, 4.92324s/12 iters), loss = 2.60143
I0409 21:38:21.460929 25006 solver.cpp:237] Train net output #0: loss = 2.60143 (* 1 = 2.60143 loss)
I0409 21:38:21.460945 25006 sgd_solver.cpp:105] Iteration 2292, lr = 0.00635075
I0409 21:38:26.721302 25006 solver.cpp:218] Iteration 2304 (2.2813 iter/s, 5.26015s/12 iters), loss = 2.84553
I0409 21:38:26.721350 25006 solver.cpp:237] Train net output #0: loss = 2.84553 (* 1 = 2.84553 loss)
I0409 21:38:26.721361 25006 sgd_solver.cpp:105] Iteration 2304, lr = 0.00633567
I0409 21:38:31.719173 25006 solver.cpp:218] Iteration 2316 (2.40115 iter/s, 4.9976s/12 iters), loss = 2.88497
I0409 21:38:31.719223 25006 solver.cpp:237] Train net output #0: loss = 2.88497 (* 1 = 2.88497 loss)
I0409 21:38:31.719234 25006 sgd_solver.cpp:105] Iteration 2316, lr = 0.00632063
I0409 21:38:35.668100 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:38:36.708616 25006 solver.cpp:218] Iteration 2328 (2.40521 iter/s, 4.98917s/12 iters), loss = 2.52857
I0409 21:38:36.708670 25006 solver.cpp:237] Train net output #0: loss = 2.52857 (* 1 = 2.52857 loss)
I0409 21:38:36.708683 25006 sgd_solver.cpp:105] Iteration 2328, lr = 0.00630562
I0409 21:38:41.815974 25006 solver.cpp:218] Iteration 2340 (2.34968 iter/s, 5.10708s/12 iters), loss = 2.99655
I0409 21:38:41.816047 25006 solver.cpp:237] Train net output #0: loss = 2.99655 (* 1 = 2.99655 loss)
I0409 21:38:41.816061 25006 sgd_solver.cpp:105] Iteration 2340, lr = 0.00629065
I0409 21:38:43.872313 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_2346.caffemodel
I0409 21:38:46.091575 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_2346.solverstate
I0409 21:38:47.904424 25006 solver.cpp:330] Iteration 2346, Testing net (#0)
I0409 21:38:47.904563 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:38:51.407984 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:38:52.352267 25006 solver.cpp:397] Test net output #0: accuracy = 0.135417
I0409 21:38:52.352316 25006 solver.cpp:397] Test net output #1: loss = 4.44569 (* 1 = 4.44569 loss)
I0409 21:38:54.305660 25006 solver.cpp:218] Iteration 2352 (0.960838 iter/s, 12.4891s/12 iters), loss = 3.30428
I0409 21:38:54.305708 25006 solver.cpp:237] Train net output #0: loss = 3.30428 (* 1 = 3.30428 loss)
I0409 21:38:54.305721 25006 sgd_solver.cpp:105] Iteration 2352, lr = 0.00627571
I0409 21:38:59.477347 25006 solver.cpp:218] Iteration 2364 (2.32045 iter/s, 5.17141s/12 iters), loss = 2.6661
I0409 21:38:59.477403 25006 solver.cpp:237] Train net output #0: loss = 2.6661 (* 1 = 2.6661 loss)
I0409 21:38:59.477417 25006 sgd_solver.cpp:105] Iteration 2364, lr = 0.00626081
I0409 21:39:04.426914 25006 solver.cpp:218] Iteration 2376 (2.42459 iter/s, 4.9493s/12 iters), loss = 2.75328
I0409 21:39:04.426957 25006 solver.cpp:237] Train net output #0: loss = 2.75328 (* 1 = 2.75328 loss)
I0409 21:39:04.426967 25006 sgd_solver.cpp:105] Iteration 2376, lr = 0.00624595
I0409 21:39:09.405129 25006 solver.cpp:218] Iteration 2388 (2.41063 iter/s, 4.97795s/12 iters), loss = 2.77117
I0409 21:39:09.405184 25006 solver.cpp:237] Train net output #0: loss = 2.77117 (* 1 = 2.77117 loss)
I0409 21:39:09.405197 25006 sgd_solver.cpp:105] Iteration 2388, lr = 0.00623112
I0409 21:39:14.395944 25006 solver.cpp:218] Iteration 2400 (2.40455 iter/s, 4.99054s/12 iters), loss = 2.27188
I0409 21:39:14.396001 25006 solver.cpp:237] Train net output #0: loss = 2.27188 (* 1 = 2.27188 loss)
I0409 21:39:14.396013 25006 sgd_solver.cpp:105] Iteration 2400, lr = 0.00621633
I0409 21:39:19.333171 25006 solver.cpp:218] Iteration 2412 (2.43065 iter/s, 4.93696s/12 iters), loss = 2.19485
I0409 21:39:19.333262 25006 solver.cpp:237] Train net output #0: loss = 2.19485 (* 1 = 2.19485 loss)
I0409 21:39:19.333274 25006 sgd_solver.cpp:105] Iteration 2412, lr = 0.00620157
I0409 21:39:24.358812 25006 solver.cpp:218] Iteration 2424 (2.38791 iter/s, 5.02532s/12 iters), loss = 2.40421
I0409 21:39:24.358925 25006 solver.cpp:237] Train net output #0: loss = 2.40421 (* 1 = 2.40421 loss)
I0409 21:39:24.358942 25006 sgd_solver.cpp:105] Iteration 2424, lr = 0.00618684
I0409 21:39:25.433208 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:39:29.469605 25006 solver.cpp:218] Iteration 2436 (2.34811 iter/s, 5.11049s/12 iters), loss = 2.25761
I0409 21:39:29.469651 25006 solver.cpp:237] Train net output #0: loss = 2.25761 (* 1 = 2.25761 loss)
I0409 21:39:29.469662 25006 sgd_solver.cpp:105] Iteration 2436, lr = 0.00617215
I0409 21:39:33.981096 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_2448.caffemodel
I0409 21:39:36.147445 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_2448.solverstate
I0409 21:39:37.832226 25006 solver.cpp:330] Iteration 2448, Testing net (#0)
I0409 21:39:37.832250 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:39:41.222208 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:39:42.211091 25006 solver.cpp:397] Test net output #0: accuracy = 0.139093
I0409 21:39:42.211138 25006 solver.cpp:397] Test net output #1: loss = 4.47329 (* 1 = 4.47329 loss)
I0409 21:39:42.300034 25006 solver.cpp:218] Iteration 2448 (0.935319 iter/s, 12.8299s/12 iters), loss = 2.98796
I0409 21:39:42.300087 25006 solver.cpp:237] Train net output #0: loss = 2.98796 (* 1 = 2.98796 loss)
I0409 21:39:42.300099 25006 sgd_solver.cpp:105] Iteration 2448, lr = 0.0061575
I0409 21:39:46.560490 25006 solver.cpp:218] Iteration 2460 (2.81676 iter/s, 4.26022s/12 iters), loss = 2.62791
I0409 21:39:46.560535 25006 solver.cpp:237] Train net output #0: loss = 2.62791 (* 1 = 2.62791 loss)
I0409 21:39:46.560544 25006 sgd_solver.cpp:105] Iteration 2460, lr = 0.00614288
I0409 21:39:51.528775 25006 solver.cpp:218] Iteration 2472 (2.41545 iter/s, 4.96802s/12 iters), loss = 2.52078
I0409 21:39:51.528949 25006 solver.cpp:237] Train net output #0: loss = 2.52078 (* 1 = 2.52078 loss)
I0409 21:39:51.528961 25006 sgd_solver.cpp:105] Iteration 2472, lr = 0.0061283
I0409 21:39:56.462173 25006 solver.cpp:218] Iteration 2484 (2.43259 iter/s, 4.93301s/12 iters), loss = 2.79619
I0409 21:39:56.462230 25006 solver.cpp:237] Train net output #0: loss = 2.79619 (* 1 = 2.79619 loss)
I0409 21:39:56.462241 25006 sgd_solver.cpp:105] Iteration 2484, lr = 0.00611375
I0409 21:40:01.570593 25006 solver.cpp:218] Iteration 2496 (2.34919 iter/s, 5.10815s/12 iters), loss = 2.93512
I0409 21:40:01.570642 25006 solver.cpp:237] Train net output #0: loss = 2.93512 (* 1 = 2.93512 loss)
I0409 21:40:01.570652 25006 sgd_solver.cpp:105] Iteration 2496, lr = 0.00609923
I0409 21:40:06.558813 25006 solver.cpp:218] Iteration 2508 (2.40579 iter/s, 4.98796s/12 iters), loss = 2.09799
I0409 21:40:06.558852 25006 solver.cpp:237] Train net output #0: loss = 2.09799 (* 1 = 2.09799 loss)
I0409 21:40:06.558862 25006 sgd_solver.cpp:105] Iteration 2508, lr = 0.00608475
I0409 21:40:11.593813 25006 solver.cpp:218] Iteration 2520 (2.38344 iter/s, 5.03473s/12 iters), loss = 2.32175
I0409 21:40:11.593873 25006 solver.cpp:237] Train net output #0: loss = 2.32175 (* 1 = 2.32175 loss)
I0409 21:40:11.593885 25006 sgd_solver.cpp:105] Iteration 2520, lr = 0.0060703
I0409 21:40:14.730777 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:40:16.480188 25006 solver.cpp:218] Iteration 2532 (2.45595 iter/s, 4.8861s/12 iters), loss = 2.36995
I0409 21:40:16.480244 25006 solver.cpp:237] Train net output #0: loss = 2.36995 (* 1 = 2.36995 loss)
I0409 21:40:16.480254 25006 sgd_solver.cpp:105] Iteration 2532, lr = 0.00605589
I0409 21:40:21.612020 25006 solver.cpp:218] Iteration 2544 (2.33847 iter/s, 5.13156s/12 iters), loss = 2.2715
I0409 21:40:21.612097 25006 solver.cpp:237] Train net output #0: loss = 2.2715 (* 1 = 2.2715 loss)
I0409 21:40:21.612112 25006 sgd_solver.cpp:105] Iteration 2544, lr = 0.00604151
I0409 21:40:23.741742 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_2550.caffemodel
I0409 21:40:27.617733 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_2550.solverstate
I0409 21:40:30.878192 25006 solver.cpp:330] Iteration 2550, Testing net (#0)
I0409 21:40:30.878221 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:40:34.374980 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:40:35.399155 25006 solver.cpp:397] Test net output #0: accuracy = 0.153799
I0409 21:40:35.399204 25006 solver.cpp:397] Test net output #1: loss = 4.39604 (* 1 = 4.39604 loss)
I0409 21:40:37.208693 25006 solver.cpp:218] Iteration 2556 (0.76943 iter/s, 15.596s/12 iters), loss = 2.2495
I0409 21:40:37.208736 25006 solver.cpp:237] Train net output #0: loss = 2.2495 (* 1 = 2.2495 loss)
I0409 21:40:37.208745 25006 sgd_solver.cpp:105] Iteration 2556, lr = 0.00602717
I0409 21:40:42.085680 25006 solver.cpp:218] Iteration 2568 (2.46067 iter/s, 4.87673s/12 iters), loss = 2.16899
I0409 21:40:42.085736 25006 solver.cpp:237] Train net output #0: loss = 2.16899 (* 1 = 2.16899 loss)
I0409 21:40:42.085750 25006 sgd_solver.cpp:105] Iteration 2568, lr = 0.00601286
I0409 21:40:47.246595 25006 solver.cpp:218] Iteration 2580 (2.32529 iter/s, 5.16064s/12 iters), loss = 2.15661
I0409 21:40:47.246641 25006 solver.cpp:237] Train net output #0: loss = 2.15661 (* 1 = 2.15661 loss)
I0409 21:40:47.246650 25006 sgd_solver.cpp:105] Iteration 2580, lr = 0.00599858
I0409 21:40:52.235953 25006 solver.cpp:218] Iteration 2592 (2.40525 iter/s, 4.98909s/12 iters), loss = 2.66377
I0409 21:40:52.236116 25006 solver.cpp:237] Train net output #0: loss = 2.66377 (* 1 = 2.66377 loss)
I0409 21:40:52.236135 25006 sgd_solver.cpp:105] Iteration 2592, lr = 0.00598434
I0409 21:40:57.521566 25006 solver.cpp:218] Iteration 2604 (2.27047 iter/s, 5.28524s/12 iters), loss = 2.57631
I0409 21:40:57.521608 25006 solver.cpp:237] Train net output #0: loss = 2.57631 (* 1 = 2.57631 loss)
I0409 21:40:57.521616 25006 sgd_solver.cpp:105] Iteration 2604, lr = 0.00597013
I0409 21:41:02.539420 25006 solver.cpp:218] Iteration 2616 (2.39158 iter/s, 5.0176s/12 iters), loss = 1.99203
I0409 21:41:02.539459 25006 solver.cpp:237] Train net output #0: loss = 1.99203 (* 1 = 1.99203 loss)
I0409 21:41:02.539467 25006 sgd_solver.cpp:105] Iteration 2616, lr = 0.00595596
I0409 21:41:07.506736 25006 solver.cpp:218] Iteration 2628 (2.41591 iter/s, 4.96706s/12 iters), loss = 1.97745
I0409 21:41:07.506774 25006 solver.cpp:237] Train net output #0: loss = 1.97745 (* 1 = 1.97745 loss)
I0409 21:41:07.506783 25006 sgd_solver.cpp:105] Iteration 2628, lr = 0.00594182
I0409 21:41:07.941478 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:41:12.473631 25006 solver.cpp:218] Iteration 2640 (2.41612 iter/s, 4.96664s/12 iters), loss = 2.15489
I0409 21:41:12.473683 25006 solver.cpp:237] Train net output #0: loss = 2.15489 (* 1 = 2.15489 loss)
I0409 21:41:12.473693 25006 sgd_solver.cpp:105] Iteration 2640, lr = 0.00592771
I0409 21:41:16.984958 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_2652.caffemodel
I0409 21:41:20.735241 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_2652.solverstate
I0409 21:41:22.373209 25006 solver.cpp:330] Iteration 2652, Testing net (#0)
I0409 21:41:22.373281 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:41:25.771838 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:41:26.842603 25006 solver.cpp:397] Test net output #0: accuracy = 0.14951
I0409 21:41:26.842651 25006 solver.cpp:397] Test net output #1: loss = 4.49085 (* 1 = 4.49085 loss)
I0409 21:41:26.931706 25006 solver.cpp:218] Iteration 2652 (0.830023 iter/s, 14.4574s/12 iters), loss = 2.41166
I0409 21:41:26.931754 25006 solver.cpp:237] Train net output #0: loss = 2.41166 (* 1 = 2.41166 loss)
I0409 21:41:26.931766 25006 sgd_solver.cpp:105] Iteration 2652, lr = 0.00591364
I0409 21:41:31.168081 25006 solver.cpp:218] Iteration 2664 (2.83277 iter/s, 4.23613s/12 iters), loss = 2.07782
I0409 21:41:31.168134 25006 solver.cpp:237] Train net output #0: loss = 2.07782 (* 1 = 2.07782 loss)
I0409 21:41:31.168146 25006 sgd_solver.cpp:105] Iteration 2664, lr = 0.0058996
I0409 21:41:36.131443 25006 solver.cpp:218] Iteration 2676 (2.41785 iter/s, 4.96309s/12 iters), loss = 2.33202
I0409 21:41:36.131487 25006 solver.cpp:237] Train net output #0: loss = 2.33202 (* 1 = 2.33202 loss)
I0409 21:41:36.131496 25006 sgd_solver.cpp:105] Iteration 2676, lr = 0.00588559
I0409 21:41:41.338308 25006 solver.cpp:218] Iteration 2688 (2.30477 iter/s, 5.20659s/12 iters), loss = 2.43613
I0409 21:41:41.338366 25006 solver.cpp:237] Train net output #0: loss = 2.43613 (* 1 = 2.43613 loss)
I0409 21:41:41.338378 25006 sgd_solver.cpp:105] Iteration 2688, lr = 0.00587162
I0409 21:41:46.359062 25006 solver.cpp:218] Iteration 2700 (2.39021 iter/s, 5.02048s/12 iters), loss = 2.19087
I0409 21:41:46.359112 25006 solver.cpp:237] Train net output #0: loss = 2.19087 (* 1 = 2.19087 loss)
I0409 21:41:46.359123 25006 sgd_solver.cpp:105] Iteration 2700, lr = 0.00585768
I0409 21:41:51.260063 25006 solver.cpp:218] Iteration 2712 (2.44861 iter/s, 4.90074s/12 iters), loss = 1.76434
I0409 21:41:51.260108 25006 solver.cpp:237] Train net output #0: loss = 1.76434 (* 1 = 1.76434 loss)
I0409 21:41:51.260120 25006 sgd_solver.cpp:105] Iteration 2712, lr = 0.00584377
I0409 21:41:56.332697 25006 solver.cpp:218] Iteration 2724 (2.36576 iter/s, 5.07236s/12 iters), loss = 2.15098
I0409 21:41:56.332890 25006 solver.cpp:237] Train net output #0: loss = 2.15098 (* 1 = 2.15098 loss)
I0409 21:41:56.332911 25006 sgd_solver.cpp:105] Iteration 2724, lr = 0.0058299
I0409 21:41:59.048060 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:42:01.637708 25006 solver.cpp:218] Iteration 2736 (2.26219 iter/s, 5.3046s/12 iters), loss = 1.99277
I0409 21:42:01.637761 25006 solver.cpp:237] Train net output #0: loss = 1.99277 (* 1 = 1.99277 loss)
I0409 21:42:01.637774 25006 sgd_solver.cpp:105] Iteration 2736, lr = 0.00581605
I0409 21:42:06.892917 25006 solver.cpp:218] Iteration 2748 (2.28357 iter/s, 5.25493s/12 iters), loss = 1.75279
I0409 21:42:06.892969 25006 solver.cpp:237] Train net output #0: loss = 1.75279 (* 1 = 1.75279 loss)
I0409 21:42:06.892982 25006 sgd_solver.cpp:105] Iteration 2748, lr = 0.00580225
I0409 21:42:08.924814 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_2754.caffemodel
I0409 21:42:11.220228 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_2754.solverstate
I0409 21:42:12.887671 25006 solver.cpp:330] Iteration 2754, Testing net (#0)
I0409 21:42:12.887699 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:42:16.010115 25006 blocking_queue.cpp:49] Waiting for data
I0409 21:42:16.248641 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:42:17.414274 25006 solver.cpp:397] Test net output #0: accuracy = 0.153799
I0409 21:42:17.414302 25006 solver.cpp:397] Test net output #1: loss = 4.48059 (* 1 = 4.48059 loss)
I0409 21:42:19.192199 25006 solver.cpp:218] Iteration 2760 (0.975711 iter/s, 12.2987s/12 iters), loss = 1.87677
I0409 21:42:19.192255 25006 solver.cpp:237] Train net output #0: loss = 1.87677 (* 1 = 1.87677 loss)
I0409 21:42:19.192266 25006 sgd_solver.cpp:105] Iteration 2760, lr = 0.00578847
I0409 21:42:24.191799 25006 solver.cpp:218] Iteration 2772 (2.40032 iter/s, 4.99933s/12 iters), loss = 2.09245
I0409 21:42:24.191851 25006 solver.cpp:237] Train net output #0: loss = 2.09245 (* 1 = 2.09245 loss)
I0409 21:42:24.191864 25006 sgd_solver.cpp:105] Iteration 2772, lr = 0.00577473
I0409 21:42:29.555465 25006 solver.cpp:218] Iteration 2784 (2.23739 iter/s, 5.36338s/12 iters), loss = 2.10152
I0409 21:42:29.555591 25006 solver.cpp:237] Train net output #0: loss = 2.10152 (* 1 = 2.10152 loss)
I0409 21:42:29.555603 25006 sgd_solver.cpp:105] Iteration 2784, lr = 0.00576102
I0409 21:42:34.749707 25006 solver.cpp:218] Iteration 2796 (2.3104 iter/s, 5.1939s/12 iters), loss = 2.05921
I0409 21:42:34.749753 25006 solver.cpp:237] Train net output #0: loss = 2.05921 (* 1 = 2.05921 loss)
I0409 21:42:34.749764 25006 sgd_solver.cpp:105] Iteration 2796, lr = 0.00574734
I0409 21:42:39.717682 25006 solver.cpp:218] Iteration 2808 (2.4156 iter/s, 4.96772s/12 iters), loss = 1.70014
I0409 21:42:39.717717 25006 solver.cpp:237] Train net output #0: loss = 1.70014 (* 1 = 1.70014 loss)
I0409 21:42:39.717727 25006 sgd_solver.cpp:105] Iteration 2808, lr = 0.00573369
I0409 21:42:44.868021 25006 solver.cpp:218] Iteration 2820 (2.33006 iter/s, 5.15008s/12 iters), loss = 1.55254
I0409 21:42:44.868064 25006 solver.cpp:237] Train net output #0: loss = 1.55254 (* 1 = 1.55254 loss)
I0409 21:42:44.868075 25006 sgd_solver.cpp:105] Iteration 2820, lr = 0.00572008
I0409 21:42:49.577103 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:42:49.875759 25006 solver.cpp:218] Iteration 2832 (2.39642 iter/s, 5.00747s/12 iters), loss = 1.75044
I0409 21:42:49.875818 25006 solver.cpp:237] Train net output #0: loss = 1.75044 (* 1 = 1.75044 loss)
I0409 21:42:49.875831 25006 sgd_solver.cpp:105] Iteration 2832, lr = 0.0057065
I0409 21:42:54.952101 25006 solver.cpp:218] Iteration 2844 (2.36403 iter/s, 5.07607s/12 iters), loss = 1.93422
I0409 21:42:54.952152 25006 solver.cpp:237] Train net output #0: loss = 1.93422 (* 1 = 1.93422 loss)
I0409 21:42:54.952165 25006 sgd_solver.cpp:105] Iteration 2844, lr = 0.00569295
I0409 21:42:59.435169 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_2856.caffemodel
I0409 21:43:04.069113 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_2856.solverstate
I0409 21:43:07.313498 25006 solver.cpp:330] Iteration 2856, Testing net (#0)
I0409 21:43:07.313525 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:43:10.657289 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:43:11.796603 25006 solver.cpp:397] Test net output #0: accuracy = 0.145221
I0409 21:43:11.796636 25006 solver.cpp:397] Test net output #1: loss = 4.70127 (* 1 = 4.70127 loss)
I0409 21:43:11.883539 25006 solver.cpp:218] Iteration 2856 (0.708772 iter/s, 16.9307s/12 iters), loss = 2.26584
I0409 21:43:11.883586 25006 solver.cpp:237] Train net output #0: loss = 2.26584 (* 1 = 2.26584 loss)
I0409 21:43:11.883595 25006 sgd_solver.cpp:105] Iteration 2856, lr = 0.00567944
I0409 21:43:16.047673 25006 solver.cpp:218] Iteration 2868 (2.88191 iter/s, 4.1639s/12 iters), loss = 2.02135
I0409 21:43:16.047722 25006 solver.cpp:237] Train net output #0: loss = 2.02135 (* 1 = 2.02135 loss)
I0409 21:43:16.047732 25006 sgd_solver.cpp:105] Iteration 2868, lr = 0.00566595
I0409 21:43:21.039017 25006 solver.cpp:218] Iteration 2880 (2.40429 iter/s, 4.99108s/12 iters), loss = 1.91293
I0409 21:43:21.039067 25006 solver.cpp:237] Train net output #0: loss = 1.91293 (* 1 = 1.91293 loss)
I0409 21:43:21.039078 25006 sgd_solver.cpp:105] Iteration 2880, lr = 0.0056525
I0409 21:43:26.008786 25006 solver.cpp:218] Iteration 2892 (2.41473 iter/s, 4.96951s/12 iters), loss = 2.15469
I0409 21:43:26.008828 25006 solver.cpp:237] Train net output #0: loss = 2.15469 (* 1 = 2.15469 loss)
I0409 21:43:26.008837 25006 sgd_solver.cpp:105] Iteration 2892, lr = 0.00563908
I0409 21:43:31.008508 25006 solver.cpp:218] Iteration 2904 (2.40026 iter/s, 4.99946s/12 iters), loss = 1.8871
I0409 21:43:31.008563 25006 solver.cpp:237] Train net output #0: loss = 1.8871 (* 1 = 1.8871 loss)
I0409 21:43:31.008576 25006 sgd_solver.cpp:105] Iteration 2904, lr = 0.00562569
I0409 21:43:36.029045 25006 solver.cpp:218] Iteration 2916 (2.39031 iter/s, 5.02027s/12 iters), loss = 1.5676
I0409 21:43:36.029160 25006 solver.cpp:237] Train net output #0: loss = 1.5676 (* 1 = 1.5676 loss)
I0409 21:43:36.029173 25006 sgd_solver.cpp:105] Iteration 2916, lr = 0.00561233
I0409 21:43:40.988723 25006 solver.cpp:218] Iteration 2928 (2.41967 iter/s, 4.95935s/12 iters), loss = 1.65068
I0409 21:43:40.988766 25006 solver.cpp:237] Train net output #0: loss = 1.65068 (* 1 = 1.65068 loss)
I0409 21:43:40.988777 25006 sgd_solver.cpp:105] Iteration 2928, lr = 0.00559901
I0409 21:43:42.814007 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:43:45.956957 25006 solver.cpp:218] Iteration 2940 (2.41547 iter/s, 4.96797s/12 iters), loss = 1.88088
I0409 21:43:45.957005 25006 solver.cpp:237] Train net output #0: loss = 1.88088 (* 1 = 1.88088 loss)
I0409 21:43:45.957015 25006 sgd_solver.cpp:105] Iteration 2940, lr = 0.00558572
I0409 21:43:51.003540 25006 solver.cpp:218] Iteration 2952 (2.37797 iter/s, 5.04632s/12 iters), loss = 2.00595
I0409 21:43:51.003584 25006 solver.cpp:237] Train net output #0: loss = 2.00595 (* 1 = 2.00595 loss)
I0409 21:43:51.003594 25006 sgd_solver.cpp:105] Iteration 2952, lr = 0.00557245
I0409 21:43:53.051863 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_2958.caffemodel
I0409 21:43:56.932121 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_2958.solverstate
I0409 21:44:01.626893 25006 solver.cpp:330] Iteration 2958, Testing net (#0)
I0409 21:44:01.626920 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:44:05.152422 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:44:06.336597 25006 solver.cpp:397] Test net output #0: accuracy = 0.133578
I0409 21:44:06.336751 25006 solver.cpp:397] Test net output #1: loss = 4.87577 (* 1 = 4.87577 loss)
I0409 21:44:08.306591 25006 solver.cpp:218] Iteration 2964 (0.69355 iter/s, 17.3023s/12 iters), loss = 1.61618
I0409 21:44:08.306635 25006 solver.cpp:237] Train net output #0: loss = 1.61618 (* 1 = 1.61618 loss)
I0409 21:44:08.306646 25006 sgd_solver.cpp:105] Iteration 2964, lr = 0.00555922
I0409 21:44:13.287966 25006 solver.cpp:218] Iteration 2976 (2.4091 iter/s, 4.98111s/12 iters), loss = 1.75102
I0409 21:44:13.288023 25006 solver.cpp:237] Train net output #0: loss = 1.75102 (* 1 = 1.75102 loss)
I0409 21:44:13.288036 25006 sgd_solver.cpp:105] Iteration 2976, lr = 0.00554603
I0409 21:44:18.258538 25006 solver.cpp:218] Iteration 2988 (2.41434 iter/s, 4.97031s/12 iters), loss = 1.91228
I0409 21:44:18.258575 25006 solver.cpp:237] Train net output #0: loss = 1.91228 (* 1 = 1.91228 loss)
I0409 21:44:18.258584 25006 sgd_solver.cpp:105] Iteration 2988, lr = 0.00553286
I0409 21:44:23.250298 25006 solver.cpp:218] Iteration 3000 (2.40408 iter/s, 4.9915s/12 iters), loss = 2.48488
I0409 21:44:23.250349 25006 solver.cpp:237] Train net output #0: loss = 2.48488 (* 1 = 2.48488 loss)
I0409 21:44:23.250360 25006 sgd_solver.cpp:105] Iteration 3000, lr = 0.00551972
I0409 21:44:28.220914 25006 solver.cpp:218] Iteration 3012 (2.41432 iter/s, 4.97034s/12 iters), loss = 1.87843
I0409 21:44:28.220969 25006 solver.cpp:237] Train net output #0: loss = 1.87843 (* 1 = 1.87843 loss)
I0409 21:44:28.220981 25006 sgd_solver.cpp:105] Iteration 3012, lr = 0.00550662
I0409 21:44:33.229283 25006 solver.cpp:218] Iteration 3024 (2.39612 iter/s, 5.0081s/12 iters), loss = 1.72169
I0409 21:44:33.229326 25006 solver.cpp:237] Train net output #0: loss = 1.72169 (* 1 = 1.72169 loss)
I0409 21:44:33.229336 25006 sgd_solver.cpp:105] Iteration 3024, lr = 0.00549354
I0409 21:44:37.878262 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:44:39.009676 25006 solver.cpp:218] Iteration 3036 (2.07609 iter/s, 5.7801s/12 iters), loss = 1.66618
I0409 21:44:39.009718 25006 solver.cpp:237] Train net output #0: loss = 1.66618 (* 1 = 1.66618 loss)
I0409 21:44:39.009727 25006 sgd_solver.cpp:105] Iteration 3036, lr = 0.0054805
I0409 21:44:44.016296 25006 solver.cpp:218] Iteration 3048 (2.39695 iter/s, 5.00636s/12 iters), loss = 1.59907
I0409 21:44:44.016335 25006 solver.cpp:237] Train net output #0: loss = 1.59907 (* 1 = 1.59907 loss)
I0409 21:44:44.016342 25006 sgd_solver.cpp:105] Iteration 3048, lr = 0.00546749
I0409 21:44:48.554692 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_3060.caffemodel
I0409 21:44:52.320315 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_3060.solverstate
I0409 21:44:56.821089 25006 solver.cpp:330] Iteration 3060, Testing net (#0)
I0409 21:44:56.821116 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:45:00.175807 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:45:01.394898 25006 solver.cpp:397] Test net output #0: accuracy = 0.141544
I0409 21:45:01.394937 25006 solver.cpp:397] Test net output #1: loss = 5.20828 (* 1 = 5.20828 loss)
I0409 21:45:01.483942 25006 solver.cpp:218] Iteration 3060 (0.687015 iter/s, 17.4669s/12 iters), loss = 2.1392
I0409 21:45:01.483997 25006 solver.cpp:237] Train net output #0: loss = 2.1392 (* 1 = 2.1392 loss)
I0409 21:45:01.484009 25006 sgd_solver.cpp:105] Iteration 3060, lr = 0.00545451
I0409 21:45:05.660981 25006 solver.cpp:218] Iteration 3072 (2.87301 iter/s, 4.1768s/12 iters), loss = 1.80953
I0409 21:45:05.661023 25006 solver.cpp:237] Train net output #0: loss = 1.80953 (* 1 = 1.80953 loss)
I0409 21:45:05.661034 25006 sgd_solver.cpp:105] Iteration 3072, lr = 0.00544156
I0409 21:45:10.987747 25006 solver.cpp:218] Iteration 3084 (2.25289 iter/s, 5.32649s/12 iters), loss = 2.00297
I0409 21:45:10.987866 25006 solver.cpp:237] Train net output #0: loss = 2.00297 (* 1 = 2.00297 loss)
I0409 21:45:10.987877 25006 sgd_solver.cpp:105] Iteration 3084, lr = 0.00542864
I0409 21:45:15.924564 25006 solver.cpp:218] Iteration 3096 (2.43088 iter/s, 4.93649s/12 iters), loss = 2.10923
I0409 21:45:15.924612 25006 solver.cpp:237] Train net output #0: loss = 2.10923 (* 1 = 2.10923 loss)
I0409 21:45:15.924625 25006 sgd_solver.cpp:105] Iteration 3096, lr = 0.00541575
I0409 21:45:21.121562 25006 solver.cpp:218] Iteration 3108 (2.30915 iter/s, 5.19672s/12 iters), loss = 1.88954
I0409 21:45:21.121608 25006 solver.cpp:237] Train net output #0: loss = 1.88954 (* 1 = 1.88954 loss)
I0409 21:45:21.121618 25006 sgd_solver.cpp:105] Iteration 3108, lr = 0.00540289
I0409 21:45:26.105381 25006 solver.cpp:218] Iteration 3120 (2.40792 iter/s, 4.98356s/12 iters), loss = 1.42089
I0409 21:45:26.105427 25006 solver.cpp:237] Train net output #0: loss = 1.42089 (* 1 = 1.42089 loss)
I0409 21:45:26.105437 25006 sgd_solver.cpp:105] Iteration 3120, lr = 0.00539006
I0409 21:45:31.486959 25006 solver.cpp:218] Iteration 3132 (2.22995 iter/s, 5.38129s/12 iters), loss = 1.68306
I0409 21:45:31.487013 25006 solver.cpp:237] Train net output #0: loss = 1.68306 (* 1 = 1.68306 loss)
I0409 21:45:31.487027 25006 sgd_solver.cpp:105] Iteration 3132, lr = 0.00537727
I0409 21:45:32.690999 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:45:36.860433 25006 solver.cpp:218] Iteration 3144 (2.23331 iter/s, 5.37319s/12 iters), loss = 1.63697
I0409 21:45:36.860474 25006 solver.cpp:237] Train net output #0: loss = 1.63697 (* 1 = 1.63697 loss)
I0409 21:45:36.860482 25006 sgd_solver.cpp:105] Iteration 3144, lr = 0.0053645
I0409 21:45:41.851848 25006 solver.cpp:218] Iteration 3156 (2.40425 iter/s, 4.99115s/12 iters), loss = 1.64883
I0409 21:45:41.851958 25006 solver.cpp:237] Train net output #0: loss = 1.64883 (* 1 = 1.64883 loss)
I0409 21:45:41.851969 25006 sgd_solver.cpp:105] Iteration 3156, lr = 0.00535176
I0409 21:45:43.892122 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_3162.caffemodel
I0409 21:45:46.135941 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_3162.solverstate
I0409 21:45:49.322866 25006 solver.cpp:330] Iteration 3162, Testing net (#0)
I0409 21:45:49.322896 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:45:52.573062 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:45:53.916538 25006 solver.cpp:397] Test net output #0: accuracy = 0.163603
I0409 21:45:53.916577 25006 solver.cpp:397] Test net output #1: loss = 4.8245 (* 1 = 4.8245 loss)
I0409 21:45:55.892750 25006 solver.cpp:218] Iteration 3168 (0.854688 iter/s, 14.0402s/12 iters), loss = 1.56385
I0409 21:45:55.892794 25006 solver.cpp:237] Train net output #0: loss = 1.56385 (* 1 = 1.56385 loss)
I0409 21:45:55.892803 25006 sgd_solver.cpp:105] Iteration 3168, lr = 0.00533906
I0409 21:46:00.857796 25006 solver.cpp:218] Iteration 3180 (2.41703 iter/s, 4.96478s/12 iters), loss = 1.45485
I0409 21:46:00.857858 25006 solver.cpp:237] Train net output #0: loss = 1.45485 (* 1 = 1.45485 loss)
I0409 21:46:00.857870 25006 sgd_solver.cpp:105] Iteration 3180, lr = 0.00532638
I0409 21:46:05.888293 25006 solver.cpp:218] Iteration 3192 (2.38558 iter/s, 5.03021s/12 iters), loss = 1.62634
I0409 21:46:05.888340 25006 solver.cpp:237] Train net output #0: loss = 1.62634 (* 1 = 1.62634 loss)
I0409 21:46:05.888352 25006 sgd_solver.cpp:105] Iteration 3192, lr = 0.00531374
I0409 21:46:10.852035 25006 solver.cpp:218] Iteration 3204 (2.41766 iter/s, 4.96348s/12 iters), loss = 1.50776
I0409 21:46:10.852087 25006 solver.cpp:237] Train net output #0: loss = 1.50776 (* 1 = 1.50776 loss)
I0409 21:46:10.852097 25006 sgd_solver.cpp:105] Iteration 3204, lr = 0.00530112
I0409 21:46:15.801167 25006 solver.cpp:218] Iteration 3216 (2.4248 iter/s, 4.94887s/12 iters), loss = 1.61837
I0409 21:46:15.801271 25006 solver.cpp:237] Train net output #0: loss = 1.61837 (* 1 = 1.61837 loss)
I0409 21:46:15.801282 25006 sgd_solver.cpp:105] Iteration 3216, lr = 0.00528853
I0409 21:46:20.717319 25006 solver.cpp:218] Iteration 3228 (2.44109 iter/s, 4.91583s/12 iters), loss = 1.59529
I0409 21:46:20.717370 25006 solver.cpp:237] Train net output #0: loss = 1.59529 (* 1 = 1.59529 loss)
I0409 21:46:20.717381 25006 sgd_solver.cpp:105] Iteration 3228, lr = 0.00527598
I0409 21:46:23.879354 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:46:25.635357 25006 solver.cpp:218] Iteration 3240 (2.44013 iter/s, 4.91777s/12 iters), loss = 1.52409
I0409 21:46:25.635406 25006 solver.cpp:237] Train net output #0: loss = 1.52409 (* 1 = 1.52409 loss)
I0409 21:46:25.635416 25006 sgd_solver.cpp:105] Iteration 3240, lr = 0.00526345
I0409 21:46:30.619674 25006 solver.cpp:218] Iteration 3252 (2.40768 iter/s, 4.98405s/12 iters), loss = 1.40519
I0409 21:46:30.619724 25006 solver.cpp:237] Train net output #0: loss = 1.40519 (* 1 = 1.40519 loss)
I0409 21:46:30.619733 25006 sgd_solver.cpp:105] Iteration 3252, lr = 0.00525095
I0409 21:46:35.308763 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_3264.caffemodel
I0409 21:46:39.671388 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_3264.solverstate
I0409 21:46:44.304823 25006 solver.cpp:330] Iteration 3264, Testing net (#0)
I0409 21:46:44.304849 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:46:47.472904 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:46:48.838474 25006 solver.cpp:397] Test net output #0: accuracy = 0.166667
I0409 21:46:48.838510 25006 solver.cpp:397] Test net output #1: loss = 5.00442 (* 1 = 5.00442 loss)
I0409 21:46:48.927629 25006 solver.cpp:218] Iteration 3264 (0.655482 iter/s, 18.3071s/12 iters), loss = 1.54163
I0409 21:46:48.927675 25006 solver.cpp:237] Train net output #0: loss = 1.54163 (* 1 = 1.54163 loss)
I0409 21:46:48.927683 25006 sgd_solver.cpp:105] Iteration 3264, lr = 0.00523849
I0409 21:46:53.009691 25006 solver.cpp:218] Iteration 3276 (2.93985 iter/s, 4.08183s/12 iters), loss = 1.76038
I0409 21:46:53.009734 25006 solver.cpp:237] Train net output #0: loss = 1.76038 (* 1 = 1.76038 loss)
I0409 21:46:53.009743 25006 sgd_solver.cpp:105] Iteration 3276, lr = 0.00522605
I0409 21:46:57.971417 25006 solver.cpp:218] Iteration 3288 (2.41864 iter/s, 4.96146s/12 iters), loss = 1.70089
I0409 21:46:57.971470 25006 solver.cpp:237] Train net output #0: loss = 1.70089 (* 1 = 1.70089 loss)
I0409 21:46:57.971482 25006 sgd_solver.cpp:105] Iteration 3288, lr = 0.00521364
I0409 21:47:02.938467 25006 solver.cpp:218] Iteration 3300 (2.41605 iter/s, 4.96678s/12 iters), loss = 1.62783
I0409 21:47:02.938527 25006 solver.cpp:237] Train net output #0: loss = 1.62783 (* 1 = 1.62783 loss)
I0409 21:47:02.938539 25006 sgd_solver.cpp:105] Iteration 3300, lr = 0.00520126
I0409 21:47:07.911891 25006 solver.cpp:218] Iteration 3312 (2.41296 iter/s, 4.97315s/12 iters), loss = 1.68505
I0409 21:47:07.911942 25006 solver.cpp:237] Train net output #0: loss = 1.68505 (* 1 = 1.68505 loss)
I0409 21:47:07.911952 25006 sgd_solver.cpp:105] Iteration 3312, lr = 0.00518892
I0409 21:47:12.935289 25006 solver.cpp:218] Iteration 3324 (2.38895 iter/s, 5.02313s/12 iters), loss = 1.51624
I0409 21:47:12.935340 25006 solver.cpp:237] Train net output #0: loss = 1.51624 (* 1 = 1.51624 loss)
I0409 21:47:12.935353 25006 sgd_solver.cpp:105] Iteration 3324, lr = 0.0051766
I0409 21:47:17.912696 25006 solver.cpp:218] Iteration 3336 (2.41103 iter/s, 4.97714s/12 iters), loss = 1.83926
I0409 21:47:17.912820 25006 solver.cpp:237] Train net output #0: loss = 1.83926 (* 1 = 1.83926 loss)
I0409 21:47:17.912832 25006 sgd_solver.cpp:105] Iteration 3336, lr = 0.00516431
I0409 21:47:18.380049 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:47:22.904968 25006 solver.cpp:218] Iteration 3348 (2.40388 iter/s, 4.99193s/12 iters), loss = 1.36727
I0409 21:47:22.905020 25006 solver.cpp:237] Train net output #0: loss = 1.36727 (* 1 = 1.36727 loss)
I0409 21:47:22.905035 25006 sgd_solver.cpp:105] Iteration 3348, lr = 0.00515204
I0409 21:47:27.883580 25006 solver.cpp:218] Iteration 3360 (2.41044 iter/s, 4.97834s/12 iters), loss = 1.62478
I0409 21:47:27.883638 25006 solver.cpp:237] Train net output #0: loss = 1.62478 (* 1 = 1.62478 loss)
I0409 21:47:27.883651 25006 sgd_solver.cpp:105] Iteration 3360, lr = 0.00513981
I0409 21:47:29.944629 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_3366.caffemodel
I0409 21:47:34.897408 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_3366.solverstate
I0409 21:47:38.182466 25006 solver.cpp:330] Iteration 3366, Testing net (#0)
I0409 21:47:38.182488 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:47:41.362995 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:47:42.990806 25006 solver.cpp:397] Test net output #0: accuracy = 0.156863
I0409 21:47:42.990845 25006 solver.cpp:397] Test net output #1: loss = 5.26102 (* 1 = 5.26102 loss)
I0409 21:47:44.921941 25006 solver.cpp:218] Iteration 3372 (0.704325 iter/s, 17.0376s/12 iters), loss = 1.41964
I0409 21:47:44.922020 25006 solver.cpp:237] Train net output #0: loss = 1.41964 (* 1 = 1.41964 loss)
I0409 21:47:44.922034 25006 sgd_solver.cpp:105] Iteration 3372, lr = 0.00512761
I0409 21:47:50.024912 25006 solver.cpp:218] Iteration 3384 (2.35171 iter/s, 5.10266s/12 iters), loss = 1.67503
I0409 21:47:50.025050 25006 solver.cpp:237] Train net output #0: loss = 1.67503 (* 1 = 1.67503 loss)
I0409 21:47:50.025065 25006 sgd_solver.cpp:105] Iteration 3384, lr = 0.00511544
I0409 21:47:54.986757 25006 solver.cpp:218] Iteration 3396 (2.41863 iter/s, 4.96149s/12 iters), loss = 1.20992
I0409 21:47:54.986807 25006 solver.cpp:237] Train net output #0: loss = 1.20992 (* 1 = 1.20992 loss)
I0409 21:47:54.986820 25006 sgd_solver.cpp:105] Iteration 3396, lr = 0.00510329
I0409 21:48:00.172549 25006 solver.cpp:218] Iteration 3408 (2.31414 iter/s, 5.18551s/12 iters), loss = 1.74684
I0409 21:48:00.172600 25006 solver.cpp:237] Train net output #0: loss = 1.74684 (* 1 = 1.74684 loss)
I0409 21:48:00.172613 25006 sgd_solver.cpp:105] Iteration 3408, lr = 0.00509117
I0409 21:48:05.123530 25006 solver.cpp:218] Iteration 3420 (2.42389 iter/s, 4.95071s/12 iters), loss = 1.26412
I0409 21:48:05.123572 25006 solver.cpp:237] Train net output #0: loss = 1.26412 (* 1 = 1.26412 loss)
I0409 21:48:05.123582 25006 sgd_solver.cpp:105] Iteration 3420, lr = 0.00507909
I0409 21:48:10.138717 25006 solver.cpp:218] Iteration 3432 (2.39286 iter/s, 5.01493s/12 iters), loss = 1.57791
I0409 21:48:10.138756 25006 solver.cpp:237] Train net output #0: loss = 1.57791 (* 1 = 1.57791 loss)
I0409 21:48:10.138764 25006 sgd_solver.cpp:105] Iteration 3432, lr = 0.00506703
I0409 21:48:12.766942 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:48:15.153337 25006 solver.cpp:218] Iteration 3444 (2.39313 iter/s, 5.01436s/12 iters), loss = 1.38501
I0409 21:48:15.153393 25006 solver.cpp:237] Train net output #0: loss = 1.38501 (* 1 = 1.38501 loss)
I0409 21:48:15.153403 25006 sgd_solver.cpp:105] Iteration 3444, lr = 0.005055
I0409 21:48:20.231210 25006 solver.cpp:218] Iteration 3456 (2.36332 iter/s, 5.0776s/12 iters), loss = 1.5535
I0409 21:48:20.231333 25006 solver.cpp:237] Train net output #0: loss = 1.5535 (* 1 = 1.5535 loss)
I0409 21:48:20.231344 25006 sgd_solver.cpp:105] Iteration 3456, lr = 0.005043
I0409 21:48:24.845508 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_3468.caffemodel
I0409 21:48:27.207473 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_3468.solverstate
I0409 21:48:28.929781 25006 solver.cpp:330] Iteration 3468, Testing net (#0)
I0409 21:48:28.929808 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:48:29.396934 25006 blocking_queue.cpp:49] Waiting for data
I0409 21:48:32.238730 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:48:33.770778 25006 solver.cpp:397] Test net output #0: accuracy = 0.158701
I0409 21:48:33.770818 25006 solver.cpp:397] Test net output #1: loss = 5.02614 (* 1 = 5.02614 loss)
I0409 21:48:33.859958 25006 solver.cpp:218] Iteration 3468 (0.880536 iter/s, 13.6281s/12 iters), loss = 1.484
I0409 21:48:33.860013 25006 solver.cpp:237] Train net output #0: loss = 1.484 (* 1 = 1.484 loss)
I0409 21:48:33.860026 25006 sgd_solver.cpp:105] Iteration 3468, lr = 0.00503102
I0409 21:48:38.300540 25006 solver.cpp:218] Iteration 3480 (2.7025 iter/s, 4.44033s/12 iters), loss = 1.64592
I0409 21:48:38.300595 25006 solver.cpp:237] Train net output #0: loss = 1.64592 (* 1 = 1.64592 loss)
I0409 21:48:38.300607 25006 sgd_solver.cpp:105] Iteration 3480, lr = 0.00501908
I0409 21:48:43.312948 25006 solver.cpp:218] Iteration 3492 (2.39419 iter/s, 5.01213s/12 iters), loss = 1.45563
I0409 21:48:43.313004 25006 solver.cpp:237] Train net output #0: loss = 1.45563 (* 1 = 1.45563 loss)
I0409 21:48:43.313015 25006 sgd_solver.cpp:105] Iteration 3492, lr = 0.00500716
I0409 21:48:48.412226 25006 solver.cpp:218] Iteration 3504 (2.3534 iter/s, 5.099s/12 iters), loss = 1.57887
I0409 21:48:48.412279 25006 solver.cpp:237] Train net output #0: loss = 1.57887 (* 1 = 1.57887 loss)
I0409 21:48:48.412292 25006 sgd_solver.cpp:105] Iteration 3504, lr = 0.00499527
I0409 21:48:53.378295 25006 solver.cpp:218] Iteration 3516 (2.41653 iter/s, 4.96581s/12 iters), loss = 1.12945
I0409 21:48:53.378362 25006 solver.cpp:237] Train net output #0: loss = 1.12945 (* 1 = 1.12945 loss)
I0409 21:48:53.378372 25006 sgd_solver.cpp:105] Iteration 3516, lr = 0.00498341
I0409 21:48:58.351560 25006 solver.cpp:218] Iteration 3528 (2.41304 iter/s, 4.97298s/12 iters), loss = 1.0358
I0409 21:48:58.351614 25006 solver.cpp:237] Train net output #0: loss = 1.0358 (* 1 = 1.0358 loss)
I0409 21:48:58.351627 25006 sgd_solver.cpp:105] Iteration 3528, lr = 0.00497158
I0409 21:49:03.054680 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:49:03.313374 25006 solver.cpp:218] Iteration 3540 (2.4186 iter/s, 4.96154s/12 iters), loss = 1.37284
I0409 21:49:03.313421 25006 solver.cpp:237] Train net output #0: loss = 1.37284 (* 1 = 1.37284 loss)
I0409 21:49:03.313431 25006 sgd_solver.cpp:105] Iteration 3540, lr = 0.00495978
I0409 21:49:08.478921 25006 solver.cpp:218] Iteration 3552 (2.32321 iter/s, 5.16527s/12 iters), loss = 0.87762
I0409 21:49:08.478965 25006 solver.cpp:237] Train net output #0: loss = 0.87762 (* 1 = 0.87762 loss)
I0409 21:49:08.478974 25006 sgd_solver.cpp:105] Iteration 3552, lr = 0.004948
I0409 21:49:13.409747 25006 solver.cpp:218] Iteration 3564 (2.4338 iter/s, 4.93056s/12 iters), loss = 1.16808
I0409 21:49:13.409806 25006 solver.cpp:237] Train net output #0: loss = 1.16808 (* 1 = 1.16808 loss)
I0409 21:49:13.409817 25006 sgd_solver.cpp:105] Iteration 3564, lr = 0.00493626
I0409 21:49:15.434520 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_3570.caffemodel
I0409 21:49:17.597007 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_3570.solverstate
I0409 21:49:19.281900 25006 solver.cpp:330] Iteration 3570, Testing net (#0)
I0409 21:49:19.281929 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:49:22.344568 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:49:23.770423 25006 solver.cpp:397] Test net output #0: accuracy = 0.159314
I0409 21:49:23.770560 25006 solver.cpp:397] Test net output #1: loss = 5.22179 (* 1 = 5.22179 loss)
I0409 21:49:25.629750 25006 solver.cpp:218] Iteration 3576 (0.982042 iter/s, 12.2194s/12 iters), loss = 1.35172
I0409 21:49:25.629796 25006 solver.cpp:237] Train net output #0: loss = 1.35172 (* 1 = 1.35172 loss)
I0409 21:49:25.629804 25006 sgd_solver.cpp:105] Iteration 3576, lr = 0.00492454
I0409 21:49:30.568361 25006 solver.cpp:218] Iteration 3588 (2.42996 iter/s, 4.93834s/12 iters), loss = 1.02424
I0409 21:49:30.568421 25006 solver.cpp:237] Train net output #0: loss = 1.02424 (* 1 = 1.02424 loss)
I0409 21:49:30.568434 25006 sgd_solver.cpp:105] Iteration 3588, lr = 0.00491284
I0409 21:49:35.565793 25006 solver.cpp:218] Iteration 3600 (2.40137 iter/s, 4.99715s/12 iters), loss = 1.17191
I0409 21:49:35.565850 25006 solver.cpp:237] Train net output #0: loss = 1.17191 (* 1 = 1.17191 loss)
I0409 21:49:35.565861 25006 sgd_solver.cpp:105] Iteration 3600, lr = 0.00490118
I0409 21:49:40.908540 25006 solver.cpp:218] Iteration 3612 (2.24616 iter/s, 5.34246s/12 iters), loss = 1.27312
I0409 21:49:40.908591 25006 solver.cpp:237] Train net output #0: loss = 1.27312 (* 1 = 1.27312 loss)
I0409 21:49:40.908604 25006 sgd_solver.cpp:105] Iteration 3612, lr = 0.00488954
I0409 21:49:46.076328 25006 solver.cpp:218] Iteration 3624 (2.3222 iter/s, 5.16751s/12 iters), loss = 1.41776
I0409 21:49:46.076383 25006 solver.cpp:237] Train net output #0: loss = 1.41776 (* 1 = 1.41776 loss)
I0409 21:49:46.076396 25006 sgd_solver.cpp:105] Iteration 3624, lr = 0.00487793
I0409 21:49:51.478657 25006 solver.cpp:218] Iteration 3636 (2.22138 iter/s, 5.40205s/12 iters), loss = 1.31537
I0409 21:49:51.478706 25006 solver.cpp:237] Train net output #0: loss = 1.31537 (* 1 = 1.31537 loss)
I0409 21:49:51.478718 25006 sgd_solver.cpp:105] Iteration 3636, lr = 0.00486635
I0409 21:49:53.413478 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:49:56.538806 25006 solver.cpp:218] Iteration 3648 (2.3716 iter/s, 5.05988s/12 iters), loss = 0.75087
I0409 21:49:56.538918 25006 solver.cpp:237] Train net output #0: loss = 0.75087 (* 1 = 0.75087 loss)
I0409 21:49:56.538931 25006 sgd_solver.cpp:105] Iteration 3648, lr = 0.0048548
I0409 21:50:01.492254 25006 solver.cpp:218] Iteration 3660 (2.42271 iter/s, 4.95313s/12 iters), loss = 1.2479
I0409 21:50:01.492292 25006 solver.cpp:237] Train net output #0: loss = 1.2479 (* 1 = 1.2479 loss)
I0409 21:50:01.492303 25006 sgd_solver.cpp:105] Iteration 3660, lr = 0.00484327
I0409 21:50:05.965760 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_3672.caffemodel
I0409 21:50:09.879256 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_3672.solverstate
I0409 21:50:11.521847 25006 solver.cpp:330] Iteration 3672, Testing net (#0)
I0409 21:50:11.521874 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:50:14.599889 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:50:16.066184 25006 solver.cpp:397] Test net output #0: accuracy = 0.161152
I0409 21:50:16.066234 25006 solver.cpp:397] Test net output #1: loss = 5.29467 (* 1 = 5.29467 loss)
I0409 21:50:16.155306 25006 solver.cpp:218] Iteration 3672 (0.81842 iter/s, 14.6624s/12 iters), loss = 1.29469
I0409 21:50:16.155354 25006 solver.cpp:237] Train net output #0: loss = 1.29469 (* 1 = 1.29469 loss)
I0409 21:50:16.155365 25006 sgd_solver.cpp:105] Iteration 3672, lr = 0.00483177
I0409 21:50:20.439093 25006 solver.cpp:218] Iteration 3684 (2.80142 iter/s, 4.28355s/12 iters), loss = 1.11155
I0409 21:50:20.439152 25006 solver.cpp:237] Train net output #0: loss = 1.11155 (* 1 = 1.11155 loss)
I0409 21:50:20.439163 25006 sgd_solver.cpp:105] Iteration 3684, lr = 0.0048203
I0409 21:50:25.349889 25006 solver.cpp:218] Iteration 3696 (2.44373 iter/s, 4.91052s/12 iters), loss = 1.20112
I0409 21:50:25.349941 25006 solver.cpp:237] Train net output #0: loss = 1.20112 (* 1 = 1.20112 loss)
I0409 21:50:25.349978 25006 sgd_solver.cpp:105] Iteration 3696, lr = 0.00480886
I0409 21:50:30.260605 25006 solver.cpp:218] Iteration 3708 (2.44377 iter/s, 4.91045s/12 iters), loss = 1.19654
I0409 21:50:30.260768 25006 solver.cpp:237] Train net output #0: loss = 1.19654 (* 1 = 1.19654 loss)
I0409 21:50:30.260780 25006 sgd_solver.cpp:105] Iteration 3708, lr = 0.00479744
I0409 21:50:35.278934 25006 solver.cpp:218] Iteration 3720 (2.39141 iter/s, 5.01795s/12 iters), loss = 1.10118
I0409 21:50:35.278972 25006 solver.cpp:237] Train net output #0: loss = 1.10118 (* 1 = 1.10118 loss)
I0409 21:50:35.278983 25006 sgd_solver.cpp:105] Iteration 3720, lr = 0.00478605
I0409 21:50:40.243175 25006 solver.cpp:218] Iteration 3732 (2.41741 iter/s, 4.96399s/12 iters), loss = 1.03401
I0409 21:50:40.243229 25006 solver.cpp:237] Train net output #0: loss = 1.03401 (* 1 = 1.03401 loss)
I0409 21:50:40.243242 25006 sgd_solver.cpp:105] Iteration 3732, lr = 0.00477469
I0409 21:50:44.283186 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:50:45.257683 25006 solver.cpp:218] Iteration 3744 (2.39319 iter/s, 5.01424s/12 iters), loss = 1.00902
I0409 21:50:45.257736 25006 solver.cpp:237] Train net output #0: loss = 1.00902 (* 1 = 1.00902 loss)
I0409 21:50:45.257750 25006 sgd_solver.cpp:105] Iteration 3744, lr = 0.00476335
I0409 21:50:50.457602 25006 solver.cpp:218] Iteration 3756 (2.30785 iter/s, 5.19964s/12 iters), loss = 1.11666
I0409 21:50:50.457649 25006 solver.cpp:237] Train net output #0: loss = 1.11666 (* 1 = 1.11666 loss)
I0409 21:50:50.457660 25006 sgd_solver.cpp:105] Iteration 3756, lr = 0.00475204
I0409 21:50:55.454929 25006 solver.cpp:218] Iteration 3768 (2.40141 iter/s, 4.99706s/12 iters), loss = 1.15379
I0409 21:50:55.454979 25006 solver.cpp:237] Train net output #0: loss = 1.15379 (* 1 = 1.15379 loss)
I0409 21:50:55.454993 25006 sgd_solver.cpp:105] Iteration 3768, lr = 0.00474076
I0409 21:50:57.465131 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_3774.caffemodel
I0409 21:51:08.094992 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_3774.solverstate
I0409 21:51:13.786110 25006 solver.cpp:330] Iteration 3774, Testing net (#0)
I0409 21:51:13.786132 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:51:17.029115 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:51:18.624366 25006 solver.cpp:397] Test net output #0: accuracy = 0.16299
I0409 21:51:18.624411 25006 solver.cpp:397] Test net output #1: loss = 5.25554 (* 1 = 5.25554 loss)
I0409 21:51:20.562042 25006 solver.cpp:218] Iteration 3780 (0.477973 iter/s, 25.106s/12 iters), loss = 0.861768
I0409 21:51:20.562089 25006 solver.cpp:237] Train net output #0: loss = 0.861768 (* 1 = 0.861768 loss)
I0409 21:51:20.562099 25006 sgd_solver.cpp:105] Iteration 3780, lr = 0.00472951
I0409 21:51:25.551637 25006 solver.cpp:218] Iteration 3792 (2.40513 iter/s, 4.98933s/12 iters), loss = 1.18434
I0409 21:51:25.551679 25006 solver.cpp:237] Train net output #0: loss = 1.18434 (* 1 = 1.18434 loss)
I0409 21:51:25.551689 25006 sgd_solver.cpp:105] Iteration 3792, lr = 0.00471828
I0409 21:51:30.470006 25006 solver.cpp:218] Iteration 3804 (2.43996 iter/s, 4.91811s/12 iters), loss = 1.10935
I0409 21:51:30.470057 25006 solver.cpp:237] Train net output #0: loss = 1.10935 (* 1 = 1.10935 loss)
I0409 21:51:30.470067 25006 sgd_solver.cpp:105] Iteration 3804, lr = 0.00470707
I0409 21:51:35.452100 25006 solver.cpp:218] Iteration 3816 (2.40875 iter/s, 4.98183s/12 iters), loss = 0.96988
I0409 21:51:35.452142 25006 solver.cpp:237] Train net output #0: loss = 0.96988 (* 1 = 0.96988 loss)
I0409 21:51:35.452152 25006 sgd_solver.cpp:105] Iteration 3816, lr = 0.0046959
I0409 21:51:40.367046 25006 solver.cpp:218] Iteration 3828 (2.44166 iter/s, 4.91469s/12 iters), loss = 1.20652
I0409 21:51:40.367180 25006 solver.cpp:237] Train net output #0: loss = 1.20652 (* 1 = 1.20652 loss)
I0409 21:51:40.367192 25006 sgd_solver.cpp:105] Iteration 3828, lr = 0.00468475
I0409 21:51:45.513981 25006 solver.cpp:218] Iteration 3840 (2.33166 iter/s, 5.14655s/12 iters), loss = 1.04251
I0409 21:51:45.514027 25006 solver.cpp:237] Train net output #0: loss = 1.04251 (* 1 = 1.04251 loss)
I0409 21:51:45.514036 25006 sgd_solver.cpp:105] Iteration 3840, lr = 0.00467363
I0409 21:51:46.639103 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:51:50.559674 25006 solver.cpp:218] Iteration 3852 (2.3784 iter/s, 5.04542s/12 iters), loss = 0.925223
I0409 21:51:50.559733 25006 solver.cpp:237] Train net output #0: loss = 0.925223 (* 1 = 0.925223 loss)
I0409 21:51:50.559747 25006 sgd_solver.cpp:105] Iteration 3852, lr = 0.00466253
I0409 21:51:55.662014 25006 solver.cpp:218] Iteration 3864 (2.35199 iter/s, 5.10206s/12 iters), loss = 1.06473
I0409 21:51:55.662065 25006 solver.cpp:237] Train net output #0: loss = 1.06473 (* 1 = 1.06473 loss)
I0409 21:51:55.662078 25006 sgd_solver.cpp:105] Iteration 3864, lr = 0.00465146
I0409 21:52:00.144850 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_3876.caffemodel
I0409 21:52:05.218600 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_3876.solverstate
I0409 21:52:10.833048 25006 solver.cpp:330] Iteration 3876, Testing net (#0)
I0409 21:52:10.833140 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:52:13.934273 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:52:15.610731 25006 solver.cpp:397] Test net output #0: accuracy = 0.162377
I0409 21:52:15.610775 25006 solver.cpp:397] Test net output #1: loss = 5.38714 (* 1 = 5.38714 loss)
I0409 21:52:15.699411 25006 solver.cpp:218] Iteration 3876 (0.598907 iter/s, 20.0365s/12 iters), loss = 1.19182
I0409 21:52:15.699462 25006 solver.cpp:237] Train net output #0: loss = 1.19182 (* 1 = 1.19182 loss)
I0409 21:52:15.699473 25006 sgd_solver.cpp:105] Iteration 3876, lr = 0.00464042
I0409 21:52:19.903473 25006 solver.cpp:218] Iteration 3888 (2.85454 iter/s, 4.20382s/12 iters), loss = 1.14302
I0409 21:52:19.903519 25006 solver.cpp:237] Train net output #0: loss = 1.14302 (* 1 = 1.14302 loss)
I0409 21:52:19.903530 25006 sgd_solver.cpp:105] Iteration 3888, lr = 0.0046294
I0409 21:52:25.036777 25006 solver.cpp:218] Iteration 3900 (2.3378 iter/s, 5.13303s/12 iters), loss = 1.13939
I0409 21:52:25.036830 25006 solver.cpp:237] Train net output #0: loss = 1.13939 (* 1 = 1.13939 loss)
I0409 21:52:25.036844 25006 sgd_solver.cpp:105] Iteration 3900, lr = 0.00461841
I0409 21:52:30.022019 25006 solver.cpp:218] Iteration 3912 (2.40724 iter/s, 4.98497s/12 iters), loss = 1.19089
I0409 21:52:30.022068 25006 solver.cpp:237] Train net output #0: loss = 1.19089 (* 1 = 1.19089 loss)
I0409 21:52:30.022081 25006 sgd_solver.cpp:105] Iteration 3912, lr = 0.00460744
I0409 21:52:34.952874 25006 solver.cpp:218] Iteration 3924 (2.43378 iter/s, 4.93059s/12 iters), loss = 0.998463
I0409 21:52:34.952916 25006 solver.cpp:237] Train net output #0: loss = 0.998463 (* 1 = 0.998463 loss)
I0409 21:52:34.952925 25006 sgd_solver.cpp:105] Iteration 3924, lr = 0.0045965
I0409 21:52:39.954859 25006 solver.cpp:218] Iteration 3936 (2.39917 iter/s, 5.00172s/12 iters), loss = 0.837692
I0409 21:52:39.954905 25006 solver.cpp:237] Train net output #0: loss = 0.837692 (* 1 = 0.837692 loss)
I0409 21:52:39.954916 25006 sgd_solver.cpp:105] Iteration 3936, lr = 0.00458559
I0409 21:52:43.327394 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:52:44.928364 25006 solver.cpp:218] Iteration 3948 (2.41291 iter/s, 4.97324s/12 iters), loss = 1.10376
I0409 21:52:44.928419 25006 solver.cpp:237] Train net output #0: loss = 1.10376 (* 1 = 1.10376 loss)
I0409 21:52:44.928431 25006 sgd_solver.cpp:105] Iteration 3948, lr = 0.0045747
I0409 21:52:49.948151 25006 solver.cpp:218] Iteration 3960 (2.39067 iter/s, 5.01951s/12 iters), loss = 0.82852
I0409 21:52:49.948208 25006 solver.cpp:237] Train net output #0: loss = 0.82852 (* 1 = 0.82852 loss)
I0409 21:52:49.948220 25006 sgd_solver.cpp:105] Iteration 3960, lr = 0.00456384
I0409 21:52:54.909003 25006 solver.cpp:218] Iteration 3972 (2.41907 iter/s, 4.96058s/12 iters), loss = 1.13715
I0409 21:52:54.909051 25006 solver.cpp:237] Train net output #0: loss = 1.13715 (* 1 = 1.13715 loss)
I0409 21:52:54.909062 25006 sgd_solver.cpp:105] Iteration 3972, lr = 0.00455301
I0409 21:52:56.934271 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_3978.caffemodel
I0409 21:52:59.092470 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_3978.solverstate
I0409 21:53:00.740820 25006 solver.cpp:330] Iteration 3978, Testing net (#0)
I0409 21:53:00.740849 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:53:03.635362 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:53:05.246963 25006 solver.cpp:397] Test net output #0: accuracy = 0.158088
I0409 21:53:05.247012 25006 solver.cpp:397] Test net output #1: loss = 5.50424 (* 1 = 5.50424 loss)
I0409 21:53:07.237932 25006 solver.cpp:218] Iteration 3984 (0.973365 iter/s, 12.3284s/12 iters), loss = 0.970268
I0409 21:53:07.238006 25006 solver.cpp:237] Train net output #0: loss = 0.970268 (* 1 = 0.970268 loss)
I0409 21:53:07.238019 25006 sgd_solver.cpp:105] Iteration 3984, lr = 0.0045422
I0409 21:53:12.265337 25006 solver.cpp:218] Iteration 3996 (2.38706 iter/s, 5.02711s/12 iters), loss = 0.898375
I0409 21:53:12.265410 25006 solver.cpp:237] Train net output #0: loss = 0.898375 (* 1 = 0.898375 loss)
I0409 21:53:12.265427 25006 sgd_solver.cpp:105] Iteration 3996, lr = 0.00453141
I0409 21:53:17.186745 25006 solver.cpp:218] Iteration 4008 (2.43847 iter/s, 4.92111s/12 iters), loss = 1.19244
I0409 21:53:17.186946 25006 solver.cpp:237] Train net output #0: loss = 1.19244 (* 1 = 1.19244 loss)
I0409 21:53:17.186972 25006 sgd_solver.cpp:105] Iteration 4008, lr = 0.00452066
I0409 21:53:22.139822 25006 solver.cpp:218] Iteration 4020 (2.42294 iter/s, 4.95267s/12 iters), loss = 0.995886
I0409 21:53:22.139863 25006 solver.cpp:237] Train net output #0: loss = 0.995886 (* 1 = 0.995886 loss)
I0409 21:53:22.139873 25006 sgd_solver.cpp:105] Iteration 4020, lr = 0.00450992
I0409 21:53:27.063621 25006 solver.cpp:218] Iteration 4032 (2.43727 iter/s, 4.92354s/12 iters), loss = 1.26551
I0409 21:53:27.063673 25006 solver.cpp:237] Train net output #0: loss = 1.26551 (* 1 = 1.26551 loss)
I0409 21:53:27.063685 25006 sgd_solver.cpp:105] Iteration 4032, lr = 0.00449921
I0409 21:53:32.178633 25006 solver.cpp:218] Iteration 4044 (2.34616 iter/s, 5.11474s/12 iters), loss = 0.907651
I0409 21:53:32.178680 25006 solver.cpp:237] Train net output #0: loss = 0.907651 (* 1 = 0.907651 loss)
I0409 21:53:32.178690 25006 sgd_solver.cpp:105] Iteration 4044, lr = 0.00448853
I0409 21:53:32.735716 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:53:37.370700 25006 solver.cpp:218] Iteration 4056 (2.31134 iter/s, 5.19179s/12 iters), loss = 1.08828
I0409 21:53:37.370752 25006 solver.cpp:237] Train net output #0: loss = 1.08828 (* 1 = 1.08828 loss)
I0409 21:53:37.370767 25006 sgd_solver.cpp:105] Iteration 4056, lr = 0.00447788
I0409 21:53:42.304908 25006 solver.cpp:218] Iteration 4068 (2.43213 iter/s, 4.93394s/12 iters), loss = 0.929911
I0409 21:53:42.304963 25006 solver.cpp:237] Train net output #0: loss = 0.929911 (* 1 = 0.929911 loss)
I0409 21:53:42.304981 25006 sgd_solver.cpp:105] Iteration 4068, lr = 0.00446724
I0409 21:53:46.826743 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_4080.caffemodel
I0409 21:53:49.003538 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_4080.solverstate
I0409 21:53:50.661424 25006 solver.cpp:330] Iteration 4080, Testing net (#0)
I0409 21:53:50.661453 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:53:53.494257 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:53:55.111127 25006 solver.cpp:397] Test net output #0: accuracy = 0.161765
I0409 21:53:55.111168 25006 solver.cpp:397] Test net output #1: loss = 5.29631 (* 1 = 5.29631 loss)
I0409 21:53:55.200227 25006 solver.cpp:218] Iteration 4080 (0.930613 iter/s, 12.8947s/12 iters), loss = 0.978103
I0409 21:53:55.200273 25006 solver.cpp:237] Train net output #0: loss = 0.978103 (* 1 = 0.978103 loss)
I0409 21:53:55.200285 25006 sgd_solver.cpp:105] Iteration 4080, lr = 0.00445664
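
For offline analysis of the curves recorded above (train loss every 12 iterations, lr, and the periodic test accuracy), the recurring glog line formats can be scraped directly. Below is a rough sketch using only the Python standard library; the filename is a placeholder, and the regexes and variable names are ours, keyed to the solver.cpp:218, sgd_solver.cpp:105, solver.cpp:330 and solver.cpp:397 lines seen in this log.

import re

# Regexes keyed to the recurring log lines in this file:
#   "... solver.cpp:218] Iteration 4080 (... iter/s, ...), loss = 0.978103"
#   "... sgd_solver.cpp:105] Iteration 4080, lr = 0.00445664"
#   "... solver.cpp:330] Iteration 4080, Testing net (#0)"
#   "... solver.cpp:397] Test net output #0: accuracy = 0.161765"
train_re = re.compile(r"solver\.cpp:218\] Iteration (\d+) \(.*\), loss = ([\d.]+)")
lr_re    = re.compile(r"sgd_solver\.cpp:105\] Iteration (\d+), lr = ([\d.eE+-]+)")
test_re  = re.compile(r"solver\.cpp:330\] Iteration (\d+), Testing net")
acc_re   = re.compile(r"Test net output #0: accuracy = ([\d.]+)")

train_loss, lr_curve, test_acc = [], [], []
test_iter = None
with open("caffe_output.log") as f:            # placeholder filename
    for line in f:
        if (m := train_re.search(line)):
            train_loss.append((int(m.group(1)), float(m.group(2))))
        elif (m := lr_re.search(line)):
            lr_curve.append((int(m.group(1)), float(m.group(2))))
        elif (m := test_re.search(line)):
            test_iter = int(m.group(1))        # next accuracy line belongs here
        elif (m := acc_re.search(line)):
            test_acc.append((test_iter, float(m.group(1))))

print(train_loss[-1], lr_curve[-1], test_acc[-1])

Pairing each accuracy line with the preceding "Testing net" iteration mirrors the ordering in this log, where the test block prints before the matching solver.cpp:218 summary for that iteration.
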
I0409 21:53:59.482537 25006 solver.cpp:218] Iteration 4092 (2.80238 iter/s, 4.28207s/12 iters), loss = 0.675977
I0409 21:53:59.482596 25006 solver.cpp:237] Train net output #0: loss = 0.675977 (* 1 = 0.675977 loss)
I0409 21:53:59.482609 25006 sgd_solver.cpp:105] Iteration 4092, lr = 0.00444606
I0409 21:54:04.816030 25006 solver.cpp:218] Iteration 4104 (2.25006 iter/s, 5.3332s/12 iters), loss = 0.93119
I0409 21:54:04.816088 25006 solver.cpp:237] Train net output #0: loss = 0.93119 (* 1 = 0.93119 loss)
I0409 21:54:04.816102 25006 sgd_solver.cpp:105] Iteration 4104, lr = 0.0044355
I0409 21:54:10.064688 25006 solver.cpp:218] Iteration 4116 (2.28642 iter/s, 5.24837s/12 iters), loss = 0.694307
I0409 21:54:10.064740 25006 solver.cpp:237] Train net output #0: loss = 0.694307 (* 1 = 0.694307 loss)
I0409 21:54:10.064754 25006 sgd_solver.cpp:105] Iteration 4116, lr = 0.00442497
I0409 21:54:15.098179 25006 solver.cpp:218] Iteration 4128 (2.38416 iter/s, 5.03322s/12 iters), loss = 0.860723
I0409 21:54:15.098232 25006 solver.cpp:237] Train net output #0: loss = 0.860723 (* 1 = 0.860723 loss)
I0409 21:54:15.098246 25006 sgd_solver.cpp:105] Iteration 4128, lr = 0.00441447
I0409 21:54:20.216220 25006 solver.cpp:218] Iteration 4140 (2.34477 iter/s, 5.11776s/12 iters), loss = 0.959092
I0409 21:54:20.216361 25006 solver.cpp:237] Train net output #0: loss = 0.959092 (* 1 = 0.959092 loss)
I0409 21:54:20.216373 25006 sgd_solver.cpp:105] Iteration 4140, lr = 0.00440398
I0409 21:54:22.947144 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:54:25.339136 25006 solver.cpp:218] Iteration 4152 (2.34258 iter/s, 5.12255s/12 iters), loss = 0.977723
I0409 21:54:25.339198 25006 solver.cpp:237] Train net output #0: loss = 0.977723 (* 1 = 0.977723 loss)
I0409 21:54:25.339213 25006 sgd_solver.cpp:105] Iteration 4152, lr = 0.00439353
I0409 21:54:26.968046 25006 blocking_queue.cpp:49] Waiting for data
I0409 21:54:30.370002 25006 solver.cpp:218] Iteration 4164 (2.38541 iter/s, 5.03058s/12 iters), loss = 1.0028
I0409 21:54:30.370047 25006 solver.cpp:237] Train net output #0: loss = 1.0028 (* 1 = 1.0028 loss)
I0409 21:54:30.370057 25006 sgd_solver.cpp:105] Iteration 4164, lr = 0.0043831
I0409 21:54:35.337035 25006 solver.cpp:218] Iteration 4176 (2.41606 iter/s, 4.96677s/12 iters), loss = 0.980114
I0409 21:54:35.337085 25006 solver.cpp:237] Train net output #0: loss = 0.980114 (* 1 = 0.980114 loss)
I0409 21:54:35.337097 25006 sgd_solver.cpp:105] Iteration 4176, lr = 0.00437269
I0409 21:54:37.363693 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_4182.caffemodel
I0409 21:54:41.103075 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_4182.solverstate
I0409 21:54:43.796439 25006 solver.cpp:330] Iteration 4182, Testing net (#0)
I0409 21:54:43.796464 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:54:46.612545 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:54:48.449194 25006 solver.cpp:397] Test net output #0: accuracy = 0.186887
I0409 21:54:48.449241 25006 solver.cpp:397] Test net output #1: loss = 5.19311 (* 1 = 5.19311 loss)
I0409 21:54:50.318315 25006 solver.cpp:218] Iteration 4188 (0.801035 iter/s, 14.9806s/12 iters), loss = 0.984168
I0409 21:54:50.318405 25006 solver.cpp:237] Train net output #0: loss = 0.984168 (* 1 = 0.984168 loss)
I0409 21:54:50.318419 25006 sgd_solver.cpp:105] Iteration 4188, lr = 0.00436231
I0409 21:54:55.436841 25006 solver.cpp:218] Iteration 4200 (2.34456 iter/s, 5.11823s/12 iters), loss = 0.900818
I0409 21:54:55.436897 25006 solver.cpp:237] Train net output #0: loss = 0.900818 (* 1 = 0.900818 loss)
I0409 21:54:55.436909 25006 sgd_solver.cpp:105] Iteration 4200, lr = 0.00435195
I0409 21:55:00.659412 25006 solver.cpp:218] Iteration 4212 (2.29784 iter/s, 5.2223s/12 iters), loss = 0.856859
I0409 21:55:00.659467 25006 solver.cpp:237] Train net output #0: loss = 0.856859 (* 1 = 0.856859 loss)
I0409 21:55:00.659478 25006 sgd_solver.cpp:105] Iteration 4212, lr = 0.00434162
I0409 21:55:05.663422 25006 solver.cpp:218] Iteration 4224 (2.39821 iter/s, 5.00374s/12 iters), loss = 0.676034
I0409 21:55:05.663482 25006 solver.cpp:237] Train net output #0: loss = 0.676034 (* 1 = 0.676034 loss)
I0409 21:55:05.663496 25006 sgd_solver.cpp:105] Iteration 4224, lr = 0.00433131
I0409 21:55:10.691723 25006 solver.cpp:218] Iteration 4236 (2.38662 iter/s, 5.02803s/12 iters), loss = 0.650418
I0409 21:55:10.691777 25006 solver.cpp:237] Train net output #0: loss = 0.650418 (* 1 = 0.650418 loss)
I0409 21:55:10.691790 25006 sgd_solver.cpp:105] Iteration 4236, lr = 0.00432103
I0409 21:55:15.473634 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:55:15.699165 25006 solver.cpp:218] Iteration 4248 (2.39656 iter/s, 5.00718s/12 iters), loss = 0.956628
I0409 21:55:15.699219 25006 solver.cpp:237] Train net output #0: loss = 0.956628 (* 1 = 0.956628 loss)
I0409 21:55:15.699232 25006 sgd_solver.cpp:105] Iteration 4248, lr = 0.00431077
I0409 21:55:20.697719 25006 solver.cpp:218] Iteration 4260 (2.40082 iter/s, 4.99829s/12 iters), loss = 1.0438
I0409 21:55:20.697847 25006 solver.cpp:237] Train net output #0: loss = 1.0438 (* 1 = 1.0438 loss)
I0409 21:55:20.697856 25006 sgd_solver.cpp:105] Iteration 4260, lr = 0.00430053
I0409 21:55:25.708271 25006 solver.cpp:218] Iteration 4272 (2.39511 iter/s, 5.01022s/12 iters), loss = 1.17427
I0409 21:55:25.708312 25006 solver.cpp:237] Train net output #0: loss = 1.17427 (* 1 = 1.17427 loss)
I0409 21:55:25.708323 25006 sgd_solver.cpp:105] Iteration 4272, lr = 0.00429032
I0409 21:55:30.207557 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_4284.caffemodel
I0409 21:55:32.436760 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_4284.solverstate
I0409 21:55:34.109489 25006 solver.cpp:330] Iteration 4284, Testing net (#0)
I0409 21:55:34.109511 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:55:36.882123 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:55:38.641881 25006 solver.cpp:397] Test net output #0: accuracy = 0.191176
I0409 21:55:38.641932 25006 solver.cpp:397] Test net output #1: loss = 5.06113 (* 1 = 5.06113 loss)
I0409 21:55:38.730717 25006 solver.cpp:218] Iteration 4284 (0.921527 iter/s, 13.0219s/12 iters), loss = 0.828151
I0409 21:55:38.730789 25006 solver.cpp:237] Train net output #0: loss = 0.828151 (* 1 = 0.828151 loss)
I0409 21:55:38.730805 25006 sgd_solver.cpp:105] Iteration 4284, lr = 0.00428014
I0409 21:55:42.925756 25006 solver.cpp:218] Iteration 4296 (2.86069 iter/s, 4.19479s/12 iters), loss = 0.668972
I0409 21:55:42.925817 25006 solver.cpp:237] Train net output #0: loss = 0.668972 (* 1 = 0.668972 loss)
I0409 21:55:42.925828 25006 sgd_solver.cpp:105] Iteration 4296, lr = 0.00426998
I0409 21:55:47.806569 25006 solver.cpp:218] Iteration 4308 (2.45874 iter/s, 4.88054s/12 iters), loss = 0.889865
I0409 21:55:47.806627 25006 solver.cpp:237] Train net output #0: loss = 0.889865 (* 1 = 0.889865 loss)
I0409 21:55:47.806639 25006 sgd_solver.cpp:105] Iteration 4308, lr = 0.00425984
I0409 21:55:52.685091 25006 solver.cpp:218] Iteration 4320 (2.4599 iter/s, 4.87826s/12 iters), loss = 0.69303
I0409 21:55:52.685206 25006 solver.cpp:237] Train net output #0: loss = 0.69303 (* 1 = 0.69303 loss)
I0409 21:55:52.685218 25006 sgd_solver.cpp:105] Iteration 4320, lr = 0.00424972
I0409 21:55:57.549211 25006 solver.cpp:218] Iteration 4332 (2.46721 iter/s, 4.8638s/12 iters), loss = 0.731977
I0409 21:55:57.549274 25006 solver.cpp:237] Train net output #0: loss = 0.731977 (* 1 = 0.731977 loss)
I0409 21:55:57.549288 25006 sgd_solver.cpp:105] Iteration 4332, lr = 0.00423964
I0409 21:56:02.431844 25006 solver.cpp:218] Iteration 4344 (2.45783 iter/s, 4.88236s/12 iters), loss = 0.735424
I0409 21:56:02.431908 25006 solver.cpp:237] Train net output #0: loss = 0.735424 (* 1 = 0.735424 loss)
I0409 21:56:02.431921 25006 sgd_solver.cpp:105] Iteration 4344, lr = 0.00422957
I0409 21:56:04.286048 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:56:07.310019 25006 solver.cpp:218] Iteration 4356 (2.46007 iter/s, 4.87791s/12 iters), loss = 0.864659
I0409 21:56:07.310071 25006 solver.cpp:237] Train net output #0: loss = 0.864659 (* 1 = 0.864659 loss)
I0409 21:56:07.310082 25006 sgd_solver.cpp:105] Iteration 4356, lr = 0.00421953
I0409 21:56:12.197991 25006 solver.cpp:218] Iteration 4368 (2.45515 iter/s, 4.88769s/12 iters), loss = 0.9192
I0409 21:56:12.198055 25006 solver.cpp:237] Train net output #0: loss = 0.9192 (* 1 = 0.9192 loss)
I0409 21:56:12.198067 25006 sgd_solver.cpp:105] Iteration 4368, lr = 0.00420951
I0409 21:56:17.068976 25006 solver.cpp:218] Iteration 4380 (2.4637 iter/s, 4.87072s/12 iters), loss = 0.948817
I0409 21:56:17.069031 25006 solver.cpp:237] Train net output #0: loss = 0.948817 (* 1 = 0.948817 loss)
I0409 21:56:17.069041 25006 sgd_solver.cpp:105] Iteration 4380, lr = 0.00419952
I0409 21:56:19.039856 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_4386.caffemodel
I0409 21:56:21.416211 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_4386.solverstate
I0409 21:56:23.073544 25006 solver.cpp:330] Iteration 4386, Testing net (#0)
I0409 21:56:23.073658 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:56:25.868346 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:56:27.671644 25006 solver.cpp:397] Test net output #0: accuracy = 0.179534
I0409 21:56:27.671689 25006 solver.cpp:397] Test net output #1: loss = 5.48108 (* 1 = 5.48108 loss)
I0409 21:56:29.568986 25006 solver.cpp:218] Iteration 4392 (0.960042 iter/s, 12.4995s/12 iters), loss = 0.543995
I0409 21:56:29.569025 25006 solver.cpp:237] Train net output #0: loss = 0.543995 (* 1 = 0.543995 loss)
I0409 21:56:29.569036 25006 sgd_solver.cpp:105] Iteration 4392, lr = 0.00418954
I0409 21:56:34.585637 25006 solver.cpp:218] Iteration 4404 (2.39216 iter/s, 5.0164s/12 iters), loss = 0.717772
I0409 21:56:34.585693 25006 solver.cpp:237] Train net output #0: loss = 0.717772 (* 1 = 0.717772 loss)
I0409 21:56:34.585705 25006 sgd_solver.cpp:105] Iteration 4404, lr = 0.0041796
I0409 21:56:39.516212 25006 solver.cpp:218] Iteration 4416 (2.43393 iter/s, 4.93031s/12 iters), loss = 0.694613
I0409 21:56:39.516273 25006 solver.cpp:237] Train net output #0: loss = 0.694613 (* 1 = 0.694613 loss)
I0409 21:56:39.516286 25006 sgd_solver.cpp:105] Iteration 4416, lr = 0.00416967
I0409 21:56:44.474853 25006 solver.cpp:218] Iteration 4428 (2.42015 iter/s, 4.95837s/12 iters), loss = 0.843965
I0409 21:56:44.474912 25006 solver.cpp:237] Train net output #0: loss = 0.843965 (* 1 = 0.843965 loss)
I0409 21:56:44.474926 25006 sgd_solver.cpp:105] Iteration 4428, lr = 0.00415977
I0409 21:56:49.443039 25006 solver.cpp:218] Iteration 4440 (2.4155 iter/s, 4.96791s/12 iters), loss = 0.628424
I0409 21:56:49.443100 25006 solver.cpp:237] Train net output #0: loss = 0.628424 (* 1 = 0.628424 loss)
I0409 21:56:49.443114 25006 sgd_solver.cpp:105] Iteration 4440, lr = 0.0041499
I0409 21:56:53.497001 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:56:54.473323 25006 solver.cpp:218] Iteration 4452 (2.38568 iter/s, 5.03001s/12 iters), loss = 0.837347
I0409 21:56:54.473378 25006 solver.cpp:237] Train net output #0: loss = 0.837347 (* 1 = 0.837347 loss)
I0409 21:56:54.473392 25006 sgd_solver.cpp:105] Iteration 4452, lr = 0.00414005
I0409 21:56:59.461527 25006 solver.cpp:218] Iteration 4464 (2.40581 iter/s, 4.98793s/12 iters), loss = 0.433293
I0409 21:56:59.461591 25006 solver.cpp:237] Train net output #0: loss = 0.433293 (* 1 = 0.433293 loss)
I0409 21:56:59.461604 25006 sgd_solver.cpp:105] Iteration 4464, lr = 0.00413022
I0409 21:57:04.365917 25006 solver.cpp:218] Iteration 4476 (2.44692 iter/s, 4.90413s/12 iters), loss = 0.534709
I0409 21:57:04.365972 25006 solver.cpp:237] Train net output #0: loss = 0.534709 (* 1 = 0.534709 loss)
I0409 21:57:04.365983 25006 sgd_solver.cpp:105] Iteration 4476, lr = 0.00412041
I0409 21:57:08.866849 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_4488.caffemodel
I0409 21:57:15.792387 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_4488.solverstate
I0409 21:57:22.240137 25006 solver.cpp:330] Iteration 4488, Testing net (#0)
I0409 21:57:22.240165 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:57:24.890439 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:57:26.716506 25006 solver.cpp:397] Test net output #0: accuracy = 0.190564
I0409 21:57:26.716550 25006 solver.cpp:397] Test net output #1: loss = 5.4349 (* 1 = 5.4349 loss)
I0409 21:57:26.805735 25006 solver.cpp:218] Iteration 4488 (0.534786 iter/s, 22.4389s/12 iters), loss = 0.694606
I0409 21:57:26.805786 25006 solver.cpp:237] Train net output #0: loss = 0.694606 (* 1 = 0.694606 loss)
I0409 21:57:26.805800 25006 sgd_solver.cpp:105] Iteration 4488, lr = 0.00411063
I0409 21:57:31.351783 25006 solver.cpp:218] Iteration 4500 (2.6398 iter/s, 4.5458s/12 iters), loss = 0.654883
I0409 21:57:31.351830 25006 solver.cpp:237] Train net output #0: loss = 0.654883 (* 1 = 0.654883 loss)
I0409 21:57:31.351841 25006 sgd_solver.cpp:105] Iteration 4500, lr = 0.00410087
I0409 21:57:36.309366 25006 solver.cpp:218] Iteration 4512 (2.42066 iter/s, 4.95732s/12 iters), loss = 0.577489
I0409 21:57:36.309417 25006 solver.cpp:237] Train net output #0: loss = 0.577489 (* 1 = 0.577489 loss)
I0409 21:57:36.309429 25006 sgd_solver.cpp:105] Iteration 4512, lr = 0.00409113
I0409 21:57:41.328562 25006 solver.cpp:218] Iteration 4524 (2.39095 iter/s, 5.01893s/12 iters), loss = 0.727591
I0409 21:57:41.328606 25006 solver.cpp:237] Train net output #0: loss = 0.727591 (* 1 = 0.727591 loss)
I0409 21:57:41.328616 25006 sgd_solver.cpp:105] Iteration 4524, lr = 0.00408142
I0409 21:57:46.315855 25006 solver.cpp:218] Iteration 4536 (2.40624 iter/s, 4.98703s/12 iters), loss = 0.88425
I0409 21:57:46.315912 25006 solver.cpp:237] Train net output #0: loss = 0.88425 (* 1 = 0.88425 loss)
I0409 21:57:46.315927 25006 sgd_solver.cpp:105] Iteration 4536, lr = 0.00407173
I0409 21:57:51.383857 25006 solver.cpp:218] Iteration 4548 (2.36792 iter/s, 5.06774s/12 iters), loss = 0.627228
I0409 21:57:51.383903 25006 solver.cpp:237] Train net output #0: loss = 0.627228 (* 1 = 0.627228 loss)
I0409 21:57:51.383913 25006 sgd_solver.cpp:105] Iteration 4548, lr = 0.00406206
I0409 21:57:52.625175 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:57:56.303959 25006 solver.cpp:218] Iteration 4560 (2.4391 iter/s, 4.91984s/12 iters), loss = 0.692074
I0409 21:57:56.304061 25006 solver.cpp:237] Train net output #0: loss = 0.692074 (* 1 = 0.692074 loss)
I0409 21:57:56.304073 25006 sgd_solver.cpp:105] Iteration 4560, lr = 0.00405242
I0409 21:58:01.344846 25006 solver.cpp:218] Iteration 4572 (2.38069 iter/s, 5.04057s/12 iters), loss = 0.729387
I0409 21:58:01.344910 25006 solver.cpp:237] Train net output #0: loss = 0.729387 (* 1 = 0.729387 loss)
I0409 21:58:01.344928 25006 sgd_solver.cpp:105] Iteration 4572, lr = 0.0040428
I0409 21:58:06.459064 25006 solver.cpp:218] Iteration 4584 (2.34652 iter/s, 5.11395s/12 iters), loss = 0.70897
I0409 21:58:06.459103 25006 solver.cpp:237] Train net output #0: loss = 0.70897 (* 1 = 0.70897 loss)
I0409 21:58:06.459112 25006 sgd_solver.cpp:105] Iteration 4584, lr = 0.0040332
I0409 21:58:08.444303 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_4590.caffemodel
I0409 21:58:13.113806 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_4590.solverstate
I0409 21:58:20.609179 25006 solver.cpp:330] Iteration 4590, Testing net (#0)
I0409 21:58:20.609213 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:58:23.247318 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:58:25.067051 25006 solver.cpp:397] Test net output #0: accuracy = 0.198529
I0409 21:58:25.067106 25006 solver.cpp:397] Test net output #1: loss = 5.26447 (* 1 = 5.26447 loss)
I0409 21:58:26.946275 25006 solver.cpp:218] Iteration 4596 (0.585756 iter/s, 20.4864s/12 iters), loss = 0.671854
I0409 21:58:26.946393 25006 solver.cpp:237] Train net output #0: loss = 0.671854 (* 1 = 0.671854 loss)
I0409 21:58:26.946403 25006 sgd_solver.cpp:105] Iteration 4596, lr = 0.00402362
I0409 21:58:31.905356 25006 solver.cpp:218] Iteration 4608 (2.41997 iter/s, 4.95875s/12 iters), loss = 0.478164
I0409 21:58:31.905413 25006 solver.cpp:237] Train net output #0: loss = 0.478164 (* 1 = 0.478164 loss)
I0409 21:58:31.905426 25006 sgd_solver.cpp:105] Iteration 4608, lr = 0.00401407
I0409 21:58:36.803359 25006 solver.cpp:218] Iteration 4620 (2.45011 iter/s, 4.89774s/12 iters), loss = 0.328615
I0409 21:58:36.803408 25006 solver.cpp:237] Train net output #0: loss = 0.328615 (* 1 = 0.328615 loss)
I0409 21:58:36.803421 25006 sgd_solver.cpp:105] Iteration 4620, lr = 0.00400454
I0409 21:58:41.809923 25006 solver.cpp:218] Iteration 4632 (2.39698 iter/s, 5.0063s/12 iters), loss = 0.684306
I0409 21:58:41.809979 25006 solver.cpp:237] Train net output #0: loss = 0.684306 (* 1 = 0.684306 loss)
I0409 21:58:41.809989 25006 sgd_solver.cpp:105] Iteration 4632, lr = 0.00399503
I0409 21:58:46.805126 25006 solver.cpp:218] Iteration 4644 (2.40244 iter/s, 4.99493s/12 iters), loss = 0.577121
I0409 21:58:46.805174 25006 solver.cpp:237] Train net output #0: loss = 0.577121 (* 1 = 0.577121 loss)
I0409 21:58:46.805184 25006 sgd_solver.cpp:105] Iteration 4644, lr = 0.00398555
I0409 21:58:50.158949 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:58:51.746328 25006 solver.cpp:218] Iteration 4656 (2.42869 iter/s, 4.94094s/12 iters), loss = 0.684065
I0409 21:58:51.746393 25006 solver.cpp:237] Train net output #0: loss = 0.684065 (* 1 = 0.684065 loss)
I0409 21:58:51.746407 25006 sgd_solver.cpp:105] Iteration 4656, lr = 0.00397608
I0409 21:58:56.690368 25006 solver.cpp:218] Iteration 4668 (2.4273 iter/s, 4.94376s/12 iters), loss = 0.537189
I0409 21:58:56.690430 25006 solver.cpp:237] Train net output #0: loss = 0.537189 (* 1 = 0.537189 loss)
I0409 21:58:56.690443 25006 sgd_solver.cpp:105] Iteration 4668, lr = 0.00396664
I0409 21:59:01.664615 25006 solver.cpp:218] Iteration 4680 (2.41256 iter/s, 4.97398s/12 iters), loss = 0.689875
I0409 21:59:01.664714 25006 solver.cpp:237] Train net output #0: loss = 0.689875 (* 1 = 0.689875 loss)
I0409 21:59:01.664724 25006 sgd_solver.cpp:105] Iteration 4680, lr = 0.00395723
I0409 21:59:06.204771 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_4692.caffemodel
I0409 21:59:08.449124 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_4692.solverstate
I0409 21:59:11.192437 25006 solver.cpp:330] Iteration 4692, Testing net (#0)
I0409 21:59:11.192468 25006 net.cpp:676] Ignoring source layer train-data
I0409 21:59:13.798179 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:59:15.661823 25006 solver.cpp:397] Test net output #0: accuracy = 0.193015
I0409 21:59:15.661850 25006 solver.cpp:397] Test net output #1: loss = 5.60432 (* 1 = 5.60432 loss)
I0409 21:59:15.750025 25006 solver.cpp:218] Iteration 4692 (0.851986 iter/s, 14.0847s/12 iters), loss = 0.733985
I0409 21:59:15.750070 25006 solver.cpp:237] Train net output #0: loss = 0.733985 (* 1 = 0.733985 loss)
I0409 21:59:15.750082 25006 sgd_solver.cpp:105] Iteration 4692, lr = 0.00394783
I0409 21:59:19.951907 25006 solver.cpp:218] Iteration 4704 (2.85602 iter/s, 4.20165s/12 iters), loss = 0.80562
I0409 21:59:19.951959 25006 solver.cpp:237] Train net output #0: loss = 0.80562 (* 1 = 0.80562 loss)
I0409 21:59:19.951970 25006 sgd_solver.cpp:105] Iteration 4704, lr = 0.00393846
I0409 21:59:24.876731 25006 solver.cpp:218] Iteration 4716 (2.43677 iter/s, 4.92456s/12 iters), loss = 0.716364
I0409 21:59:24.876785 25006 solver.cpp:237] Train net output #0: loss = 0.716364 (* 1 = 0.716364 loss)
I0409 21:59:24.876794 25006 sgd_solver.cpp:105] Iteration 4716, lr = 0.00392911
I0409 21:59:29.893046 25006 solver.cpp:218] Iteration 4728 (2.39232 iter/s, 5.01605s/12 iters), loss = 0.769689
I0409 21:59:29.893096 25006 solver.cpp:237] Train net output #0: loss = 0.769689 (* 1 = 0.769689 loss)
I0409 21:59:29.893110 25006 sgd_solver.cpp:105] Iteration 4728, lr = 0.00391978
I0409 21:59:34.894896 25006 solver.cpp:218] Iteration 4740 (2.39924 iter/s, 5.00158s/12 iters), loss = 0.777992
I0409 21:59:34.906064 25006 solver.cpp:237] Train net output #0: loss = 0.777992 (* 1 = 0.777992 loss)
I0409 21:59:34.906081 25006 sgd_solver.cpp:105] Iteration 4740, lr = 0.00391047
I0409 21:59:39.876246 25006 solver.cpp:218] Iteration 4752 (2.41449 iter/s, 4.96998s/12 iters), loss = 0.821228
I0409 21:59:39.876294 25006 solver.cpp:237] Train net output #0: loss = 0.821228 (* 1 = 0.821228 loss)
I0409 21:59:39.876304 25006 sgd_solver.cpp:105] Iteration 4752, lr = 0.00390119
I0409 21:59:40.404321 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 21:59:44.919024 25006 solver.cpp:218] Iteration 4764 (2.37976 iter/s, 5.04252s/12 iters), loss = 0.825893
I0409 21:59:44.919072 25006 solver.cpp:237] Train net output #0: loss = 0.825893 (* 1 = 0.825893 loss)
I0409 21:59:44.919085 25006 sgd_solver.cpp:105] Iteration 4764, lr = 0.00389193
I0409 21:59:49.936655 25006 solver.cpp:218] Iteration 4776 (2.39169 iter/s, 5.01737s/12 iters), loss = 0.621821
I0409 21:59:49.936714 25006 solver.cpp:237] Train net output #0: loss = 0.621821 (* 1 = 0.621821 loss)
I0409 21:59:49.936726 25006 sgd_solver.cpp:105] Iteration 4776, lr = 0.00388269
I0409 21:59:54.985821 25006 solver.cpp:218] Iteration 4788 (2.37676 iter/s, 5.04888s/12 iters), loss = 0.555536
I0409 21:59:54.985893 25006 solver.cpp:237] Train net output #0: loss = 0.555536 (* 1 = 0.555536 loss)
I0409 21:59:54.985910 25006 sgd_solver.cpp:105] Iteration 4788, lr = 0.00387347
I0409 21:59:56.963512 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_4794.caffemodel
I0409 22:00:00.074453 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_4794.solverstate
I0409 22:00:01.714735 25006 solver.cpp:330] Iteration 4794, Testing net (#0)
I0409 22:00:01.714762 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:00:04.268100 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:00:06.176906 25006 solver.cpp:397] Test net output #0: accuracy = 0.208946
I0409 22:00:06.177017 25006 solver.cpp:397] Test net output #1: loss = 5.63649 (* 1 = 5.63649 loss)
I0409 22:00:07.950495 25006 solver.cpp:218] Iteration 4800 (0.925635 iter/s, 12.9641s/12 iters), loss = 0.716898
I0409 22:00:07.950546 25006 solver.cpp:237] Train net output #0: loss = 0.716898 (* 1 = 0.716898 loss)
I0409 22:00:07.950558 25006 sgd_solver.cpp:105] Iteration 4800, lr = 0.00386427
I0409 22:00:12.962589 25006 solver.cpp:218] Iteration 4812 (2.39434 iter/s, 5.01182s/12 iters), loss = 0.621421
I0409 22:00:12.962643 25006 solver.cpp:237] Train net output #0: loss = 0.621421 (* 1 = 0.621421 loss)
I0409 22:00:12.962656 25006 sgd_solver.cpp:105] Iteration 4812, lr = 0.0038551
I0409 22:00:17.980737 25006 solver.cpp:218] Iteration 4824 (2.39145 iter/s, 5.01788s/12 iters), loss = 0.66203
I0409 22:00:17.980785 25006 solver.cpp:237] Train net output #0: loss = 0.66203 (* 1 = 0.66203 loss)
I0409 22:00:17.980795 25006 sgd_solver.cpp:105] Iteration 4824, lr = 0.00384594
I0409 22:00:22.985153 25006 solver.cpp:218] Iteration 4836 (2.39801 iter/s, 5.00415s/12 iters), loss = 0.644833
I0409 22:00:22.985219 25006 solver.cpp:237] Train net output #0: loss = 0.644833 (* 1 = 0.644833 loss)
I0409 22:00:22.985234 25006 sgd_solver.cpp:105] Iteration 4836, lr = 0.00383681
I0409 22:00:25.017028 25006 blocking_queue.cpp:49] Waiting for data
I0409 22:00:28.001441 25006 solver.cpp:218] Iteration 4848 (2.39234 iter/s, 5.01601s/12 iters), loss = 0.832636
I0409 22:00:28.001492 25006 solver.cpp:237] Train net output #0: loss = 0.832636 (* 1 = 0.832636 loss)
I0409 22:00:28.001502 25006 sgd_solver.cpp:105] Iteration 4848, lr = 0.0038277
I0409 22:00:30.682955 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:00:32.994580 25006 solver.cpp:218] Iteration 4860 (2.40343 iter/s, 4.99287s/12 iters), loss = 0.500013
I0409 22:00:32.994637 25006 solver.cpp:237] Train net output #0: loss = 0.500013 (* 1 = 0.500013 loss)
I0409 22:00:32.994649 25006 sgd_solver.cpp:105] Iteration 4860, lr = 0.00381862
I0409 22:00:37.990361 25006 solver.cpp:218] Iteration 4872 (2.40215 iter/s, 4.99551s/12 iters), loss = 0.593955
I0409 22:00:37.990483 25006 solver.cpp:237] Train net output #0: loss = 0.593955 (* 1 = 0.593955 loss)
I0409 22:00:37.990497 25006 sgd_solver.cpp:105] Iteration 4872, lr = 0.00380955
I0409 22:00:42.961902 25006 solver.cpp:218] Iteration 4884 (2.4139 iter/s, 4.97121s/12 iters), loss = 0.402554
I0409 22:00:42.961973 25006 solver.cpp:237] Train net output #0: loss = 0.402554 (* 1 = 0.402554 loss)
I0409 22:00:42.961987 25006 sgd_solver.cpp:105] Iteration 4884, lr = 0.0038005
I0409 22:00:47.645063 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_4896.caffemodel
I0409 22:00:55.754308 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_4896.solverstate
I0409 22:00:58.986086 25006 solver.cpp:330] Iteration 4896, Testing net (#0)
I0409 22:00:58.986116 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:01:01.620092 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:01:03.690181 25006 solver.cpp:397] Test net output #0: accuracy = 0.196691
I0409 22:01:03.690229 25006 solver.cpp:397] Test net output #1: loss = 5.58746 (* 1 = 5.58746 loss)
I0409 22:01:03.780874 25006 solver.cpp:218] Iteration 4896 (0.576422 iter/s, 20.8181s/12 iters), loss = 0.725341
I0409 22:01:03.780925 25006 solver.cpp:237] Train net output #0: loss = 0.725341 (* 1 = 0.725341 loss)
I0409 22:01:03.780938 25006 sgd_solver.cpp:105] Iteration 4896, lr = 0.00379148
I0409 22:01:07.996141 25006 solver.cpp:218] Iteration 4908 (2.84696 iter/s, 4.21503s/12 iters), loss = 0.688298
I0409 22:01:07.996260 25006 solver.cpp:237] Train net output #0: loss = 0.688298 (* 1 = 0.688298 loss)
I0409 22:01:07.996273 25006 sgd_solver.cpp:105] Iteration 4908, lr = 0.00378248
I0409 22:01:12.988044 25006 solver.cpp:218] Iteration 4920 (2.40405 iter/s, 4.99157s/12 iters), loss = 0.768236
I0409 22:01:12.988101 25006 solver.cpp:237] Train net output #0: loss = 0.768236 (* 1 = 0.768236 loss)
I0409 22:01:12.988112 25006 sgd_solver.cpp:105] Iteration 4920, lr = 0.0037735
I0409 22:01:17.988291 25006 solver.cpp:218] Iteration 4932 (2.40001 iter/s, 4.99998s/12 iters), loss = 0.734501
I0409 22:01:17.988346 25006 solver.cpp:237] Train net output #0: loss = 0.734501 (* 1 = 0.734501 loss)
I0409 22:01:17.988358 25006 sgd_solver.cpp:105] Iteration 4932, lr = 0.00376454
I0409 22:01:22.965752 25006 solver.cpp:218] Iteration 4944 (2.411 iter/s, 4.97719s/12 iters), loss = 0.677158
I0409 22:01:22.965802 25006 solver.cpp:237] Train net output #0: loss = 0.677158 (* 1 = 0.677158 loss)
I0409 22:01:22.965813 25006 sgd_solver.cpp:105] Iteration 4944, lr = 0.0037556
I0409 22:01:27.739444 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:01:27.934063 25006 solver.cpp:218] Iteration 4956 (2.41544 iter/s, 4.96804s/12 iters), loss = 0.743151
I0409 22:01:27.934123 25006 solver.cpp:237] Train net output #0: loss = 0.743151 (* 1 = 0.743151 loss)
I0409 22:01:27.934135 25006 sgd_solver.cpp:105] Iteration 4956, lr = 0.00374669
I0409 22:01:32.862273 25006 solver.cpp:218] Iteration 4968 (2.4351 iter/s, 4.92794s/12 iters), loss = 0.804593
I0409 22:01:32.862321 25006 solver.cpp:237] Train net output #0: loss = 0.804593 (* 1 = 0.804593 loss)
I0409 22:01:32.862334 25006 sgd_solver.cpp:105] Iteration 4968, lr = 0.00373779
I0409 22:01:37.801421 25006 solver.cpp:218] Iteration 4980 (2.4297 iter/s, 4.93889s/12 iters), loss = 0.567337
I0409 22:01:37.801470 25006 solver.cpp:237] Train net output #0: loss = 0.567337 (* 1 = 0.567337 loss)
I0409 22:01:37.801482 25006 sgd_solver.cpp:105] Iteration 4980, lr = 0.00372892
I0409 22:01:42.970656 25006 solver.cpp:218] Iteration 4992 (2.32155 iter/s, 5.16897s/12 iters), loss = 0.613894
I0409 22:01:42.970793 25006 solver.cpp:237] Train net output #0: loss = 0.613894 (* 1 = 0.613894 loss)
I0409 22:01:42.970804 25006 sgd_solver.cpp:105] Iteration 4992, lr = 0.00372006
I0409 22:01:44.992169 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_4998.caffemodel
I0409 22:01:47.450639 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_4998.solverstate
I0409 22:01:50.724440 25006 solver.cpp:330] Iteration 4998, Testing net (#0)
I0409 22:01:50.724462 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:01:53.212059 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:01:55.195310 25006 solver.cpp:397] Test net output #0: accuracy = 0.201593
I0409 22:01:55.195339 25006 solver.cpp:397] Test net output #1: loss = 5.36554 (* 1 = 5.36554 loss)
I0409 22:01:57.170605 25006 solver.cpp:218] Iteration 5004 (0.845116 iter/s, 14.1992s/12 iters), loss = 0.545271
I0409 22:01:57.170663 25006 solver.cpp:237] Train net output #0: loss = 0.545271 (* 1 = 0.545271 loss)
I0409 22:01:57.170676 25006 sgd_solver.cpp:105] Iteration 5004, lr = 0.00371123
I0409 22:02:02.299862 25006 solver.cpp:218] Iteration 5016 (2.33965 iter/s, 5.12898s/12 iters), loss = 0.573486
I0409 22:02:02.299912 25006 solver.cpp:237] Train net output #0: loss = 0.573486 (* 1 = 0.573486 loss)
I0409 22:02:02.299926 25006 sgd_solver.cpp:105] Iteration 5016, lr = 0.00370242
I0409 22:02:07.235889 25006 solver.cpp:218] Iteration 5028 (2.43123 iter/s, 4.93577s/12 iters), loss = 0.530263
I0409 22:02:07.235942 25006 solver.cpp:237] Train net output #0: loss = 0.530263 (* 1 = 0.530263 loss)
I0409 22:02:07.235953 25006 sgd_solver.cpp:105] Iteration 5028, lr = 0.00369363
I0409 22:02:12.242612 25006 solver.cpp:218] Iteration 5040 (2.39691 iter/s, 5.00645s/12 iters), loss = 0.63625
I0409 22:02:12.242669 25006 solver.cpp:237] Train net output #0: loss = 0.63625 (* 1 = 0.63625 loss)
I0409 22:02:12.242681 25006 sgd_solver.cpp:105] Iteration 5040, lr = 0.00368486
I0409 22:02:17.206521 25006 solver.cpp:218] Iteration 5052 (2.41758 iter/s, 4.96364s/12 iters), loss = 0.866583
I0409 22:02:17.206638 25006 solver.cpp:237] Train net output #0: loss = 0.866583 (* 1 = 0.866583 loss)
I0409 22:02:17.206652 25006 sgd_solver.cpp:105] Iteration 5052, lr = 0.00367611
I0409 22:02:19.151247 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:02:22.220784 25006 solver.cpp:218] Iteration 5064 (2.39333 iter/s, 5.01394s/12 iters), loss = 0.556732
I0409 22:02:22.220830 25006 solver.cpp:237] Train net output #0: loss = 0.556732 (* 1 = 0.556732 loss)
I0409 22:02:22.220840 25006 sgd_solver.cpp:105] Iteration 5064, lr = 0.00366738
I0409 22:02:27.177145 25006 solver.cpp:218] Iteration 5076 (2.42126 iter/s, 4.9561s/12 iters), loss = 0.744262
I0409 22:02:27.177193 25006 solver.cpp:237] Train net output #0: loss = 0.744262 (* 1 = 0.744262 loss)
I0409 22:02:27.177206 25006 sgd_solver.cpp:105] Iteration 5076, lr = 0.00365868
I0409 22:02:32.199060 25006 solver.cpp:218] Iteration 5088 (2.38965 iter/s, 5.02165s/12 iters), loss = 0.5207
I0409 22:02:32.199120 25006 solver.cpp:237] Train net output #0: loss = 0.5207 (* 1 = 0.5207 loss)
I0409 22:02:32.199134 25006 sgd_solver.cpp:105] Iteration 5088, lr = 0.00364999
I0409 22:02:36.753072 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_5100.caffemodel
I0409 22:02:39.066416 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_5100.solverstate
I0409 22:02:40.716604 25006 solver.cpp:330] Iteration 5100, Testing net (#0)
I0409 22:02:40.716631 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:02:43.128959 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:02:45.151695 25006 solver.cpp:397] Test net output #0: accuracy = 0.204657
I0409 22:02:45.151757 25006 solver.cpp:397] Test net output #1: loss = 5.32725 (* 1 = 5.32725 loss)
I0409 22:02:45.245127 25006 solver.cpp:218] Iteration 5100 (0.91986 iter/s, 13.0455s/12 iters), loss = 0.721763
I0409 22:02:45.245175 25006 solver.cpp:237] Train net output #0: loss = 0.721763 (* 1 = 0.721763 loss)
I0409 22:02:45.245185 25006 sgd_solver.cpp:105] Iteration 5100, lr = 0.00364132
I0409 22:02:49.501374 25006 solver.cpp:218] Iteration 5112 (2.81954 iter/s, 4.25601s/12 iters), loss = 0.409853
I0409 22:02:49.501515 25006 solver.cpp:237] Train net output #0: loss = 0.409853 (* 1 = 0.409853 loss)
I0409 22:02:49.501528 25006 sgd_solver.cpp:105] Iteration 5112, lr = 0.00363268
I0409 22:02:54.465070 25006 solver.cpp:218] Iteration 5124 (2.41772 iter/s, 4.96335s/12 iters), loss = 0.349424
I0409 22:02:54.465112 25006 solver.cpp:237] Train net output #0: loss = 0.349424 (* 1 = 0.349424 loss)
I0409 22:02:54.465121 25006 sgd_solver.cpp:105] Iteration 5124, lr = 0.00362405
I0409 22:02:59.479997 25006 solver.cpp:218] Iteration 5136 (2.39298 iter/s, 5.01467s/12 iters), loss = 0.613858
I0409 22:02:59.480054 25006 solver.cpp:237] Train net output #0: loss = 0.613858 (* 1 = 0.613858 loss)
I0409 22:02:59.480067 25006 sgd_solver.cpp:105] Iteration 5136, lr = 0.00361545
I0409 22:03:04.451086 25006 solver.cpp:218] Iteration 5148 (2.41409 iter/s, 4.97082s/12 iters), loss = 0.453122
I0409 22:03:04.451141 25006 solver.cpp:237] Train net output #0: loss = 0.453122 (* 1 = 0.453122 loss)
I0409 22:03:04.451155 25006 sgd_solver.cpp:105] Iteration 5148, lr = 0.00360687
I0409 22:03:08.469871 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:03:09.408917 25006 solver.cpp:218] Iteration 5160 (2.42055 iter/s, 4.95756s/12 iters), loss = 0.607775
I0409 22:03:09.408974 25006 solver.cpp:237] Train net output #0: loss = 0.607775 (* 1 = 0.607775 loss)
I0409 22:03:09.408987 25006 sgd_solver.cpp:105] Iteration 5160, lr = 0.0035983
I0409 22:03:14.363672 25006 solver.cpp:218] Iteration 5172 (2.42205 iter/s, 4.95448s/12 iters), loss = 0.80446
I0409 22:03:14.363726 25006 solver.cpp:237] Train net output #0: loss = 0.80446 (* 1 = 0.80446 loss)
I0409 22:03:14.363739 25006 sgd_solver.cpp:105] Iteration 5172, lr = 0.00358976
I0409 22:03:19.301328 25006 solver.cpp:218] Iteration 5184 (2.43043 iter/s, 4.93739s/12 iters), loss = 0.532447
I0409 22:03:19.301374 25006 solver.cpp:237] Train net output #0: loss = 0.532447 (* 1 = 0.532447 loss)
I0409 22:03:19.301386 25006 sgd_solver.cpp:105] Iteration 5184, lr = 0.00358124
I0409 22:03:24.338065 25006 solver.cpp:218] Iteration 5196 (2.38262 iter/s, 5.03648s/12 iters), loss = 0.553223
I0409 22:03:24.338135 25006 solver.cpp:237] Train net output #0: loss = 0.553223 (* 1 = 0.553223 loss)
I0409 22:03:24.338143 25006 sgd_solver.cpp:105] Iteration 5196, lr = 0.00357273
I0409 22:03:26.547194 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_5202.caffemodel
I0409 22:03:29.747040 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_5202.solverstate
I0409 22:03:32.788631 25006 solver.cpp:330] Iteration 5202, Testing net (#0)
I0409 22:03:32.788661 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:03:35.218575 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:03:37.310027 25006 solver.cpp:397] Test net output #0: accuracy = 0.20527
I0409 22:03:37.310056 25006 solver.cpp:397] Test net output #1: loss = 5.66218 (* 1 = 5.66218 loss)
I0409 22:03:39.114756 25006 solver.cpp:218] Iteration 5208 (0.812127 iter/s, 14.776s/12 iters), loss = 0.725371
I0409 22:03:39.114804 25006 solver.cpp:237] Train net output #0: loss = 0.725371 (* 1 = 0.725371 loss)
I0409 22:03:39.114814 25006 sgd_solver.cpp:105] Iteration 5208, lr = 0.00356425
I0409 22:03:44.071494 25006 solver.cpp:218] Iteration 5220 (2.42108 iter/s, 4.95647s/12 iters), loss = 0.571272
I0409 22:03:44.071557 25006 solver.cpp:237] Train net output #0: loss = 0.571272 (* 1 = 0.571272 loss)
I0409 22:03:44.071568 25006 sgd_solver.cpp:105] Iteration 5220, lr = 0.00355579
I0409 22:03:49.182121 25006 solver.cpp:218] Iteration 5232 (2.34818 iter/s, 5.11035s/12 iters), loss = 0.426237
I0409 22:03:49.182175 25006 solver.cpp:237] Train net output #0: loss = 0.426236 (* 1 = 0.426236 loss)
I0409 22:03:49.182188 25006 sgd_solver.cpp:105] Iteration 5232, lr = 0.00354735
I0409 22:03:54.214730 25006 solver.cpp:218] Iteration 5244 (2.38458 iter/s, 5.03234s/12 iters), loss = 0.407462
I0409 22:03:54.214777 25006 solver.cpp:237] Train net output #0: loss = 0.407462 (* 1 = 0.407462 loss)
I0409 22:03:54.214787 25006 sgd_solver.cpp:105] Iteration 5244, lr = 0.00353892
I0409 22:03:59.304666 25006 solver.cpp:218] Iteration 5256 (2.35772 iter/s, 5.08967s/12 iters), loss = 0.533853
I0409 22:03:59.304811 25006 solver.cpp:237] Train net output #0: loss = 0.533852 (* 1 = 0.533852 loss)
I0409 22:03:59.304822 25006 sgd_solver.cpp:105] Iteration 5256, lr = 0.00353052
I0409 22:04:00.594646 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:04:04.463111 25006 solver.cpp:218] Iteration 5268 (2.32645 iter/s, 5.15808s/12 iters), loss = 0.489543
I0409 22:04:04.463163 25006 solver.cpp:237] Train net output #0: loss = 0.489543 (* 1 = 0.489543 loss)
I0409 22:04:04.463177 25006 sgd_solver.cpp:105] Iteration 5268, lr = 0.00352214
I0409 22:04:09.465013 25006 solver.cpp:218] Iteration 5280 (2.39921 iter/s, 5.00164s/12 iters), loss = 0.558785
I0409 22:04:09.465061 25006 solver.cpp:237] Train net output #0: loss = 0.558785 (* 1 = 0.558785 loss)
I0409 22:04:09.465070 25006 sgd_solver.cpp:105] Iteration 5280, lr = 0.00351378
I0409 22:04:14.399175 25006 solver.cpp:218] Iteration 5292 (2.43216 iter/s, 4.9339s/12 iters), loss = 0.53757
I0409 22:04:14.399222 25006 solver.cpp:237] Train net output #0: loss = 0.53757 (* 1 = 0.53757 loss)
I0409 22:04:14.399231 25006 sgd_solver.cpp:105] Iteration 5292, lr = 0.00350544
I0409 22:04:19.000042 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_5304.caffemodel
I0409 22:04:23.141824 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_5304.solverstate
I0409 22:04:27.831959 25006 solver.cpp:330] Iteration 5304, Testing net (#0)
I0409 22:04:27.831987 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:04:30.214529 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:04:32.452169 25006 solver.cpp:397] Test net output #0: accuracy = 0.190564
I0409 22:04:32.452216 25006 solver.cpp:397] Test net output #1: loss = 5.47035 (* 1 = 5.47035 loss)
I0409 22:04:32.541265 25006 solver.cpp:218] Iteration 5304 (0.661474 iter/s, 18.1413s/12 iters), loss = 0.655217
I0409 22:04:32.541340 25006 solver.cpp:237] Train net output #0: loss = 0.655217 (* 1 = 0.655217 loss)
I0409 22:04:32.541359 25006 sgd_solver.cpp:105] Iteration 5304, lr = 0.00349711
I0409 22:04:36.862998 25006 solver.cpp:218] Iteration 5316 (2.77683 iter/s, 4.32148s/12 iters), loss = 0.582885
I0409 22:04:36.863044 25006 solver.cpp:237] Train net output #0: loss = 0.582885 (* 1 = 0.582885 loss)
I0409 22:04:36.863055 25006 sgd_solver.cpp:105] Iteration 5316, lr = 0.00348881
I0409 22:04:41.923185 25006 solver.cpp:218] Iteration 5328 (2.37158 iter/s, 5.05992s/12 iters), loss = 0.867847
I0409 22:04:41.923245 25006 solver.cpp:237] Train net output #0: loss = 0.867847 (* 1 = 0.867847 loss)
I0409 22:04:41.923257 25006 sgd_solver.cpp:105] Iteration 5328, lr = 0.00348053
I0409 22:04:46.914949 25006 solver.cpp:218] Iteration 5340 (2.40409 iter/s, 4.99149s/12 iters), loss = 0.388703
I0409 22:04:46.915002 25006 solver.cpp:237] Train net output #0: loss = 0.388703 (* 1 = 0.388703 loss)
I0409 22:04:46.915015 25006 sgd_solver.cpp:105] Iteration 5340, lr = 0.00347226
I0409 22:04:51.978821 25006 solver.cpp:218] Iteration 5352 (2.36986 iter/s, 5.06359s/12 iters), loss = 0.448283
I0409 22:04:51.978876 25006 solver.cpp:237] Train net output #0: loss = 0.448283 (* 1 = 0.448283 loss)
I0409 22:04:51.978889 25006 sgd_solver.cpp:105] Iteration 5352, lr = 0.00346402
I0409 22:04:55.445166 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:04:57.113135 25006 solver.cpp:218] Iteration 5364 (2.33734 iter/s, 5.13404s/12 iters), loss = 0.45846
I0409 22:04:57.113186 25006 solver.cpp:237] Train net output #0: loss = 0.45846 (* 1 = 0.45846 loss)
I0409 22:04:57.113200 25006 sgd_solver.cpp:105] Iteration 5364, lr = 0.0034558
I0409 22:05:02.085088 25006 solver.cpp:218] Iteration 5376 (2.41367 iter/s, 4.97168s/12 iters), loss = 0.622666
I0409 22:05:02.085253 25006 solver.cpp:237] Train net output #0: loss = 0.622666 (* 1 = 0.622666 loss)
I0409 22:05:02.085268 25006 sgd_solver.cpp:105] Iteration 5376, lr = 0.00344759
I0409 22:05:07.090812 25006 solver.cpp:218] Iteration 5388 (2.39744 iter/s, 5.00535s/12 iters), loss = 0.586709
I0409 22:05:07.090865 25006 solver.cpp:237] Train net output #0: loss = 0.586709 (* 1 = 0.586709 loss)
I0409 22:05:07.090878 25006 sgd_solver.cpp:105] Iteration 5388, lr = 0.00343941
I0409 22:05:12.039850 25006 solver.cpp:218] Iteration 5400 (2.42484 iter/s, 4.94878s/12 iters), loss = 0.675353
I0409 22:05:12.039898 25006 solver.cpp:237] Train net output #0: loss = 0.675353 (* 1 = 0.675353 loss)
I0409 22:05:12.039909 25006 sgd_solver.cpp:105] Iteration 5400, lr = 0.00343124
I0409 22:05:14.101994 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_5406.caffemodel
I0409 22:05:17.620579 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_5406.solverstate
I0409 22:05:19.587512 25006 solver.cpp:330] Iteration 5406, Testing net (#0)
I0409 22:05:19.587533 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:05:21.938100 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:05:24.131358 25006 solver.cpp:397] Test net output #0: accuracy = 0.211397
I0409 22:05:24.131397 25006 solver.cpp:397] Test net output #1: loss = 5.40029 (* 1 = 5.40029 loss)
I0409 22:05:25.844187 25006 solver.cpp:218] Iteration 5412 (0.869331 iter/s, 13.8037s/12 iters), loss = 0.546257
I0409 22:05:25.844239 25006 solver.cpp:237] Train net output #0: loss = 0.546257 (* 1 = 0.546257 loss)
I0409 22:05:25.844249 25006 sgd_solver.cpp:105] Iteration 5412, lr = 0.00342309
I0409 22:05:30.900002 25006 solver.cpp:218] Iteration 5424 (2.37363 iter/s, 5.05554s/12 iters), loss = 0.45264
I0409 22:05:30.900051 25006 solver.cpp:237] Train net output #0: loss = 0.45264 (* 1 = 0.45264 loss)
I0409 22:05:30.900063 25006 sgd_solver.cpp:105] Iteration 5424, lr = 0.00341497
I0409 22:05:36.119855 25006 solver.cpp:218] Iteration 5436 (2.29903 iter/s, 5.21958s/12 iters), loss = 0.490433
I0409 22:05:36.119936 25006 solver.cpp:237] Train net output #0: loss = 0.490433 (* 1 = 0.490433 loss)
I0409 22:05:36.119949 25006 sgd_solver.cpp:105] Iteration 5436, lr = 0.00340686
I0409 22:05:41.406242 25006 solver.cpp:218] Iteration 5448 (2.27011 iter/s, 5.28608s/12 iters), loss = 0.435543
I0409 22:05:41.406296 25006 solver.cpp:237] Train net output #0: loss = 0.435543 (* 1 = 0.435543 loss)
I0409 22:05:41.406308 25006 sgd_solver.cpp:105] Iteration 5448, lr = 0.00339877
I0409 22:05:46.742053 25006 solver.cpp:218] Iteration 5460 (2.24908 iter/s, 5.33552s/12 iters), loss = 0.526452
I0409 22:05:46.742115 25006 solver.cpp:237] Train net output #0: loss = 0.526452 (* 1 = 0.526452 loss)
I0409 22:05:46.742126 25006 sgd_solver.cpp:105] Iteration 5460, lr = 0.0033907
I0409 22:05:47.291855 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:05:51.883904 25006 solver.cpp:218] Iteration 5472 (2.33392 iter/s, 5.14157s/12 iters), loss = 0.489696
I0409 22:05:51.883949 25006 solver.cpp:237] Train net output #0: loss = 0.489696 (* 1 = 0.489696 loss)
I0409 22:05:51.883957 25006 sgd_solver.cpp:105] Iteration 5472, lr = 0.00338265
I0409 22:05:56.801985 25006 solver.cpp:218] Iteration 5484 (2.44011 iter/s, 4.9178s/12 iters), loss = 0.528281
I0409 22:05:56.802042 25006 solver.cpp:237] Train net output #0: loss = 0.528281 (* 1 = 0.528281 loss)
I0409 22:05:56.802054 25006 sgd_solver.cpp:105] Iteration 5484, lr = 0.00337462
I0409 22:06:01.711112 25006 solver.cpp:218] Iteration 5496 (2.44456 iter/s, 4.90886s/12 iters), loss = 0.408451
I0409 22:06:01.711156 25006 solver.cpp:237] Train net output #0: loss = 0.408451 (* 1 = 0.408451 loss)
I0409 22:06:01.711165 25006 sgd_solver.cpp:105] Iteration 5496, lr = 0.00336661
I0409 22:06:06.199542 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_5508.caffemodel
I0409 22:06:08.408238 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_5508.solverstate
I0409 22:06:10.052953 25006 solver.cpp:330] Iteration 5508, Testing net (#0)
I0409 22:06:10.052980 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:06:12.352165 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:06:14.537294 25006 solver.cpp:397] Test net output #0: accuracy = 0.213235
I0409 22:06:14.537326 25006 solver.cpp:397] Test net output #1: loss = 5.58188 (* 1 = 5.58188 loss)
I0409 22:06:14.622702 25006 solver.cpp:218] Iteration 5508 (0.929439 iter/s, 12.911s/12 iters), loss = 0.659472
I0409 22:06:14.622756 25006 solver.cpp:237] Train net output #0: loss = 0.659472 (* 1 = 0.659472 loss)
I0409 22:06:14.622767 25006 sgd_solver.cpp:105] Iteration 5508, lr = 0.00335861
I0409 22:06:18.930581 25006 solver.cpp:218] Iteration 5520 (2.78575 iter/s, 4.30764s/12 iters), loss = 0.431571
I0409 22:06:18.930632 25006 solver.cpp:237] Train net output #0: loss = 0.431571 (* 1 = 0.431571 loss)
I0409 22:06:18.930646 25006 sgd_solver.cpp:105] Iteration 5520, lr = 0.00335064
I0409 22:06:21.433027 25006 blocking_queue.cpp:49] Waiting for data
I0409 22:06:24.101821 25006 solver.cpp:218] Iteration 5532 (2.32065 iter/s, 5.17097s/12 iters), loss = 0.335276
I0409 22:06:24.101876 25006 solver.cpp:237] Train net output #0: loss = 0.335276 (* 1 = 0.335276 loss)
I0409 22:06:24.101887 25006 sgd_solver.cpp:105] Iteration 5532, lr = 0.00334268
I0409 22:06:29.295503 25006 solver.cpp:218] Iteration 5544 (2.31062 iter/s, 5.19341s/12 iters), loss = 0.579703
I0409 22:06:29.295542 25006 solver.cpp:237] Train net output #0: loss = 0.579703 (* 1 = 0.579703 loss)
I0409 22:06:29.295552 25006 sgd_solver.cpp:105] Iteration 5544, lr = 0.00333475
I0409 22:06:34.541788 25006 solver.cpp:218] Iteration 5556 (2.28745 iter/s, 5.24601s/12 iters), loss = 0.349963
I0409 22:06:34.541846 25006 solver.cpp:237] Train net output #0: loss = 0.349963 (* 1 = 0.349963 loss)
I0409 22:06:34.541858 25006 sgd_solver.cpp:105] Iteration 5556, lr = 0.00332683
I0409 22:06:37.211726 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:06:39.462738 25006 solver.cpp:218] Iteration 5568 (2.43869 iter/s, 4.92068s/12 iters), loss = 0.547236
I0409 22:06:39.462790 25006 solver.cpp:237] Train net output #0: loss = 0.547236 (* 1 = 0.547236 loss)
I0409 22:06:39.462803 25006 sgd_solver.cpp:105] Iteration 5568, lr = 0.00331893
I0409 22:06:44.354562 25006 solver.cpp:218] Iteration 5580 (2.4532 iter/s, 4.89156s/12 iters), loss = 0.601479
I0409 22:06:44.354614 25006 solver.cpp:237] Train net output #0: loss = 0.601479 (* 1 = 0.601479 loss)
I0409 22:06:44.354629 25006 sgd_solver.cpp:105] Iteration 5580, lr = 0.00331105
I0409 22:06:49.394768 25006 solver.cpp:218] Iteration 5592 (2.38098 iter/s, 5.03994s/12 iters), loss = 0.457191
I0409 22:06:49.394814 25006 solver.cpp:237] Train net output #0: loss = 0.457191 (* 1 = 0.457191 loss)
I0409 22:06:49.394822 25006 sgd_solver.cpp:105] Iteration 5592, lr = 0.00330319
I0409 22:06:54.423291 25006 solver.cpp:218] Iteration 5604 (2.38651 iter/s, 5.02825s/12 iters), loss = 0.547075
I0409 22:06:54.423341 25006 solver.cpp:237] Train net output #0: loss = 0.547075 (* 1 = 0.547075 loss)
I0409 22:06:54.423350 25006 sgd_solver.cpp:105] Iteration 5604, lr = 0.00329535
I0409 22:06:56.463025 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_5610.caffemodel
I0409 22:07:03.484494 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_5610.solverstate
I0409 22:07:06.667253 25006 solver.cpp:330] Iteration 5610, Testing net (#0)
I0409 22:07:06.667279 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:07:08.991536 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:07:11.270179 25006 solver.cpp:397] Test net output #0: accuracy = 0.221201
I0409 22:07:11.270226 25006 solver.cpp:397] Test net output #1: loss = 5.72365 (* 1 = 5.72365 loss)
I0409 22:07:13.225984 25006 solver.cpp:218] Iteration 5616 (0.638235 iter/s, 18.8019s/12 iters), loss = 0.494077
I0409 22:07:13.226025 25006 solver.cpp:237] Train net output #0: loss = 0.494077 (* 1 = 0.494077 loss)
I0409 22:07:13.226034 25006 sgd_solver.cpp:105] Iteration 5616, lr = 0.00328752
I0409 22:07:18.212608 25006 solver.cpp:218] Iteration 5628 (2.40656 iter/s, 4.98636s/12 iters), loss = 0.244582
I0409 22:07:18.212671 25006 solver.cpp:237] Train net output #0: loss = 0.244582 (* 1 = 0.244582 loss)
I0409 22:07:18.212685 25006 sgd_solver.cpp:105] Iteration 5628, lr = 0.00327972
I0409 22:07:23.100793 25006 solver.cpp:218] Iteration 5640 (2.45504 iter/s, 4.88791s/12 iters), loss = 0.596497
I0409 22:07:23.100852 25006 solver.cpp:237] Train net output #0: loss = 0.596497 (* 1 = 0.596497 loss)
I0409 22:07:23.100864 25006 sgd_solver.cpp:105] Iteration 5640, lr = 0.00327193
I0409 22:07:28.124946 25006 solver.cpp:218] Iteration 5652 (2.38859 iter/s, 5.02388s/12 iters), loss = 0.435545
I0409 22:07:28.125005 25006 solver.cpp:237] Train net output #0: loss = 0.435545 (* 1 = 0.435545 loss)
I0409 22:07:28.125018 25006 sgd_solver.cpp:105] Iteration 5652, lr = 0.00326416
I0409 22:07:33.019183 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:07:33.189703 25006 solver.cpp:218] Iteration 5664 (2.36944 iter/s, 5.06448s/12 iters), loss = 0.469092
I0409 22:07:33.189755 25006 solver.cpp:237] Train net output #0: loss = 0.469092 (* 1 = 0.469092 loss)
I0409 22:07:33.189767 25006 sgd_solver.cpp:105] Iteration 5664, lr = 0.00325641
I0409 22:07:38.284442 25006 solver.cpp:218] Iteration 5676 (2.3555 iter/s, 5.09446s/12 iters), loss = 0.474554
I0409 22:07:38.284494 25006 solver.cpp:237] Train net output #0: loss = 0.474554 (* 1 = 0.474554 loss)
I0409 22:07:38.284507 25006 sgd_solver.cpp:105] Iteration 5676, lr = 0.00324868
I0409 22:07:43.210722 25006 solver.cpp:218] Iteration 5688 (2.43604 iter/s, 4.92602s/12 iters), loss = 0.482074
I0409 22:07:43.210799 25006 solver.cpp:237] Train net output #0: loss = 0.482074 (* 1 = 0.482074 loss)
I0409 22:07:43.210809 25006 sgd_solver.cpp:105] Iteration 5688, lr = 0.00324097
I0409 22:07:48.198145 25006 solver.cpp:218] Iteration 5700 (2.40619 iter/s, 4.98713s/12 iters), loss = 0.326604
I0409 22:07:48.198196 25006 solver.cpp:237] Train net output #0: loss = 0.326604 (* 1 = 0.326604 loss)
I0409 22:07:48.198208 25006 sgd_solver.cpp:105] Iteration 5700, lr = 0.00323328
I0409 22:07:52.869300 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_5712.caffemodel
I0409 22:07:57.601231 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_5712.solverstate
I0409 22:08:03.545189 25006 solver.cpp:330] Iteration 5712, Testing net (#0)
I0409 22:08:03.545220 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:08:05.678881 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:08:07.926146 25006 solver.cpp:397] Test net output #0: accuracy = 0.228554
I0409 22:08:07.926195 25006 solver.cpp:397] Test net output #1: loss = 5.46988 (* 1 = 5.46988 loss)
I0409 22:08:08.015173 25006 solver.cpp:218] Iteration 5712 (0.605566 iter/s, 19.8162s/12 iters), loss = 0.44424
I0409 22:08:08.015225 25006 solver.cpp:237] Train net output #0: loss = 0.44424 (* 1 = 0.44424 loss)
I0409 22:08:08.015237 25006 sgd_solver.cpp:105] Iteration 5712, lr = 0.0032256
I0409 22:08:12.329680 25006 solver.cpp:218] Iteration 5724 (2.78147 iter/s, 4.31426s/12 iters), loss = 0.324718
I0409 22:08:12.329733 25006 solver.cpp:237] Train net output #0: loss = 0.324718 (* 1 = 0.324718 loss)
I0409 22:08:12.329746 25006 sgd_solver.cpp:105] Iteration 5724, lr = 0.00321794
I0409 22:08:17.277011 25006 solver.cpp:218] Iteration 5736 (2.42568 iter/s, 4.94706s/12 iters), loss = 0.400701
I0409 22:08:17.277154 25006 solver.cpp:237] Train net output #0: loss = 0.400701 (* 1 = 0.400701 loss)
I0409 22:08:17.277168 25006 sgd_solver.cpp:105] Iteration 5736, lr = 0.0032103
I0409 22:08:22.408540 25006 solver.cpp:218] Iteration 5748 (2.33865 iter/s, 5.13117s/12 iters), loss = 0.526352
I0409 22:08:22.408592 25006 solver.cpp:237] Train net output #0: loss = 0.526352 (* 1 = 0.526352 loss)
I0409 22:08:22.408607 25006 sgd_solver.cpp:105] Iteration 5748, lr = 0.00320268
I0409 22:08:27.426497 25006 solver.cpp:218] Iteration 5760 (2.39154 iter/s, 5.01769s/12 iters), loss = 0.438079
I0409 22:08:27.426542 25006 solver.cpp:237] Train net output #0: loss = 0.438079 (* 1 = 0.438079 loss)
I0409 22:08:27.426553 25006 sgd_solver.cpp:105] Iteration 5760, lr = 0.00319508
I0409 22:08:29.423915 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:08:32.775692 25006 solver.cpp:218] Iteration 5772 (2.24345 iter/s, 5.34892s/12 iters), loss = 0.419209
I0409 22:08:32.775738 25006 solver.cpp:237] Train net output #0: loss = 0.419209 (* 1 = 0.419209 loss)
I0409 22:08:32.775748 25006 sgd_solver.cpp:105] Iteration 5772, lr = 0.00318749
I0409 22:08:37.965377 25006 solver.cpp:218] Iteration 5784 (2.3124 iter/s, 5.18941s/12 iters), loss = 0.415741
I0409 22:08:37.965430 25006 solver.cpp:237] Train net output #0: loss = 0.415741 (* 1 = 0.415741 loss)
I0409 22:08:37.965445 25006 sgd_solver.cpp:105] Iteration 5784, lr = 0.00317992
I0409 22:08:42.899129 25006 solver.cpp:218] Iteration 5796 (2.43236 iter/s, 4.93349s/12 iters), loss = 0.306536
I0409 22:08:42.899173 25006 solver.cpp:237] Train net output #0: loss = 0.306536 (* 1 = 0.306536 loss)
I0409 22:08:42.899181 25006 sgd_solver.cpp:105] Iteration 5796, lr = 0.00317237
I0409 22:08:47.959013 25006 solver.cpp:218] Iteration 5808 (2.37172 iter/s, 5.05962s/12 iters), loss = 0.541893
I0409 22:08:47.959120 25006 solver.cpp:237] Train net output #0: loss = 0.541893 (* 1 = 0.541893 loss)
I0409 22:08:47.959132 25006 sgd_solver.cpp:105] Iteration 5808, lr = 0.00316484
I0409 22:08:49.968612 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_5814.caffemodel
I0409 22:08:52.115569 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_5814.solverstate
I0409 22:08:53.750442 25006 solver.cpp:330] Iteration 5814, Testing net (#0)
I0409 22:08:53.750473 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:08:55.797160 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:08:58.097312 25006 solver.cpp:397] Test net output #0: accuracy = 0.237132
I0409 22:08:58.097358 25006 solver.cpp:397] Test net output #1: loss = 5.47074 (* 1 = 5.47074 loss)
I0409 22:08:59.891858 25006 solver.cpp:218] Iteration 5820 (1.00568 iter/s, 11.9322s/12 iters), loss = 0.832817
I0409 22:08:59.891918 25006 solver.cpp:237] Train net output #0: loss = 0.832817 (* 1 = 0.832817 loss)
I0409 22:08:59.891932 25006 sgd_solver.cpp:105] Iteration 5820, lr = 0.00315733
I0409 22:09:04.864764 25006 solver.cpp:218] Iteration 5832 (2.41321 iter/s, 4.97263s/12 iters), loss = 0.378226
I0409 22:09:04.864817 25006 solver.cpp:237] Train net output #0: loss = 0.378225 (* 1 = 0.378225 loss)
I0409 22:09:04.864830 25006 sgd_solver.cpp:105] Iteration 5832, lr = 0.00314983
I0409 22:09:09.932603 25006 solver.cpp:218] Iteration 5844 (2.368 iter/s, 5.06757s/12 iters), loss = 0.310239
I0409 22:09:09.932649 25006 solver.cpp:237] Train net output #0: loss = 0.310239 (* 1 = 0.310239 loss)
I0409 22:09:09.932660 25006 sgd_solver.cpp:105] Iteration 5844, lr = 0.00314235
I0409 22:09:15.105201 25006 solver.cpp:218] Iteration 5856 (2.32004 iter/s, 5.17232s/12 iters), loss = 0.271724
I0409 22:09:15.105265 25006 solver.cpp:237] Train net output #0: loss = 0.271724 (* 1 = 0.271724 loss)
I0409 22:09:15.105283 25006 sgd_solver.cpp:105] Iteration 5856, lr = 0.00313489
I0409 22:09:19.380432 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:09:20.191666 25006 solver.cpp:218] Iteration 5868 (2.35933 iter/s, 5.08618s/12 iters), loss = 0.521401
I0409 22:09:20.191722 25006 solver.cpp:237] Train net output #0: loss = 0.521401 (* 1 = 0.521401 loss)
I0409 22:09:20.191735 25006 sgd_solver.cpp:105] Iteration 5868, lr = 0.00312745
I0409 22:09:25.186316 25006 solver.cpp:218] Iteration 5880 (2.4027 iter/s, 4.99438s/12 iters), loss = 0.504196
I0409 22:09:25.186370 25006 solver.cpp:237] Train net output #0: loss = 0.504196 (* 1 = 0.504196 loss)
I0409 22:09:25.186383 25006 sgd_solver.cpp:105] Iteration 5880, lr = 0.00312002
I0409 22:09:30.234721 25006 solver.cpp:218] Iteration 5892 (2.37712 iter/s, 5.04812s/12 iters), loss = 0.37566
I0409 22:09:30.234792 25006 solver.cpp:237] Train net output #0: loss = 0.37566 (* 1 = 0.37566 loss)
I0409 22:09:30.234808 25006 sgd_solver.cpp:105] Iteration 5892, lr = 0.00311262
I0409 22:09:35.243660 25006 solver.cpp:218] Iteration 5904 (2.39585 iter/s, 5.00866s/12 iters), loss = 0.443555
I0409 22:09:35.243710 25006 solver.cpp:237] Train net output #0: loss = 0.443555 (* 1 = 0.443555 loss)
I0409 22:09:35.243718 25006 sgd_solver.cpp:105] Iteration 5904, lr = 0.00310523
I0409 22:09:39.766479 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_5916.caffemodel
I0409 22:09:41.958552 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_5916.solverstate
I0409 22:09:43.596252 25006 solver.cpp:330] Iteration 5916, Testing net (#0)
I0409 22:09:43.596278 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:09:45.640204 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:09:48.031781 25006 solver.cpp:397] Test net output #0: accuracy = 0.238971
I0409 22:09:48.031831 25006 solver.cpp:397] Test net output #1: loss = 5.52489 (* 1 = 5.52489 loss)
I0409 22:09:48.120965 25006 solver.cpp:218] Iteration 5916 (0.931914 iter/s, 12.8767s/12 iters), loss = 0.412886
I0409 22:09:48.121480 25006 solver.cpp:237] Train net output #0: loss = 0.412886 (* 1 = 0.412886 loss)
I0409 22:09:48.121493 25006 sgd_solver.cpp:105] Iteration 5916, lr = 0.00309785
I0409 22:09:52.326292 25006 solver.cpp:218] Iteration 5928 (2.854 iter/s, 4.20463s/12 iters), loss = 0.282225
I0409 22:09:52.326386 25006 solver.cpp:237] Train net output #0: loss = 0.282225 (* 1 = 0.282225 loss)
I0409 22:09:52.326400 25006 sgd_solver.cpp:105] Iteration 5928, lr = 0.0030905
I0409 22:09:57.432039 25006 solver.cpp:218] Iteration 5940 (2.35044 iter/s, 5.10543s/12 iters), loss = 0.309298
I0409 22:09:57.432094 25006 solver.cpp:237] Train net output #0: loss = 0.309298 (* 1 = 0.309298 loss)
I0409 22:09:57.432106 25006 sgd_solver.cpp:105] Iteration 5940, lr = 0.00308316
I0409 22:10:02.377526 25006 solver.cpp:218] Iteration 5952 (2.42659 iter/s, 4.94522s/12 iters), loss = 0.362163
I0409 22:10:02.377579 25006 solver.cpp:237] Train net output #0: loss = 0.362163 (* 1 = 0.362163 loss)
I0409 22:10:02.377590 25006 sgd_solver.cpp:105] Iteration 5952, lr = 0.00307584
I0409 22:10:07.528102 25006 solver.cpp:218] Iteration 5964 (2.32996 iter/s, 5.1503s/12 iters), loss = 0.36617
I0409 22:10:07.528156 25006 solver.cpp:237] Train net output #0: loss = 0.36617 (* 1 = 0.36617 loss)
I0409 22:10:07.528167 25006 sgd_solver.cpp:105] Iteration 5964, lr = 0.00306854
I0409 22:10:08.887208 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:10:12.565261 25006 solver.cpp:218] Iteration 5976 (2.38242 iter/s, 5.03689s/12 iters), loss = 0.224922
I0409 22:10:12.565317 25006 solver.cpp:237] Train net output #0: loss = 0.224922 (* 1 = 0.224922 loss)
I0409 22:10:12.565330 25006 sgd_solver.cpp:105] Iteration 5976, lr = 0.00306125
I0409 22:10:17.565332 25006 solver.cpp:218] Iteration 5988 (2.4001 iter/s, 4.99979s/12 iters), loss = 0.455409
I0409 22:10:17.565392 25006 solver.cpp:237] Train net output #0: loss = 0.455409 (* 1 = 0.455409 loss)
I0409 22:10:17.565403 25006 sgd_solver.cpp:105] Iteration 5988, lr = 0.00305398
I0409 22:10:22.452244 25006 solver.cpp:218] Iteration 6000 (2.45567 iter/s, 4.88664s/12 iters), loss = 0.385961
I0409 22:10:22.452351 25006 solver.cpp:237] Train net output #0: loss = 0.385961 (* 1 = 0.385961 loss)
I0409 22:10:22.452363 25006 sgd_solver.cpp:105] Iteration 6000, lr = 0.00304673
I0409 22:10:27.323168 25006 solver.cpp:218] Iteration 6012 (2.46376 iter/s, 4.8706s/12 iters), loss = 0.341512
I0409 22:10:27.323236 25006 solver.cpp:237] Train net output #0: loss = 0.341512 (* 1 = 0.341512 loss)
I0409 22:10:27.323248 25006 sgd_solver.cpp:105] Iteration 6012, lr = 0.0030395
I0409 22:10:29.303611 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_6018.caffemodel
I0409 22:10:34.874696 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_6018.solverstate
I0409 22:10:40.593919 25006 solver.cpp:330] Iteration 6018, Testing net (#0)
I0409 22:10:40.593952 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:10:42.712080 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:10:45.105299 25006 solver.cpp:397] Test net output #0: accuracy = 0.238358
I0409 22:10:45.105347 25006 solver.cpp:397] Test net output #1: loss = 5.53914 (* 1 = 5.53914 loss)
I0409 22:10:47.060891 25006 solver.cpp:218] Iteration 6024 (0.608 iter/s, 19.7368s/12 iters), loss = 0.366403
I0409 22:10:47.060945 25006 solver.cpp:237] Train net output #0: loss = 0.366403 (* 1 = 0.366403 loss)
I0409 22:10:47.060959 25006 sgd_solver.cpp:105] Iteration 6024, lr = 0.00303228
I0409 22:10:52.254539 25006 solver.cpp:218] Iteration 6036 (2.31064 iter/s, 5.19337s/12 iters), loss = 0.43291
I0409 22:10:52.254598 25006 solver.cpp:237] Train net output #0: loss = 0.43291 (* 1 = 0.43291 loss)
I0409 22:10:52.254611 25006 sgd_solver.cpp:105] Iteration 6036, lr = 0.00302508
I0409 22:10:57.226577 25006 solver.cpp:218] Iteration 6048 (2.41363 iter/s, 4.97176s/12 iters), loss = 0.332657
I0409 22:10:57.226662 25006 solver.cpp:237] Train net output #0: loss = 0.332657 (* 1 = 0.332657 loss)
I0409 22:10:57.226672 25006 sgd_solver.cpp:105] Iteration 6048, lr = 0.0030179
I0409 22:11:02.441121 25006 solver.cpp:218] Iteration 6060 (2.30139 iter/s, 5.21423s/12 iters), loss = 0.335027
I0409 22:11:02.441174 25006 solver.cpp:237] Train net output #0: loss = 0.335027 (* 1 = 0.335027 loss)
I0409 22:11:02.441186 25006 sgd_solver.cpp:105] Iteration 6060, lr = 0.00301074
I0409 22:11:05.999120 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:11:07.551141 25006 solver.cpp:218] Iteration 6072 (2.34845 iter/s, 5.10975s/12 iters), loss = 0.363986
I0409 22:11:07.551192 25006 solver.cpp:237] Train net output #0: loss = 0.363986 (* 1 = 0.363986 loss)
I0409 22:11:07.551204 25006 sgd_solver.cpp:105] Iteration 6072, lr = 0.00300359
I0409 22:11:12.581535 25006 solver.cpp:218] Iteration 6084 (2.38563 iter/s, 5.03013s/12 iters), loss = 0.334663
I0409 22:11:12.581583 25006 solver.cpp:237] Train net output #0: loss = 0.334663 (* 1 = 0.334663 loss)
I0409 22:11:12.581596 25006 sgd_solver.cpp:105] Iteration 6084, lr = 0.00299646
I0409 22:11:17.616024 25006 solver.cpp:218] Iteration 6096 (2.38369 iter/s, 5.03422s/12 iters), loss = 0.302342
I0409 22:11:17.616078 25006 solver.cpp:237] Train net output #0: loss = 0.302342 (* 1 = 0.302342 loss)
I0409 22:11:17.616091 25006 sgd_solver.cpp:105] Iteration 6096, lr = 0.00298934
I0409 22:11:22.657812 25006 solver.cpp:218] Iteration 6108 (2.38024 iter/s, 5.04151s/12 iters), loss = 0.456322
I0409 22:11:22.657864 25006 solver.cpp:237] Train net output #0: loss = 0.456322 (* 1 = 0.456322 loss)
I0409 22:11:22.657876 25006 sgd_solver.cpp:105] Iteration 6108, lr = 0.00298225
I0409 22:11:27.212770 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_6120.caffemodel
I0409 22:11:30.198904 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_6120.solverstate
I0409 22:11:33.810429 25006 solver.cpp:330] Iteration 6120, Testing net (#0)
I0409 22:11:33.810457 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:11:35.927945 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:11:38.391465 25006 solver.cpp:397] Test net output #0: accuracy = 0.231618
I0409 22:11:38.391513 25006 solver.cpp:397] Test net output #1: loss = 5.45158 (* 1 = 5.45158 loss)
I0409 22:11:38.480590 25006 solver.cpp:218] Iteration 6120 (0.758434 iter/s, 15.8221s/12 iters), loss = 0.29773
I0409 22:11:38.480648 25006 solver.cpp:237] Train net output #0: loss = 0.29773 (* 1 = 0.29773 loss)
I0409 22:11:38.480659 25006 sgd_solver.cpp:105] Iteration 6120, lr = 0.00297517
I0409 22:11:42.947628 25006 solver.cpp:218] Iteration 6132 (2.68649 iter/s, 4.46679s/12 iters), loss = 0.221221
I0409 22:11:42.947679 25006 solver.cpp:237] Train net output #0: loss = 0.221221 (* 1 = 0.221221 loss)
I0409 22:11:42.947692 25006 sgd_solver.cpp:105] Iteration 6132, lr = 0.0029681
I0409 22:11:47.956385 25006 solver.cpp:218] Iteration 6144 (2.39593 iter/s, 5.00849s/12 iters), loss = 0.33054
I0409 22:11:47.956439 25006 solver.cpp:237] Train net output #0: loss = 0.33054 (* 1 = 0.33054 loss)
I0409 22:11:47.956452 25006 sgd_solver.cpp:105] Iteration 6144, lr = 0.00296105
I0409 22:11:52.938700 25006 solver.cpp:218] Iteration 6156 (2.40865 iter/s, 4.98205s/12 iters), loss = 0.427585
I0409 22:11:52.938746 25006 solver.cpp:237] Train net output #0: loss = 0.427585 (* 1 = 0.427585 loss)
I0409 22:11:52.938758 25006 sgd_solver.cpp:105] Iteration 6156, lr = 0.00295402
I0409 22:11:58.100399 25006 solver.cpp:218] Iteration 6168 (2.32494 iter/s, 5.16143s/12 iters), loss = 0.307431
I0409 22:11:58.100455 25006 solver.cpp:237] Train net output #0: loss = 0.30743 (* 1 = 0.30743 loss)
I0409 22:11:58.100466 25006 sgd_solver.cpp:105] Iteration 6168, lr = 0.00294701
I0409 22:11:58.687444 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:12:03.134354 25006 solver.cpp:218] Iteration 6180 (2.38394 iter/s, 5.03368s/12 iters), loss = 0.424587
I0409 22:12:03.134550 25006 solver.cpp:237] Train net output #0: loss = 0.424587 (* 1 = 0.424587 loss)
I0409 22:12:03.134563 25006 sgd_solver.cpp:105] Iteration 6180, lr = 0.00294001
I0409 22:12:08.090101 25006 solver.cpp:218] Iteration 6192 (2.42163 iter/s, 4.95533s/12 iters), loss = 0.352326
I0409 22:12:08.090148 25006 solver.cpp:237] Train net output #0: loss = 0.352326 (* 1 = 0.352326 loss)
I0409 22:12:08.090157 25006 sgd_solver.cpp:105] Iteration 6192, lr = 0.00293303
I0409 22:12:13.121994 25006 solver.cpp:218] Iteration 6204 (2.38492 iter/s, 5.03161s/12 iters), loss = 0.347714
I0409 22:12:13.122043 25006 solver.cpp:237] Train net output #0: loss = 0.347714 (* 1 = 0.347714 loss)
I0409 22:12:13.122054 25006 sgd_solver.cpp:105] Iteration 6204, lr = 0.00292607
I0409 22:12:18.095369 25006 solver.cpp:218] Iteration 6216 (2.41298 iter/s, 4.97311s/12 iters), loss = 0.408465
I0409 22:12:18.095417 25006 solver.cpp:237] Train net output #0: loss = 0.408465 (* 1 = 0.408465 loss)
I0409 22:12:18.095427 25006 sgd_solver.cpp:105] Iteration 6216, lr = 0.00291912
I0409 22:12:20.093770 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_6222.caffemodel
I0409 22:12:22.384773 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_6222.solverstate
I0409 22:12:24.037060 25006 solver.cpp:330] Iteration 6222, Testing net (#0)
I0409 22:12:24.037086 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:12:26.031354 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:12:27.310675 25006 blocking_queue.cpp:49] Waiting for data
I0409 22:12:28.489253 25006 solver.cpp:397] Test net output #0: accuracy = 0.248775
I0409 22:12:28.489284 25006 solver.cpp:397] Test net output #1: loss = 5.42945 (* 1 = 5.42945 loss)
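[Annotation, not part of the logged output.] Each solver.cpp:218 line reports the wall time for the preceding display window of 12 iterations ('display: 12'), so the printed rate is simply 12 / elapsed. The dips below 1 iter/s right after a test (e.g. iteration 6228 below, 12.3511 s for 12 iterations) are expected: that window also contains the snapshot write and the 51-batch test pass. A small arithmetic check using only numbers printed in this section (a sketch, not part of the run):

    display = 12                             # 'display: 12' in the solver parameters

    # (elapsed seconds, logged iter/s) pairs copied from solver.cpp:218 lines
    windows = [(5.10543, 2.35044),           # iteration 5940: plain training window
               (12.3511, 0.971573)]          # iteration 6228: window includes snapshot + test
    for elapsed, logged in windows:
        print(f"{display / elapsed:.6f} iter/s (logged {logged})")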
I0409 22:12:30.447039 25006 solver.cpp:218] Iteration 6228 (0.971573 iter/s, 12.3511s/12 iters), loss = 0.542087
I0409 22:12:30.447098 25006 solver.cpp:237] Train net output #0: loss = 0.542086 (* 1 = 0.542086 loss)
I0409 22:12:30.447111 25006 sgd_solver.cpp:105] Iteration 6228, lr = 0.00291219
I0409 22:12:35.431689 25006 solver.cpp:218] Iteration 6240 (2.40752 iter/s, 4.98438s/12 iters), loss = 0.297829
I0409 22:12:35.431799 25006 solver.cpp:237] Train net output #0: loss = 0.297829 (* 1 = 0.297829 loss)
I0409 22:12:35.431813 25006 sgd_solver.cpp:105] Iteration 6240, lr = 0.00290528
I0409 22:12:40.373240 25006 solver.cpp:218] Iteration 6252 (2.42855 iter/s, 4.94122s/12 iters), loss = 0.293262
I0409 22:12:40.373301 25006 solver.cpp:237] Train net output #0: loss = 0.293262 (* 1 = 0.293262 loss)
I0409 22:12:40.373312 25006 sgd_solver.cpp:105] Iteration 6252, lr = 0.00289838
I0409 22:12:45.375813 25006 solver.cpp:218] Iteration 6264 (2.3989 iter/s, 5.00229s/12 iters), loss = 0.294032
I0409 22:12:45.375859 25006 solver.cpp:237] Train net output #0: loss = 0.294031 (* 1 = 0.294031 loss)
I0409 22:12:45.375869 25006 sgd_solver.cpp:105] Iteration 6264, lr = 0.0028915
I0409 22:12:48.066720 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:12:50.330773 25006 solver.cpp:218] Iteration 6276 (2.42194 iter/s, 4.9547s/12 iters), loss = 0.485233
I0409 22:12:50.330819 25006 solver.cpp:237] Train net output #0: loss = 0.485233 (* 1 = 0.485233 loss)
I0409 22:12:50.330832 25006 sgd_solver.cpp:105] Iteration 6276, lr = 0.00288463
I0409 22:12:55.244055 25006 solver.cpp:218] Iteration 6288 (2.44249 iter/s, 4.91302s/12 iters), loss = 0.268294
I0409 22:12:55.244107 25006 solver.cpp:237] Train net output #0: loss = 0.268294 (* 1 = 0.268294 loss)
I0409 22:12:55.244118 25006 sgd_solver.cpp:105] Iteration 6288, lr = 0.00287779
I0409 22:13:00.249948 25006 solver.cpp:218] Iteration 6300 (2.3973 iter/s, 5.00563s/12 iters), loss = 0.180622
I0409 22:13:00.249998 25006 solver.cpp:237] Train net output #0: loss = 0.180622 (* 1 = 0.180622 loss)
I0409 22:13:00.250008 25006 sgd_solver.cpp:105] Iteration 6300, lr = 0.00287095
I0409 22:13:05.385164 25006 solver.cpp:218] Iteration 6312 (2.33693 iter/s, 5.13495s/12 iters), loss = 0.352138
I0409 22:13:05.385205 25006 solver.cpp:237] Train net output #0: loss = 0.352138 (* 1 = 0.352138 loss)
I0409 22:13:05.385215 25006 sgd_solver.cpp:105] Iteration 6312, lr = 0.00286414
I0409 22:13:09.909440 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_6324.caffemodel
I0409 22:13:12.888851 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_6324.solverstate
I0409 22:13:14.576072 25006 solver.cpp:330] Iteration 6324, Testing net (#0)
I0409 22:13:14.576093 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:13:16.481573 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:13:18.971570 25006 solver.cpp:397] Test net output #0: accuracy = 0.238971
I0409 22:13:18.971608 25006 solver.cpp:397] Test net output #1: loss = 5.46746 (* 1 = 5.46746 loss)
I0409 22:13:19.060550 25006 solver.cpp:218] Iteration 6324 (0.877528 iter/s, 13.6748s/12 iters), loss = 0.401596
I0409 22:13:19.060597 25006 solver.cpp:237] Train net output #0: loss = 0.401596 (* 1 = 0.401596 loss)
I0409 22:13:19.060608 25006 sgd_solver.cpp:105] Iteration 6324, lr = 0.00285734
I0409 22:13:23.582803 25006 solver.cpp:218] Iteration 6336 (2.65369 iter/s, 4.52201s/12 iters), loss = 0.335409
I0409 22:13:23.582854 25006 solver.cpp:237] Train net output #0: loss = 0.335408 (* 1 = 0.335408 loss)
I0409 22:13:23.582864 25006 sgd_solver.cpp:105] Iteration 6336, lr = 0.00285055
I0409 22:13:28.967306 25006 solver.cpp:218] Iteration 6348 (2.22874 iter/s, 5.38421s/12 iters), loss = 0.3584
I0409 22:13:28.967370 25006 solver.cpp:237] Train net output #0: loss = 0.3584 (* 1 = 0.3584 loss)
I0409 22:13:28.967382 25006 sgd_solver.cpp:105] Iteration 6348, lr = 0.00284379
I0409 22:13:33.974241 25006 solver.cpp:218] Iteration 6360 (2.39681 iter/s, 5.00665s/12 iters), loss = 0.317317
I0409 22:13:33.974295 25006 solver.cpp:237] Train net output #0: loss = 0.317317 (* 1 = 0.317317 loss)
I0409 22:13:33.974308 25006 sgd_solver.cpp:105] Iteration 6360, lr = 0.00283703
I0409 22:13:38.823563 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:13:38.959884 25006 solver.cpp:218] Iteration 6372 (2.40704 iter/s, 4.98537s/12 iters), loss = 0.115469
I0409 22:13:38.959937 25006 solver.cpp:237] Train net output #0: loss = 0.115469 (* 1 = 0.115469 loss)
I0409 22:13:38.959950 25006 sgd_solver.cpp:105] Iteration 6372, lr = 0.0028303
I0409 22:13:44.230545 25006 solver.cpp:218] Iteration 6384 (2.27688 iter/s, 5.27038s/12 iters), loss = 0.433244
I0409 22:13:44.230672 25006 solver.cpp:237] Train net output #0: loss = 0.433244 (* 1 = 0.433244 loss)
I0409 22:13:44.230684 25006 sgd_solver.cpp:105] Iteration 6384, lr = 0.00282358
I0409 22:13:49.219195 25006 solver.cpp:218] Iteration 6396 (2.40563 iter/s, 4.9883s/12 iters), loss = 0.217405
I0409 22:13:49.219251 25006 solver.cpp:237] Train net output #0: loss = 0.217405 (* 1 = 0.217405 loss)
I0409 22:13:49.219264 25006 sgd_solver.cpp:105] Iteration 6396, lr = 0.00281687
I0409 22:13:54.392870 25006 solver.cpp:218] Iteration 6408 (2.31956 iter/s, 5.1734s/12 iters), loss = 0.263616
I0409 22:13:54.392921 25006 solver.cpp:237] Train net output #0: loss = 0.263616 (* 1 = 0.263616 loss)
I0409 22:13:54.392932 25006 sgd_solver.cpp:105] Iteration 6408, lr = 0.00281019
I0409 22:13:59.460500 25006 solver.cpp:218] Iteration 6420 (2.3681 iter/s, 5.06736s/12 iters), loss = 0.24084
I0409 22:13:59.460546 25006 solver.cpp:237] Train net output #0: loss = 0.24084 (* 1 = 0.24084 loss)
I0409 22:13:59.460556 25006 sgd_solver.cpp:105] Iteration 6420, lr = 0.00280351
I0409 22:14:01.604496 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_6426.caffemodel
I0409 22:14:08.403618 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_6426.solverstate
I0409 22:14:11.951901 25006 solver.cpp:330] Iteration 6426, Testing net (#0)
I0409 22:14:11.951929 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:14:13.856842 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:14:16.392259 25006 solver.cpp:397] Test net output #0: accuracy = 0.245711
I0409 22:14:16.392333 25006 solver.cpp:397] Test net output #1: loss = 5.24381 (* 1 = 5.24381 loss)
I0409 22:14:18.395692 25006 solver.cpp:218] Iteration 6432 (0.633768 iter/s, 18.9344s/12 iters), loss = 0.320057
I0409 22:14:18.395750 25006 solver.cpp:237] Train net output #0: loss = 0.320057 (* 1 = 0.320057 loss)
I0409 22:14:18.395763 25006 sgd_solver.cpp:105] Iteration 6432, lr = 0.00279686
I0409 22:14:23.526278 25006 solver.cpp:218] Iteration 6444 (2.33904 iter/s, 5.1303s/12 iters), loss = 0.180135
I0409 22:14:23.526335 25006 solver.cpp:237] Train net output #0: loss = 0.180135 (* 1 = 0.180135 loss)
I0409 22:14:23.526346 25006 sgd_solver.cpp:105] Iteration 6444, lr = 0.00279022
I0409 22:14:28.634897 25006 solver.cpp:218] Iteration 6456 (2.3491 iter/s, 5.10835s/12 iters), loss = 0.264067
I0409 22:14:28.634936 25006 solver.cpp:237] Train net output #0: loss = 0.264067 (* 1 = 0.264067 loss)
I0409 22:14:28.634948 25006 sgd_solver.cpp:105] Iteration 6456, lr = 0.00278359
I0409 22:14:33.592875 25006 solver.cpp:218] Iteration 6468 (2.42047 iter/s, 4.95772s/12 iters), loss = 0.114222
I0409 22:14:33.592931 25006 solver.cpp:237] Train net output #0: loss = 0.114222 (* 1 = 0.114222 loss)
I0409 22:14:33.592943 25006 sgd_solver.cpp:105] Iteration 6468, lr = 0.00277698
I0409 22:14:35.607347 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:14:38.644665 25006 solver.cpp:218] Iteration 6480 (2.37553 iter/s, 5.05151s/12 iters), loss = 0.329284
I0409 22:14:38.644721 25006 solver.cpp:237] Train net output #0: loss = 0.329284 (* 1 = 0.329284 loss)
I0409 22:14:38.644732 25006 sgd_solver.cpp:105] Iteration 6480, lr = 0.00277039
I0409 22:14:43.698671 25006 solver.cpp:218] Iteration 6492 (2.37448 iter/s, 5.05373s/12 iters), loss = 0.250556
I0409 22:14:43.698721 25006 solver.cpp:237] Train net output #0: loss = 0.250556 (* 1 = 0.250556 loss)
I0409 22:14:43.698732 25006 sgd_solver.cpp:105] Iteration 6492, lr = 0.00276381
I0409 22:14:48.706034 25006 solver.cpp:218] Iteration 6504 (2.3966 iter/s, 5.00709s/12 iters), loss = 0.301388
I0409 22:14:48.706182 25006 solver.cpp:237] Train net output #0: loss = 0.301387 (* 1 = 0.301387 loss)
I0409 22:14:48.706197 25006 sgd_solver.cpp:105] Iteration 6504, lr = 0.00275725
I0409 22:14:53.748955 25006 solver.cpp:218] Iteration 6516 (2.37975 iter/s, 5.04256s/12 iters), loss = 0.327957
I0409 22:14:53.749007 25006 solver.cpp:237] Train net output #0: loss = 0.327957 (* 1 = 0.327957 loss)
I0409 22:14:53.749020 25006 sgd_solver.cpp:105] Iteration 6516, lr = 0.00275071
I0409 22:14:58.355975 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_6528.caffemodel
I0409 22:15:01.434918 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_6528.solverstate
I0409 22:15:03.807969 25006 solver.cpp:330] Iteration 6528, Testing net (#0)
I0409 22:15:03.807989 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:15:05.714524 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:15:08.297392 25006 solver.cpp:397] Test net output #0: accuracy = 0.244485
I0409 22:15:08.297441 25006 solver.cpp:397] Test net output #1: loss = 5.40284 (* 1 = 5.40284 loss)
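[Annotation, not part of the logged output.] The snapshot and test events above land on the same iterations because both 'snapshot' and 'test_interval' are set to 102 in the solver parameters, so every 102nd iteration first writes snapshot_iter_<N>.caffemodel/.solverstate and then evaluates the test net for 51 batches ('test_iter: 51'). A quick sketch of the cadence visible in this section of the log:

    snapshot_interval = test_interval = 102  # from the solver parameters in this log

    first, last = 6018, 7956                 # first and last snapshot/test iterations in this section
    iters = list(range(first, last + 1, snapshot_interval))
    print(iters[:4], "...", iters[-1])       # [6018, 6120, 6222, 6324] ... 7956
    assert all(i % test_interval == 0 for i in iters)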
I0409 22:15:08.387660 25006 solver.cpp:218] Iteration 6528 (0.819782 iter/s, 14.638s/12 iters), loss = 0.284078
I0409 22:15:08.387722 25006 solver.cpp:237] Train net output #0: loss = 0.284078 (* 1 = 0.284078 loss)
I0409 22:15:08.387738 25006 sgd_solver.cpp:105] Iteration 6528, lr = 0.00274418
I0409 22:15:12.634987 25006 solver.cpp:218] Iteration 6540 (2.82547 iter/s, 4.24708s/12 iters), loss = 0.332275
I0409 22:15:12.635040 25006 solver.cpp:237] Train net output #0: loss = 0.332275 (* 1 = 0.332275 loss)
I0409 22:15:12.635052 25006 sgd_solver.cpp:105] Iteration 6540, lr = 0.00273766
I0409 22:15:17.624174 25006 solver.cpp:218] Iteration 6552 (2.40533 iter/s, 4.98892s/12 iters), loss = 0.333585
I0409 22:15:17.624214 25006 solver.cpp:237] Train net output #0: loss = 0.333585 (* 1 = 0.333585 loss)
I0409 22:15:17.624224 25006 sgd_solver.cpp:105] Iteration 6552, lr = 0.00273116
I0409 22:15:22.625874 25006 solver.cpp:218] Iteration 6564 (2.39931 iter/s, 5.00144s/12 iters), loss = 0.379836
I0409 22:15:22.626010 25006 solver.cpp:237] Train net output #0: loss = 0.379836 (* 1 = 0.379836 loss)
I0409 22:15:22.626024 25006 sgd_solver.cpp:105] Iteration 6564, lr = 0.00272468
I0409 22:15:26.821099 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:15:27.590487 25006 solver.cpp:218] Iteration 6576 (2.41727 iter/s, 4.96427s/12 iters), loss = 0.382739
I0409 22:15:27.590528 25006 solver.cpp:237] Train net output #0: loss = 0.382739 (* 1 = 0.382739 loss)
I0409 22:15:27.590538 25006 sgd_solver.cpp:105] Iteration 6576, lr = 0.00271821
I0409 22:15:32.591553 25006 solver.cpp:218] Iteration 6588 (2.39961 iter/s, 5.00081s/12 iters), loss = 0.205121
I0409 22:15:32.591589 25006 solver.cpp:237] Train net output #0: loss = 0.205121 (* 1 = 0.205121 loss)
I0409 22:15:32.591598 25006 sgd_solver.cpp:105] Iteration 6588, lr = 0.00271175
I0409 22:15:37.601079 25006 solver.cpp:218] Iteration 6600 (2.39556 iter/s, 5.00927s/12 iters), loss = 0.445573
I0409 22:15:37.601126 25006 solver.cpp:237] Train net output #0: loss = 0.445573 (* 1 = 0.445573 loss)
I0409 22:15:37.601138 25006 sgd_solver.cpp:105] Iteration 6600, lr = 0.00270532
I0409 22:15:42.568017 25006 solver.cpp:218] Iteration 6612 (2.41611 iter/s, 4.96667s/12 iters), loss = 0.29826
I0409 22:15:42.568085 25006 solver.cpp:237] Train net output #0: loss = 0.29826 (* 1 = 0.29826 loss)
I0409 22:15:42.568104 25006 sgd_solver.cpp:105] Iteration 6612, lr = 0.00269889
I0409 22:15:47.573566 25006 solver.cpp:218] Iteration 6624 (2.39748 iter/s, 5.00527s/12 iters), loss = 0.252177
I0409 22:15:47.573613 25006 solver.cpp:237] Train net output #0: loss = 0.252177 (* 1 = 0.252177 loss)
I0409 22:15:47.573623 25006 sgd_solver.cpp:105] Iteration 6624, lr = 0.00269248
I0409 22:15:49.617976 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_6630.caffemodel
I0409 22:15:53.812124 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_6630.solverstate
I0409 22:15:55.454110 25006 solver.cpp:330] Iteration 6630, Testing net (#0)
I0409 22:15:55.454135 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:15:57.221719 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:15:59.853312 25006 solver.cpp:397] Test net output #0: accuracy = 0.252451
I0409 22:15:59.853354 25006 solver.cpp:397] Test net output #1: loss = 5.34667 (* 1 = 5.34667 loss)
I0409 22:16:01.708671 25006 solver.cpp:218] Iteration 6636 (0.848988 iter/s, 14.1345s/12 iters), loss = 0.210091
I0409 22:16:01.708726 25006 solver.cpp:237] Train net output #0: loss = 0.210091 (* 1 = 0.210091 loss)
I0409 22:16:01.708737 25006 sgd_solver.cpp:105] Iteration 6636, lr = 0.00268609
I0409 22:16:06.692278 25006 solver.cpp:218] Iteration 6648 (2.40803 iter/s, 4.98334s/12 iters), loss = 0.396747
I0409 22:16:06.692325 25006 solver.cpp:237] Train net output #0: loss = 0.396747 (* 1 = 0.396747 loss)
I0409 22:16:06.692334 25006 sgd_solver.cpp:105] Iteration 6648, lr = 0.00267971
I0409 22:16:11.708966 25006 solver.cpp:218] Iteration 6660 (2.39214 iter/s, 5.01642s/12 iters), loss = 0.465776
I0409 22:16:11.709015 25006 solver.cpp:237] Train net output #0: loss = 0.465776 (* 1 = 0.465776 loss)
I0409 22:16:11.709025 25006 sgd_solver.cpp:105] Iteration 6660, lr = 0.00267335
I0409 22:16:16.689463 25006 solver.cpp:218] Iteration 6672 (2.40953 iter/s, 4.98023s/12 iters), loss = 0.218215
I0409 22:16:16.689510 25006 solver.cpp:237] Train net output #0: loss = 0.218215 (* 1 = 0.218215 loss)
I0409 22:16:16.689519 25006 sgd_solver.cpp:105] Iteration 6672, lr = 0.00266701
I0409 22:16:18.036120 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:16:21.623555 25006 solver.cpp:218] Iteration 6684 (2.43219 iter/s, 4.93382s/12 iters), loss = 0.172991
I0409 22:16:21.623606 25006 solver.cpp:237] Train net output #0: loss = 0.172991 (* 1 = 0.172991 loss)
I0409 22:16:21.623617 25006 sgd_solver.cpp:105] Iteration 6684, lr = 0.00266067
I0409 22:16:26.576277 25006 solver.cpp:218] Iteration 6696 (2.42304 iter/s, 4.95246s/12 iters), loss = 0.462812
I0409 22:16:26.576380 25006 solver.cpp:237] Train net output #0: loss = 0.462812 (* 1 = 0.462812 loss)
I0409 22:16:26.576393 25006 sgd_solver.cpp:105] Iteration 6696, lr = 0.00265436
I0409 22:16:31.662549 25006 solver.cpp:218] Iteration 6708 (2.35944 iter/s, 5.08595s/12 iters), loss = 0.193684
I0409 22:16:31.662607 25006 solver.cpp:237] Train net output #0: loss = 0.193684 (* 1 = 0.193684 loss)
I0409 22:16:31.662622 25006 sgd_solver.cpp:105] Iteration 6708, lr = 0.00264805
I0409 22:16:36.770169 25006 solver.cpp:218] Iteration 6720 (2.34957 iter/s, 5.10733s/12 iters), loss = 0.285761
I0409 22:16:36.770243 25006 solver.cpp:237] Train net output #0: loss = 0.285761 (* 1 = 0.285761 loss)
I0409 22:16:36.770262 25006 sgd_solver.cpp:105] Iteration 6720, lr = 0.00264177
I0409 22:16:41.294135 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_6732.caffemodel
I0409 22:16:45.886013 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_6732.solverstate
I0409 22:16:48.509982 25006 solver.cpp:330] Iteration 6732, Testing net (#0)
I0409 22:16:48.510010 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:16:50.377948 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:16:53.098703 25006 solver.cpp:397] Test net output #0: accuracy = 0.245098
I0409 22:16:53.098733 25006 solver.cpp:397] Test net output #1: loss = 5.42019 (* 1 = 5.42019 loss)
I0409 22:16:53.187479 25006 solver.cpp:218] Iteration 6732 (0.730969 iter/s, 16.4166s/12 iters), loss = 0.245222
I0409 22:16:53.187526 25006 solver.cpp:237] Train net output #0: loss = 0.245222 (* 1 = 0.245222 loss)
I0409 22:16:53.187536 25006 sgd_solver.cpp:105] Iteration 6732, lr = 0.0026355
I0409 22:16:57.503396 25006 solver.cpp:218] Iteration 6744 (2.78056 iter/s, 4.31568s/12 iters), loss = 0.256279
I0409 22:16:57.503532 25006 solver.cpp:237] Train net output #0: loss = 0.256279 (* 1 = 0.256279 loss)
I0409 22:16:57.503546 25006 sgd_solver.cpp:105] Iteration 6744, lr = 0.00262924
I0409 22:17:02.417735 25006 solver.cpp:218] Iteration 6756 (2.44201 iter/s, 4.91398s/12 iters), loss = 0.267026
I0409 22:17:02.417811 25006 solver.cpp:237] Train net output #0: loss = 0.267026 (* 1 = 0.267026 loss)
I0409 22:17:02.417829 25006 sgd_solver.cpp:105] Iteration 6756, lr = 0.002623
I0409 22:17:07.417819 25006 solver.cpp:218] Iteration 6768 (2.4001 iter/s, 4.99979s/12 iters), loss = 0.225113
I0409 22:17:07.417867 25006 solver.cpp:237] Train net output #0: loss = 0.225113 (* 1 = 0.225113 loss)
I0409 22:17:07.417877 25006 sgd_solver.cpp:105] Iteration 6768, lr = 0.00261677
I0409 22:17:10.893970 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:17:12.372323 25006 solver.cpp:218] Iteration 6780 (2.42217 iter/s, 4.95424s/12 iters), loss = 0.348961
I0409 22:17:12.372371 25006 solver.cpp:237] Train net output #0: loss = 0.348961 (* 1 = 0.348961 loss)
I0409 22:17:12.372381 25006 sgd_solver.cpp:105] Iteration 6780, lr = 0.00261056
I0409 22:17:17.504834 25006 solver.cpp:218] Iteration 6792 (2.33816 iter/s, 5.13224s/12 iters), loss = 0.197908
I0409 22:17:17.504884 25006 solver.cpp:237] Train net output #0: loss = 0.197908 (* 1 = 0.197908 loss)
I0409 22:17:17.504894 25006 sgd_solver.cpp:105] Iteration 6792, lr = 0.00260436
I0409 22:17:22.499001 25006 solver.cpp:218] Iteration 6804 (2.40293 iter/s, 4.99389s/12 iters), loss = 0.257229
I0409 22:17:22.499054 25006 solver.cpp:237] Train net output #0: loss = 0.257228 (* 1 = 0.257228 loss)
I0409 22:17:22.499068 25006 sgd_solver.cpp:105] Iteration 6804, lr = 0.00259817
I0409 22:17:27.424419 25006 solver.cpp:218] Iteration 6816 (2.43647 iter/s, 4.92515s/12 iters), loss = 0.221278
I0409 22:17:27.424465 25006 solver.cpp:237] Train net output #0: loss = 0.221278 (* 1 = 0.221278 loss)
I0409 22:17:27.424475 25006 sgd_solver.cpp:105] Iteration 6816, lr = 0.00259201
I0409 22:17:32.376116 25006 solver.cpp:218] Iteration 6828 (2.42354 iter/s, 4.95143s/12 iters), loss = 0.30417
I0409 22:17:32.376207 25006 solver.cpp:237] Train net output #0: loss = 0.30417 (* 1 = 0.30417 loss)
I0409 22:17:32.376221 25006 sgd_solver.cpp:105] Iteration 6828, lr = 0.00258585
I0409 22:17:34.399597 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_6834.caffemodel
I0409 22:17:38.111963 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_6834.solverstate
I0409 22:17:41.441803 25006 solver.cpp:330] Iteration 6834, Testing net (#0)
I0409 22:17:41.441833 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:17:43.264262 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:17:45.944583 25006 solver.cpp:397] Test net output #0: accuracy = 0.251226
I0409 22:17:45.944631 25006 solver.cpp:397] Test net output #1: loss = 5.45334 (* 1 = 5.45334 loss)
I0409 22:17:47.866657 25006 solver.cpp:218] Iteration 6840 (0.774703 iter/s, 15.4898s/12 iters), loss = 0.236018
I0409 22:17:47.866711 25006 solver.cpp:237] Train net output #0: loss = 0.236018 (* 1 = 0.236018 loss)
I0409 22:17:47.866724 25006 sgd_solver.cpp:105] Iteration 6840, lr = 0.00257971
I0409 22:17:53.114784 25006 solver.cpp:218] Iteration 6852 (2.28665 iter/s, 5.24785s/12 iters), loss = 0.233817
I0409 22:17:53.114836 25006 solver.cpp:237] Train net output #0: loss = 0.233817 (* 1 = 0.233817 loss)
I0409 22:17:53.114848 25006 sgd_solver.cpp:105] Iteration 6852, lr = 0.00257359
I0409 22:17:58.227402 25006 solver.cpp:218] Iteration 6864 (2.34726 iter/s, 5.11234s/12 iters), loss = 0.248838
I0409 22:17:58.227456 25006 solver.cpp:237] Train net output #0: loss = 0.248838 (* 1 = 0.248838 loss)
I0409 22:17:58.227468 25006 sgd_solver.cpp:105] Iteration 6864, lr = 0.00256748
I0409 22:18:03.288472 25006 solver.cpp:218] Iteration 6876 (2.37117 iter/s, 5.06079s/12 iters), loss = 0.272944
I0409 22:18:03.288627 25006 solver.cpp:237] Train net output #0: loss = 0.272944 (* 1 = 0.272944 loss)
I0409 22:18:03.288640 25006 sgd_solver.cpp:105] Iteration 6876, lr = 0.00256138
I0409 22:18:03.919901 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:18:08.340411 25006 solver.cpp:218] Iteration 6888 (2.3755 iter/s, 5.05157s/12 iters), loss = 0.205278
I0409 22:18:08.340462 25006 solver.cpp:237] Train net output #0: loss = 0.205278 (* 1 = 0.205278 loss)
I0409 22:18:08.340476 25006 sgd_solver.cpp:105] Iteration 6888, lr = 0.0025553
I0409 22:18:13.306143 25006 solver.cpp:218] Iteration 6900 (2.41669 iter/s, 4.96546s/12 iters), loss = 0.17662
I0409 22:18:13.306200 25006 solver.cpp:237] Train net output #0: loss = 0.17662 (* 1 = 0.17662 loss)
I0409 22:18:13.306212 25006 sgd_solver.cpp:105] Iteration 6900, lr = 0.00254923
I0409 22:18:18.309419 25006 solver.cpp:218] Iteration 6912 (2.39856 iter/s, 5.003s/12 iters), loss = 0.216561
I0409 22:18:18.309479 25006 solver.cpp:237] Train net output #0: loss = 0.216561 (* 1 = 0.216561 loss)
I0409 22:18:18.309491 25006 sgd_solver.cpp:105] Iteration 6912, lr = 0.00254318
I0409 22:18:23.298509 25006 solver.cpp:218] Iteration 6924 (2.40538 iter/s, 4.98881s/12 iters), loss = 0.291794
I0409 22:18:23.298564 25006 solver.cpp:237] Train net output #0: loss = 0.291794 (* 1 = 0.291794 loss)
I0409 22:18:23.298575 25006 sgd_solver.cpp:105] Iteration 6924, lr = 0.00253714
I0409 22:18:27.808861 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_6936.caffemodel
I0409 22:18:31.116310 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_6936.solverstate
I0409 22:18:32.738404 25006 solver.cpp:330] Iteration 6936, Testing net (#0)
I0409 22:18:32.738427 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:18:33.329674 25006 blocking_queue.cpp:49] Waiting for data
I0409 22:18:34.384608 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:18:37.130020 25006 solver.cpp:397] Test net output #0: accuracy = 0.249387
I0409 22:18:37.130069 25006 solver.cpp:397] Test net output #1: loss = 5.47247 (* 1 = 5.47247 loss)
I0409 22:18:37.215948 25006 solver.cpp:218] Iteration 6936 (0.862266 iter/s, 13.9168s/12 iters), loss = 0.13991
I0409 22:18:37.215996 25006 solver.cpp:237] Train net output #0: loss = 0.139909 (* 1 = 0.139909 loss)
I0409 22:18:37.216007 25006 sgd_solver.cpp:105] Iteration 6936, lr = 0.00253112
I0409 22:18:41.308461 25006 solver.cpp:218] Iteration 6948 (2.93235 iter/s, 4.09228s/12 iters), loss = 0.229267
I0409 22:18:41.308509 25006 solver.cpp:237] Train net output #0: loss = 0.229267 (* 1 = 0.229267 loss)
I0409 22:18:41.308522 25006 sgd_solver.cpp:105] Iteration 6948, lr = 0.00252511
I0409 22:18:46.222900 25006 solver.cpp:218] Iteration 6960 (2.44192 iter/s, 4.91417s/12 iters), loss = 0.240302
I0409 22:18:46.222955 25006 solver.cpp:237] Train net output #0: loss = 0.240302 (* 1 = 0.240302 loss)
I0409 22:18:46.222968 25006 sgd_solver.cpp:105] Iteration 6960, lr = 0.00251911
I0409 22:18:51.171487 25006 solver.cpp:218] Iteration 6972 (2.42506 iter/s, 4.94832s/12 iters), loss = 0.263464
I0409 22:18:51.171535 25006 solver.cpp:237] Train net output #0: loss = 0.263464 (* 1 = 0.263464 loss)
I0409 22:18:51.171546 25006 sgd_solver.cpp:105] Iteration 6972, lr = 0.00251313
I0409 22:18:53.922895 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:18:56.154781 25006 solver.cpp:218] Iteration 6984 (2.40817 iter/s, 4.98303s/12 iters), loss = 0.171449
I0409 22:18:56.154827 25006 solver.cpp:237] Train net output #0: loss = 0.171449 (* 1 = 0.171449 loss)
I0409 22:18:56.154837 25006 sgd_solver.cpp:105] Iteration 6984, lr = 0.00250717
I0409 22:19:01.165359 25006 solver.cpp:218] Iteration 6996 (2.39506 iter/s, 5.01031s/12 iters), loss = 0.312189
I0409 22:19:01.165403 25006 solver.cpp:237] Train net output #0: loss = 0.312188 (* 1 = 0.312188 loss)
I0409 22:19:01.165413 25006 sgd_solver.cpp:105] Iteration 6996, lr = 0.00250121
I0409 22:19:06.186513 25006 solver.cpp:218] Iteration 7008 (2.39001 iter/s, 5.0209s/12 iters), loss = 0.129918
I0409 22:19:06.186645 25006 solver.cpp:237] Train net output #0: loss = 0.129918 (* 1 = 0.129918 loss)
I0409 22:19:06.186655 25006 sgd_solver.cpp:105] Iteration 7008, lr = 0.00249528
I0409 22:19:11.211905 25006 solver.cpp:218] Iteration 7020 (2.38804 iter/s, 5.02504s/12 iters), loss = 0.182413
I0409 22:19:11.211951 25006 solver.cpp:237] Train net output #0: loss = 0.182413 (* 1 = 0.182413 loss)
I0409 22:19:11.211962 25006 sgd_solver.cpp:105] Iteration 7020, lr = 0.00248935
I0409 22:19:16.288115 25006 solver.cpp:218] Iteration 7032 (2.36409 iter/s, 5.07594s/12 iters), loss = 0.288807
I0409 22:19:16.288162 25006 solver.cpp:237] Train net output #0: loss = 0.288807 (* 1 = 0.288807 loss)
I0409 22:19:16.288170 25006 sgd_solver.cpp:105] Iteration 7032, lr = 0.00248344
I0409 22:19:18.307731 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_7038.caffemodel
I0409 22:19:20.536696 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_7038.solverstate
I0409 22:19:23.748615 25006 solver.cpp:330] Iteration 7038, Testing net (#0)
I0409 22:19:23.748644 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:19:25.388671 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:19:28.307420 25006 solver.cpp:397] Test net output #0: accuracy = 0.257353
I0409 22:19:28.307456 25006 solver.cpp:397] Test net output #1: loss = 5.54407 (* 1 = 5.54407 loss)
I0409 22:19:30.134889 25006 solver.cpp:218] Iteration 7044 (0.866667 iter/s, 13.8462s/12 iters), loss = 0.266176
I0409 22:19:30.134932 25006 solver.cpp:237] Train net output #0: loss = 0.266176 (* 1 = 0.266176 loss)
I0409 22:19:30.134940 25006 sgd_solver.cpp:105] Iteration 7044, lr = 0.00247755
I0409 22:19:35.133364 25006 solver.cpp:218] Iteration 7056 (2.40086 iter/s, 4.99821s/12 iters), loss = 0.231605
I0409 22:19:35.133416 25006 solver.cpp:237] Train net output #0: loss = 0.231605 (* 1 = 0.231605 loss)
I0409 22:19:35.133425 25006 sgd_solver.cpp:105] Iteration 7056, lr = 0.00247166
I0409 22:19:40.211176 25006 solver.cpp:218] Iteration 7068 (2.36335 iter/s, 5.07753s/12 iters), loss = 0.142255
I0409 22:19:40.211289 25006 solver.cpp:237] Train net output #0: loss = 0.142255 (* 1 = 0.142255 loss)
I0409 22:19:40.211302 25006 sgd_solver.cpp:105] Iteration 7068, lr = 0.0024658
I0409 22:19:45.132421 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:19:45.240087 25006 solver.cpp:218] Iteration 7080 (2.38636 iter/s, 5.02858s/12 iters), loss = 0.263872
I0409 22:19:45.240144 25006 solver.cpp:237] Train net output #0: loss = 0.263872 (* 1 = 0.263872 loss)
I0409 22:19:45.240155 25006 sgd_solver.cpp:105] Iteration 7080, lr = 0.00245994
I0409 22:19:50.251811 25006 solver.cpp:218] Iteration 7092 (2.39452 iter/s, 5.01144s/12 iters), loss = 0.292021
I0409 22:19:50.251870 25006 solver.cpp:237] Train net output #0: loss = 0.292021 (* 1 = 0.292021 loss)
I0409 22:19:50.251884 25006 sgd_solver.cpp:105] Iteration 7092, lr = 0.0024541
I0409 22:19:55.271800 25006 solver.cpp:218] Iteration 7104 (2.39058 iter/s, 5.01971s/12 iters), loss = 0.214629
I0409 22:19:55.271857 25006 solver.cpp:237] Train net output #0: loss = 0.214629 (* 1 = 0.214629 loss)
I0409 22:19:55.271870 25006 sgd_solver.cpp:105] Iteration 7104, lr = 0.00244827
I0409 22:20:00.301151 25006 solver.cpp:218] Iteration 7116 (2.38613 iter/s, 5.02907s/12 iters), loss = 0.254553
I0409 22:20:00.301205 25006 solver.cpp:237] Train net output #0: loss = 0.254553 (* 1 = 0.254553 loss)
I0409 22:20:00.301218 25006 sgd_solver.cpp:105] Iteration 7116, lr = 0.00244246
I0409 22:20:05.343108 25006 solver.cpp:218] Iteration 7128 (2.38016 iter/s, 5.04169s/12 iters), loss = 0.342997
I0409 22:20:05.343163 25006 solver.cpp:237] Train net output #0: loss = 0.342997 (* 1 = 0.342997 loss)
I0409 22:20:05.343175 25006 sgd_solver.cpp:105] Iteration 7128, lr = 0.00243666
I0409 22:20:09.863523 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_7140.caffemodel
I0409 22:20:13.728744 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_7140.solverstate
I0409 22:20:16.977015 25006 solver.cpp:330] Iteration 7140, Testing net (#0)
I0409 22:20:16.977044 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:20:18.635188 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:20:21.441917 25006 solver.cpp:397] Test net output #0: accuracy = 0.252451
I0409 22:20:21.441951 25006 solver.cpp:397] Test net output #1: loss = 5.54804 (* 1 = 5.54804 loss)
I0409 22:20:21.530843 25006 solver.cpp:218] Iteration 7140 (0.741335 iter/s, 16.187s/12 iters), loss = 0.166632
I0409 22:20:21.530887 25006 solver.cpp:237] Train net output #0: loss = 0.166632 (* 1 = 0.166632 loss)
I0409 22:20:21.530897 25006 sgd_solver.cpp:105] Iteration 7140, lr = 0.00243088
I0409 22:20:25.858687 25006 solver.cpp:218] Iteration 7152 (2.7729 iter/s, 4.3276s/12 iters), loss = 0.15559
I0409 22:20:25.858744 25006 solver.cpp:237] Train net output #0: loss = 0.15559 (* 1 = 0.15559 loss)
I0409 22:20:25.858757 25006 sgd_solver.cpp:105] Iteration 7152, lr = 0.00242511
I0409 22:20:30.834307 25006 solver.cpp:218] Iteration 7164 (2.41189 iter/s, 4.97534s/12 iters), loss = 0.196295
I0409 22:20:30.834358 25006 solver.cpp:237] Train net output #0: loss = 0.196295 (* 1 = 0.196295 loss)
I0409 22:20:30.834370 25006 sgd_solver.cpp:105] Iteration 7164, lr = 0.00241935
I0409 22:20:36.047832 25006 solver.cpp:218] Iteration 7176 (2.30183 iter/s, 5.21324s/12 iters), loss = 0.324763
I0409 22:20:36.047890 25006 solver.cpp:237] Train net output #0: loss = 0.324763 (* 1 = 0.324763 loss)
I0409 22:20:36.047901 25006 sgd_solver.cpp:105] Iteration 7176, lr = 0.0024136
I0409 22:20:38.161114 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:20:41.051898 25006 solver.cpp:218] Iteration 7188 (2.39818 iter/s, 5.00379s/12 iters), loss = 0.265459
I0409 22:20:41.051942 25006 solver.cpp:237] Train net output #0: loss = 0.265459 (* 1 = 0.265459 loss)
I0409 22:20:41.051951 25006 sgd_solver.cpp:105] Iteration 7188, lr = 0.00240787
I0409 22:20:46.025171 25006 solver.cpp:218] Iteration 7200 (2.41303 iter/s, 4.97301s/12 iters), loss = 0.187895
I0409 22:20:46.025272 25006 solver.cpp:237] Train net output #0: loss = 0.187895 (* 1 = 0.187895 loss)
I0409 22:20:46.025283 25006 sgd_solver.cpp:105] Iteration 7200, lr = 0.00240216
I0409 22:20:51.020474 25006 solver.cpp:218] Iteration 7212 (2.40241 iter/s, 4.99498s/12 iters), loss = 0.132537
I0409 22:20:51.020534 25006 solver.cpp:237] Train net output #0: loss = 0.132537 (* 1 = 0.132537 loss)
I0409 22:20:51.020547 25006 sgd_solver.cpp:105] Iteration 7212, lr = 0.00239645
I0409 22:20:56.074681 25006 solver.cpp:218] Iteration 7224 (2.37439 iter/s, 5.05393s/12 iters), loss = 0.0978749
I0409 22:20:56.074733 25006 solver.cpp:237] Train net output #0: loss = 0.0978748 (* 1 = 0.0978748 loss)
I0409 22:20:56.074743 25006 sgd_solver.cpp:105] Iteration 7224, lr = 0.00239076
I0409 22:21:01.189994 25006 solver.cpp:218] Iteration 7236 (2.34602 iter/s, 5.11504s/12 iters), loss = 0.20305
I0409 22:21:01.190049 25006 solver.cpp:237] Train net output #0: loss = 0.20305 (* 1 = 0.20305 loss)
I0409 22:21:01.190062 25006 sgd_solver.cpp:105] Iteration 7236, lr = 0.00238509
I0409 22:21:03.213201 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_7242.caffemodel
I0409 22:21:05.465615 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_7242.solverstate
I0409 22:21:07.482753 25006 solver.cpp:330] Iteration 7242, Testing net (#0)
I0409 22:21:07.482782 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:21:09.215276 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:21:12.365876 25006 solver.cpp:397] Test net output #0: accuracy = 0.246936
I0409 22:21:12.365921 25006 solver.cpp:397] Test net output #1: loss = 5.53052 (* 1 = 5.53052 loss)
I0409 22:21:14.296612 25006 solver.cpp:218] Iteration 7248 (0.91561 iter/s, 13.106s/12 iters), loss = 0.192535
I0409 22:21:14.296672 25006 solver.cpp:237] Train net output #0: loss = 0.192535 (* 1 = 0.192535 loss)
I0409 22:21:14.296685 25006 sgd_solver.cpp:105] Iteration 7248, lr = 0.00237942
I0409 22:21:19.263319 25006 solver.cpp:218] Iteration 7260 (2.41622 iter/s, 4.96643s/12 iters), loss = 0.117097
I0409 22:21:19.263440 25006 solver.cpp:237] Train net output #0: loss = 0.117097 (* 1 = 0.117097 loss)
I0409 22:21:19.263454 25006 sgd_solver.cpp:105] Iteration 7260, lr = 0.00237378
I0409 22:21:24.264465 25006 solver.cpp:218] Iteration 7272 (2.39961 iter/s, 5.00081s/12 iters), loss = 0.187035
I0409 22:21:24.264521 25006 solver.cpp:237] Train net output #0: loss = 0.187035 (* 1 = 0.187035 loss)
I0409 22:21:24.264534 25006 sgd_solver.cpp:105] Iteration 7272, lr = 0.00236814
I0409 22:21:28.525128 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:21:29.270714 25006 solver.cpp:218] Iteration 7284 (2.39714 iter/s, 5.00597s/12 iters), loss = 0.165468
I0409 22:21:29.270768 25006 solver.cpp:237] Train net output #0: loss = 0.165468 (* 1 = 0.165468 loss)
I0409 22:21:29.270781 25006 sgd_solver.cpp:105] Iteration 7284, lr = 0.00236252
I0409 22:21:34.214643 25006 solver.cpp:218] Iteration 7296 (2.42735 iter/s, 4.94366s/12 iters), loss = 0.156291
I0409 22:21:34.214699 25006 solver.cpp:237] Train net output #0: loss = 0.15629 (* 1 = 0.15629 loss)
I0409 22:21:34.214710 25006 sgd_solver.cpp:105] Iteration 7296, lr = 0.00235691
I0409 22:21:39.192230 25006 solver.cpp:218] Iteration 7308 (2.41094 iter/s, 4.97731s/12 iters), loss = 0.177592
I0409 22:21:39.192276 25006 solver.cpp:237] Train net output #0: loss = 0.177592 (* 1 = 0.177592 loss)
I0409 22:21:39.192284 25006 sgd_solver.cpp:105] Iteration 7308, lr = 0.00235131
I0409 22:21:44.143410 25006 solver.cpp:218] Iteration 7320 (2.42379 iter/s, 4.95092s/12 iters), loss = 0.212085
I0409 22:21:44.143460 25006 solver.cpp:237] Train net output #0: loss = 0.212085 (* 1 = 0.212085 loss)
I0409 22:21:44.143468 25006 sgd_solver.cpp:105] Iteration 7320, lr = 0.00234573
I0409 22:21:49.175557 25006 solver.cpp:218] Iteration 7332 (2.3848 iter/s, 5.03188s/12 iters), loss = 0.160714
I0409 22:21:49.175606 25006 solver.cpp:237] Train net output #0: loss = 0.160714 (* 1 = 0.160714 loss)
I0409 22:21:49.175618 25006 sgd_solver.cpp:105] Iteration 7332, lr = 0.00234016
I0409 22:21:53.711813 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_7344.caffemodel
I0409 22:21:55.972450 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_7344.solverstate
I0409 22:21:57.627851 25006 solver.cpp:330] Iteration 7344, Testing net (#0)
I0409 22:21:57.627880 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:21:59.224095 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:22:02.152467 25006 solver.cpp:397] Test net output #0: accuracy = 0.251226
I0409 22:22:02.152498 25006 solver.cpp:397] Test net output #1: loss = 5.47511 (* 1 = 5.47511 loss)
I0409 22:22:02.241385 25006 solver.cpp:218] Iteration 7344 (0.918468 iter/s, 13.0652s/12 iters), loss = 0.117126
I0409 22:22:02.241426 25006 solver.cpp:237] Train net output #0: loss = 0.117125 (* 1 = 0.117125 loss)
I0409 22:22:02.241434 25006 sgd_solver.cpp:105] Iteration 7344, lr = 0.0023346
I0409 22:22:06.609246 25006 solver.cpp:218] Iteration 7356 (2.74749 iter/s, 4.36763s/12 iters), loss = 0.16112
I0409 22:22:06.609294 25006 solver.cpp:237] Train net output #0: loss = 0.16112 (* 1 = 0.16112 loss)
I0409 22:22:06.609306 25006 sgd_solver.cpp:105] Iteration 7356, lr = 0.00232906
I0409 22:22:11.711483 25006 solver.cpp:218] Iteration 7368 (2.35204 iter/s, 5.10196s/12 iters), loss = 0.103638
I0409 22:22:11.711537 25006 solver.cpp:237] Train net output #0: loss = 0.103638 (* 1 = 0.103638 loss)
I0409 22:22:11.711549 25006 sgd_solver.cpp:105] Iteration 7368, lr = 0.00232353
I0409 22:22:16.860683 25006 solver.cpp:218] Iteration 7380 (2.33059 iter/s, 5.14892s/12 iters), loss = 0.184423
I0409 22:22:16.860743 25006 solver.cpp:237] Train net output #0: loss = 0.184423 (* 1 = 0.184423 loss)
I0409 22:22:16.860756 25006 sgd_solver.cpp:105] Iteration 7380, lr = 0.00231802
I0409 22:22:18.264335 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:22:21.849505 25006 solver.cpp:218] Iteration 7392 (2.40551 iter/s, 4.98855s/12 iters), loss = 0.159449
I0409 22:22:21.849542 25006 solver.cpp:237] Train net output #0: loss = 0.159449 (* 1 = 0.159449 loss)
I0409 22:22:21.849552 25006 sgd_solver.cpp:105] Iteration 7392, lr = 0.00231251
I0409 22:22:26.785840 25006 solver.cpp:218] Iteration 7404 (2.43108 iter/s, 4.93608s/12 iters), loss = 0.153141
I0409 22:22:26.785984 25006 solver.cpp:237] Train net output #0: loss = 0.15314 (* 1 = 0.15314 loss)
I0409 22:22:26.785998 25006 sgd_solver.cpp:105] Iteration 7404, lr = 0.00230702
I0409 22:22:31.710605 25006 solver.cpp:218] Iteration 7416 (2.43684 iter/s, 4.92441s/12 iters), loss = 0.147318
I0409 22:22:31.710657 25006 solver.cpp:237] Train net output #0: loss = 0.147318 (* 1 = 0.147318 loss)
I0409 22:22:31.710669 25006 sgd_solver.cpp:105] Iteration 7416, lr = 0.00230154
I0409 22:22:36.570037 25006 solver.cpp:218] Iteration 7428 (2.46956 iter/s, 4.85917s/12 iters), loss = 0.353858
I0409 22:22:36.570089 25006 solver.cpp:237] Train net output #0: loss = 0.353858 (* 1 = 0.353858 loss)
I0409 22:22:36.570107 25006 sgd_solver.cpp:105] Iteration 7428, lr = 0.00229608
I0409 22:22:41.650018 25006 solver.cpp:218] Iteration 7440 (2.36234 iter/s, 5.07971s/12 iters), loss = 0.212881
I0409 22:22:41.650068 25006 solver.cpp:237] Train net output #0: loss = 0.212881 (* 1 = 0.212881 loss)
I0409 22:22:41.650079 25006 sgd_solver.cpp:105] Iteration 7440, lr = 0.00229063
I0409 22:22:43.678550 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_7446.caffemodel
I0409 22:22:45.914721 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_7446.solverstate
I0409 22:22:49.316545 25006 solver.cpp:330] Iteration 7446, Testing net (#0)
I0409 22:22:49.316567 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:22:50.847326 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:22:53.774511 25006 solver.cpp:397] Test net output #0: accuracy = 0.257353
I0409 22:22:53.774549 25006 solver.cpp:397] Test net output #1: loss = 5.48656 (* 1 = 5.48656 loss)
I0409 22:22:55.731698 25006 solver.cpp:218] Iteration 7452 (0.85221 iter/s, 14.081s/12 iters), loss = 0.152289
I0409 22:22:55.731755 25006 solver.cpp:237] Train net output #0: loss = 0.152289 (* 1 = 0.152289 loss)
I0409 22:22:55.731768 25006 sgd_solver.cpp:105] Iteration 7452, lr = 0.00228519
I0409 22:23:00.953053 25006 solver.cpp:218] Iteration 7464 (2.29838 iter/s, 5.22107s/12 iters), loss = 0.253611
I0409 22:23:00.953158 25006 solver.cpp:237] Train net output #0: loss = 0.253611 (* 1 = 0.253611 loss)
I0409 22:23:00.953171 25006 sgd_solver.cpp:105] Iteration 7464, lr = 0.00227976
I0409 22:23:05.927603 25006 solver.cpp:218] Iteration 7476 (2.41243 iter/s, 4.97423s/12 iters), loss = 0.209743
I0409 22:23:05.927640 25006 solver.cpp:237] Train net output #0: loss = 0.209743 (* 1 = 0.209743 loss)
I0409 22:23:05.927650 25006 sgd_solver.cpp:105] Iteration 7476, lr = 0.00227435
I0409 22:23:09.414264 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:23:10.864601 25006 solver.cpp:218] Iteration 7488 (2.43075 iter/s, 4.93674s/12 iters), loss = 0.285134
I0409 22:23:10.864656 25006 solver.cpp:237] Train net output #0: loss = 0.285134 (* 1 = 0.285134 loss)
I0409 22:23:10.864670 25006 sgd_solver.cpp:105] Iteration 7488, lr = 0.00226895
I0409 22:23:15.847970 25006 solver.cpp:218] Iteration 7500 (2.40814 iter/s, 4.9831s/12 iters), loss = 0.24482
I0409 22:23:15.848024 25006 solver.cpp:237] Train net output #0: loss = 0.24482 (* 1 = 0.24482 loss)
I0409 22:23:15.848040 25006 sgd_solver.cpp:105] Iteration 7500, lr = 0.00226357
I0409 22:23:20.871194 25006 solver.cpp:218] Iteration 7512 (2.38903 iter/s, 5.02295s/12 iters), loss = 0.213118
I0409 22:23:20.871254 25006 solver.cpp:237] Train net output #0: loss = 0.213118 (* 1 = 0.213118 loss)
I0409 22:23:20.871266 25006 sgd_solver.cpp:105] Iteration 7512, lr = 0.00225819
I0409 22:23:26.281222 25006 solver.cpp:218] Iteration 7524 (2.21822 iter/s, 5.40973s/12 iters), loss = 0.196505
I0409 22:23:26.281284 25006 solver.cpp:237] Train net output #0: loss = 0.196505 (* 1 = 0.196505 loss)
I0409 22:23:26.281296 25006 sgd_solver.cpp:105] Iteration 7524, lr = 0.00225283
I0409 22:23:31.282506 25006 solver.cpp:218] Iteration 7536 (2.39952 iter/s, 5.001s/12 iters), loss = 0.166933
I0409 22:23:31.282656 25006 solver.cpp:237] Train net output #0: loss = 0.166933 (* 1 = 0.166933 loss)
I0409 22:23:31.282670 25006 sgd_solver.cpp:105] Iteration 7536, lr = 0.00224748
I0409 22:23:35.783387 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_7548.caffemodel
I0409 22:23:41.299964 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_7548.solverstate
I0409 22:23:49.025219 25006 solver.cpp:330] Iteration 7548, Testing net (#0)
I0409 22:23:49.025247 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:23:50.536271 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:23:53.494583 25006 solver.cpp:397] Test net output #0: accuracy = 0.264706
I0409 22:23:53.494621 25006 solver.cpp:397] Test net output #1: loss = 5.4741 (* 1 = 5.4741 loss)
I0409 22:23:53.580976 25006 solver.cpp:218] Iteration 7548 (0.538179 iter/s, 22.2974s/12 iters), loss = 0.0544058
I0409 22:23:53.581023 25006 solver.cpp:237] Train net output #0: loss = 0.0544057 (* 1 = 0.0544057 loss)
I0409 22:23:53.581033 25006 sgd_solver.cpp:105] Iteration 7548, lr = 0.00224215
I0409 22:23:57.709173 25006 solver.cpp:218] Iteration 7560 (2.907 iter/s, 4.12797s/12 iters), loss = 0.219532
I0409 22:23:57.709229 25006 solver.cpp:237] Train net output #0: loss = 0.219532 (* 1 = 0.219532 loss)
I0409 22:23:57.709241 25006 sgd_solver.cpp:105] Iteration 7560, lr = 0.00223682
I0409 22:24:02.789305 25006 solver.cpp:218] Iteration 7572 (2.36227 iter/s, 5.07986s/12 iters), loss = 0.102993
I0409 22:24:02.789419 25006 solver.cpp:237] Train net output #0: loss = 0.102993 (* 1 = 0.102993 loss)
I0409 22:24:02.789431 25006 sgd_solver.cpp:105] Iteration 7572, lr = 0.00223151
I0409 22:24:07.842160 25006 solver.cpp:218] Iteration 7584 (2.37505 iter/s, 5.05252s/12 iters), loss = 0.153495
I0409 22:24:07.842216 25006 solver.cpp:237] Train net output #0: loss = 0.153495 (* 1 = 0.153495 loss)
I0409 22:24:07.842228 25006 sgd_solver.cpp:105] Iteration 7584, lr = 0.00222621
I0409 22:24:08.512398 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:24:12.918182 25006 solver.cpp:218] Iteration 7596 (2.36419 iter/s, 5.07574s/12 iters), loss = 0.208799
I0409 22:24:12.918239 25006 solver.cpp:237] Train net output #0: loss = 0.208799 (* 1 = 0.208799 loss)
I0409 22:24:12.918251 25006 sgd_solver.cpp:105] Iteration 7596, lr = 0.00222093
I0409 22:24:17.927441 25006 solver.cpp:218] Iteration 7608 (2.39569 iter/s, 5.00899s/12 iters), loss = 0.117384
I0409 22:24:17.927493 25006 solver.cpp:237] Train net output #0: loss = 0.117384 (* 1 = 0.117384 loss)
I0409 22:24:17.927506 25006 sgd_solver.cpp:105] Iteration 7608, lr = 0.00221565
I0409 22:24:22.947443 25006 solver.cpp:218] Iteration 7620 (2.39057 iter/s, 5.01973s/12 iters), loss = 0.121898
I0409 22:24:22.947499 25006 solver.cpp:237] Train net output #0: loss = 0.121898 (* 1 = 0.121898 loss)
I0409 22:24:22.947511 25006 sgd_solver.cpp:105] Iteration 7620, lr = 0.00221039
I0409 22:24:25.369743 25006 blocking_queue.cpp:49] Waiting for data
I0409 22:24:27.945948 25006 solver.cpp:218] Iteration 7632 (2.40085 iter/s, 4.99823s/12 iters), loss = 0.385517
I0409 22:24:27.946013 25006 solver.cpp:237] Train net output #0: loss = 0.385517 (* 1 = 0.385517 loss)
I0409 22:24:27.946025 25006 sgd_solver.cpp:105] Iteration 7632, lr = 0.00220515
I0409 22:24:33.025100 25006 solver.cpp:218] Iteration 7644 (2.36273 iter/s, 5.07887s/12 iters), loss = 0.186265
I0409 22:24:33.025254 25006 solver.cpp:237] Train net output #0: loss = 0.186265 (* 1 = 0.186265 loss)
I0409 22:24:33.025266 25006 sgd_solver.cpp:105] Iteration 7644, lr = 0.00219991
I0409 22:24:35.061849 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_7650.caffemodel
I0409 22:24:37.842402 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_7650.solverstate
I0409 22:24:39.979136 25006 solver.cpp:330] Iteration 7650, Testing net (#0)
I0409 22:24:39.979163 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:24:41.456781 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:24:44.500766 25006 solver.cpp:397] Test net output #0: accuracy = 0.264093
I0409 22:24:44.500795 25006 solver.cpp:397] Test net output #1: loss = 5.49007 (* 1 = 5.49007 loss)
I0409 22:24:46.598348 25006 solver.cpp:218] Iteration 7656 (0.884139 iter/s, 13.5725s/12 iters), loss = 0.123643
I0409 22:24:46.598388 25006 solver.cpp:237] Train net output #0: loss = 0.123643 (* 1 = 0.123643 loss)
I0409 22:24:46.598398 25006 sgd_solver.cpp:105] Iteration 7656, lr = 0.00219469
I0409 22:24:51.639593 25006 solver.cpp:218] Iteration 7668 (2.38049 iter/s, 5.04098s/12 iters), loss = 0.104856
I0409 22:24:51.639650 25006 solver.cpp:237] Train net output #0: loss = 0.104856 (* 1 = 0.104856 loss)
I0409 22:24:51.639662 25006 sgd_solver.cpp:105] Iteration 7668, lr = 0.00218948
I0409 22:24:56.640771 25006 solver.cpp:218] Iteration 7680 (2.39956 iter/s, 5.00091s/12 iters), loss = 0.190801
I0409 22:24:56.640821 25006 solver.cpp:237] Train net output #0: loss = 0.190801 (* 1 = 0.190801 loss)
I0409 22:24:56.640833 25006 sgd_solver.cpp:105] Iteration 7680, lr = 0.00218428
I0409 22:24:59.387207 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:25:01.548704 25006 solver.cpp:218] Iteration 7692 (2.44515 iter/s, 4.90767s/12 iters), loss = 0.159561
I0409 22:25:01.548744 25006 solver.cpp:237] Train net output #0: loss = 0.159561 (* 1 = 0.159561 loss)
I0409 22:25:01.548753 25006 sgd_solver.cpp:105] Iteration 7692, lr = 0.00217909
I0409 22:25:06.521487 25006 solver.cpp:218] Iteration 7704 (2.41326 iter/s, 4.97252s/12 iters), loss = 0.150496
I0409 22:25:06.521606 25006 solver.cpp:237] Train net output #0: loss = 0.150496 (* 1 = 0.150496 loss)
I0409 22:25:06.521622 25006 sgd_solver.cpp:105] Iteration 7704, lr = 0.00217392
I0409 22:25:11.634289 25006 solver.cpp:218] Iteration 7716 (2.34721 iter/s, 5.11246s/12 iters), loss = 0.0554833
I0409 22:25:11.634342 25006 solver.cpp:237] Train net output #0: loss = 0.0554832 (* 1 = 0.0554832 loss)
I0409 22:25:11.634356 25006 sgd_solver.cpp:105] Iteration 7716, lr = 0.00216876
I0409 22:25:16.544306 25006 solver.cpp:218] Iteration 7728 (2.44412 iter/s, 4.90975s/12 iters), loss = 0.148269
I0409 22:25:16.544351 25006 solver.cpp:237] Train net output #0: loss = 0.148269 (* 1 = 0.148269 loss)
I0409 22:25:16.544359 25006 sgd_solver.cpp:105] Iteration 7728, lr = 0.00216361
I0409 22:25:21.544697 25006 solver.cpp:218] Iteration 7740 (2.39994 iter/s, 5.00012s/12 iters), loss = 0.263545
I0409 22:25:21.544745 25006 solver.cpp:237] Train net output #0: loss = 0.263545 (* 1 = 0.263545 loss)
I0409 22:25:21.544755 25006 sgd_solver.cpp:105] Iteration 7740, lr = 0.00215847
I0409 22:25:26.065735 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_7752.caffemodel
I0409 22:25:28.308713 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_7752.solverstate
I0409 22:25:29.949700 25006 solver.cpp:330] Iteration 7752, Testing net (#0)
I0409 22:25:29.949720 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:25:31.288159 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:25:34.331936 25006 solver.cpp:397] Test net output #0: accuracy = 0.259191
I0409 22:25:34.331982 25006 solver.cpp:397] Test net output #1: loss = 5.5341 (* 1 = 5.5341 loss)
I0409 22:25:34.421263 25006 solver.cpp:218] Iteration 7752 (0.931968 iter/s, 12.876s/12 iters), loss = 0.10753
I0409 22:25:34.421319 25006 solver.cpp:237] Train net output #0: loss = 0.10753 (* 1 = 0.10753 loss)
I0409 22:25:34.421332 25006 sgd_solver.cpp:105] Iteration 7752, lr = 0.00215335
I0409 22:25:38.624646 25006 solver.cpp:218] Iteration 7764 (2.85501 iter/s, 4.20314s/12 iters), loss = 0.0954403
I0409 22:25:38.624920 25006 solver.cpp:237] Train net output #0: loss = 0.0954403 (* 1 = 0.0954403 loss)
I0409 22:25:38.624938 25006 sgd_solver.cpp:105] Iteration 7764, lr = 0.00214823
I0409 22:25:43.575222 25006 solver.cpp:218] Iteration 7776 (2.4242 iter/s, 4.95009s/12 iters), loss = 0.132922
I0409 22:25:43.575280 25006 solver.cpp:237] Train net output #0: loss = 0.132922 (* 1 = 0.132922 loss)
I0409 22:25:43.575292 25006 sgd_solver.cpp:105] Iteration 7776, lr = 0.00214313
I0409 22:25:48.518172 25006 solver.cpp:218] Iteration 7788 (2.42783 iter/s, 4.94268s/12 iters), loss = 0.122836
I0409 22:25:48.518221 25006 solver.cpp:237] Train net output #0: loss = 0.122836 (* 1 = 0.122836 loss)
I0409 22:25:48.518234 25006 sgd_solver.cpp:105] Iteration 7788, lr = 0.00213805
I0409 22:25:48.526273 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:25:53.452464 25006 solver.cpp:218] Iteration 7800 (2.43209 iter/s, 4.93403s/12 iters), loss = 0.109027
I0409 22:25:53.452515 25006 solver.cpp:237] Train net output #0: loss = 0.109027 (* 1 = 0.109027 loss)
I0409 22:25:53.452526 25006 sgd_solver.cpp:105] Iteration 7800, lr = 0.00213297
I0409 22:25:58.669173 25006 solver.cpp:218] Iteration 7812 (2.30042 iter/s, 5.21643s/12 iters), loss = 0.13241
I0409 22:25:58.669226 25006 solver.cpp:237] Train net output #0: loss = 0.13241 (* 1 = 0.13241 loss)
I0409 22:25:58.669241 25006 sgd_solver.cpp:105] Iteration 7812, lr = 0.00212791
I0409 22:26:03.754459 25006 solver.cpp:218] Iteration 7824 (2.35988 iter/s, 5.085s/12 iters), loss = 0.162315
I0409 22:26:03.754528 25006 solver.cpp:237] Train net output #0: loss = 0.162315 (* 1 = 0.162315 loss)
I0409 22:26:03.754544 25006 sgd_solver.cpp:105] Iteration 7824, lr = 0.00212285
I0409 22:26:08.642338 25006 solver.cpp:218] Iteration 7836 (2.45519 iter/s, 4.88761s/12 iters), loss = 0.163715
I0409 22:26:08.642424 25006 solver.cpp:237] Train net output #0: loss = 0.163715 (* 1 = 0.163715 loss)
I0409 22:26:08.642439 25006 sgd_solver.cpp:105] Iteration 7836, lr = 0.00211781
I0409 22:26:13.734377 25006 solver.cpp:218] Iteration 7848 (2.35676 iter/s, 5.09173s/12 iters), loss = 0.0893708
I0409 22:26:13.734426 25006 solver.cpp:237] Train net output #0: loss = 0.0893707 (* 1 = 0.0893707 loss)
I0409 22:26:13.734436 25006 sgd_solver.cpp:105] Iteration 7848, lr = 0.00211279
I0409 22:26:15.809290 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_7854.caffemodel
I0409 22:26:19.343690 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_7854.solverstate
I0409 22:26:24.082123 25006 solver.cpp:330] Iteration 7854, Testing net (#0)
I0409 22:26:24.082152 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:26:25.473237 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:26:28.560279 25006 solver.cpp:397] Test net output #0: accuracy = 0.264706
I0409 22:26:28.560329 25006 solver.cpp:397] Test net output #1: loss = 5.58246 (* 1 = 5.58246 loss)
I0409 22:26:30.456111 25006 solver.cpp:218] Iteration 7860 (0.717661 iter/s, 16.721s/12 iters), loss = 0.197325
I0409 22:26:30.456164 25006 solver.cpp:237] Train net output #0: loss = 0.197325 (* 1 = 0.197325 loss)
I0409 22:26:30.456176 25006 sgd_solver.cpp:105] Iteration 7860, lr = 0.00210777
I0409 22:26:35.601157 25006 solver.cpp:218] Iteration 7872 (2.33247 iter/s, 5.14477s/12 iters), loss = 0.142038
I0409 22:26:35.601202 25006 solver.cpp:237] Train net output #0: loss = 0.142038 (* 1 = 0.142038 loss)
I0409 22:26:35.601212 25006 sgd_solver.cpp:105] Iteration 7872, lr = 0.00210277
I0409 22:26:40.721989 25006 solver.cpp:218] Iteration 7884 (2.34349 iter/s, 5.12056s/12 iters), loss = 0.129263
I0409 22:26:40.722151 25006 solver.cpp:237] Train net output #0: loss = 0.129263 (* 1 = 0.129263 loss)
I0409 22:26:40.722167 25006 sgd_solver.cpp:105] Iteration 7884, lr = 0.00209777
I0409 22:26:42.891894 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:26:45.760021 25006 solver.cpp:218] Iteration 7896 (2.38206 iter/s, 5.03765s/12 iters), loss = 0.114191
I0409 22:26:45.760078 25006 solver.cpp:237] Train net output #0: loss = 0.114191 (* 1 = 0.114191 loss)
I0409 22:26:45.760090 25006 sgd_solver.cpp:105] Iteration 7896, lr = 0.00209279
I0409 22:26:50.764472 25006 solver.cpp:218] Iteration 7908 (2.398 iter/s, 5.00418s/12 iters), loss = 0.147705
I0409 22:26:50.764513 25006 solver.cpp:237] Train net output #0: loss = 0.147705 (* 1 = 0.147705 loss)
I0409 22:26:50.764520 25006 sgd_solver.cpp:105] Iteration 7908, lr = 0.00208782
I0409 22:26:55.741420 25006 solver.cpp:218] Iteration 7920 (2.41124 iter/s, 4.97669s/12 iters), loss = 0.227778
I0409 22:26:55.741473 25006 solver.cpp:237] Train net output #0: loss = 0.227778 (* 1 = 0.227778 loss)
I0409 22:26:55.741487 25006 sgd_solver.cpp:105] Iteration 7920, lr = 0.00208287
I0409 22:27:00.827502 25006 solver.cpp:218] Iteration 7932 (2.35951 iter/s, 5.0858s/12 iters), loss = 0.167895
I0409 22:27:00.827550 25006 solver.cpp:237] Train net output #0: loss = 0.167895 (* 1 = 0.167895 loss)
I0409 22:27:00.827560 25006 sgd_solver.cpp:105] Iteration 7932, lr = 0.00207792
I0409 22:27:05.814695 25006 solver.cpp:218] Iteration 7944 (2.40629 iter/s, 4.98693s/12 iters), loss = 0.0915093
I0409 22:27:05.814750 25006 solver.cpp:237] Train net output #0: loss = 0.0915093 (* 1 = 0.0915093 loss)
I0409 22:27:05.814764 25006 sgd_solver.cpp:105] Iteration 7944, lr = 0.00207299
I0409 22:27:10.273592 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_7956.caffemodel
I0409 22:27:13.462688 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_7956.solverstate
I0409 22:27:16.726069 25006 solver.cpp:330] Iteration 7956, Testing net (#0)
I0409 22:27:16.726095 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:27:18.061125 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:27:21.227764 25006 solver.cpp:397] Test net output #0: accuracy = 0.259191
I0409 22:27:21.227810 25006 solver.cpp:397] Test net output #1: loss = 5.59956 (* 1 = 5.59956 loss)
I0409 22:27:21.316900 25006 solver.cpp:218] Iteration 7956 (0.774118 iter/s, 15.5015s/12 iters), loss = 0.0693965
I0409 22:27:21.316942 25006 solver.cpp:237] Train net output #0: loss = 0.0693964 (* 1 = 0.0693964 loss)
I0409 22:27:21.316952 25006 sgd_solver.cpp:105] Iteration 7956, lr = 0.00206807
I0409 22:27:25.715620 25006 solver.cpp:218] Iteration 7968 (2.72821 iter/s, 4.39848s/12 iters), loss = 0.0532126
I0409 22:27:25.715672 25006 solver.cpp:237] Train net output #0: loss = 0.0532125 (* 1 = 0.0532125 loss)
I0409 22:27:25.715684 25006 sgd_solver.cpp:105] Iteration 7968, lr = 0.00206316
I0409 22:27:30.726195 25006 solver.cpp:218] Iteration 7980 (2.39506 iter/s, 5.01031s/12 iters), loss = 0.107301
I0409 22:27:30.726243 25006 solver.cpp:237] Train net output #0: loss = 0.107301 (* 1 = 0.107301 loss)
I0409 22:27:30.726253 25006 sgd_solver.cpp:105] Iteration 7980, lr = 0.00205826
I0409 22:27:35.056847 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:27:35.774751 25006 solver.cpp:218] Iteration 7992 (2.37704 iter/s, 5.04829s/12 iters), loss = 0.175177
I0409 22:27:35.774787 25006 solver.cpp:237] Train net output #0: loss = 0.175177 (* 1 = 0.175177 loss)
I0409 22:27:35.774797 25006 sgd_solver.cpp:105] Iteration 7992, lr = 0.00205337
I0409 22:27:40.770052 25006 solver.cpp:218] Iteration 8004 (2.40238 iter/s, 4.99505s/12 iters), loss = 0.165921
I0409 22:27:40.770099 25006 solver.cpp:237] Train net output #0: loss = 0.165921 (* 1 = 0.165921 loss)
I0409 22:27:40.770108 25006 sgd_solver.cpp:105] Iteration 8004, lr = 0.0020485
I0409 22:27:45.743018 25006 solver.cpp:218] Iteration 8016 (2.41317 iter/s, 4.9727s/12 iters), loss = 0.223039
I0409 22:27:45.746313 25006 solver.cpp:237] Train net output #0: loss = 0.223039 (* 1 = 0.223039 loss)
I0409 22:27:45.746323 25006 sgd_solver.cpp:105] Iteration 8016, lr = 0.00204363
I0409 22:27:50.689460 25006 solver.cpp:218] Iteration 8028 (2.42771 iter/s, 4.94293s/12 iters), loss = 0.0675286
I0409 22:27:50.689510 25006 solver.cpp:237] Train net output #0: loss = 0.0675285 (* 1 = 0.0675285 loss)
I0409 22:27:50.689523 25006 sgd_solver.cpp:105] Iteration 8028, lr = 0.00203878
I0409 22:27:55.727442 25006 solver.cpp:218] Iteration 8040 (2.38203 iter/s, 5.03772s/12 iters), loss = 0.0867825
I0409 22:27:55.727486 25006 solver.cpp:237] Train net output #0: loss = 0.0867824 (* 1 = 0.0867824 loss)
I0409 22:27:55.727495 25006 sgd_solver.cpp:105] Iteration 8040, lr = 0.00203394
I0409 22:28:00.771488 25006 solver.cpp:218] Iteration 8052 (2.37917 iter/s, 5.04378s/12 iters), loss = 0.102509
I0409 22:28:00.771538 25006 solver.cpp:237] Train net output #0: loss = 0.102509 (* 1 = 0.102509 loss)
I0409 22:28:00.771548 25006 sgd_solver.cpp:105] Iteration 8052, lr = 0.00202911
I0409 22:28:02.819327 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_8058.caffemodel
I0409 22:28:04.989104 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_8058.solverstate
I0409 22:28:06.612519 25006 solver.cpp:330] Iteration 8058, Testing net (#0)
I0409 22:28:06.612545 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:28:07.914877 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:28:11.104678 25006 solver.cpp:397] Test net output #0: accuracy = 0.273284
I0409 22:28:11.104725 25006 solver.cpp:397] Test net output #1: loss = 5.47915 (* 1 = 5.47915 loss)
I0409 22:28:13.127261 25006 solver.cpp:218] Iteration 8064 (0.97125 iter/s, 12.3552s/12 iters), loss = 0.198315
I0409 22:28:13.127310 25006 solver.cpp:237] Train net output #0: loss = 0.198315 (* 1 = 0.198315 loss)
I0409 22:28:13.127321 25006 sgd_solver.cpp:105] Iteration 8064, lr = 0.00202429
I0409 22:28:18.238191 25006 solver.cpp:218] Iteration 8076 (2.34804 iter/s, 5.11065s/12 iters), loss = 0.095023
I0409 22:28:18.238312 25006 solver.cpp:237] Train net output #0: loss = 0.0950229 (* 1 = 0.0950229 loss)
I0409 22:28:18.238324 25006 sgd_solver.cpp:105] Iteration 8076, lr = 0.00201949
I0409 22:28:23.313290 25006 solver.cpp:218] Iteration 8088 (2.36464 iter/s, 5.07476s/12 iters), loss = 0.15391
I0409 22:28:23.313341 25006 solver.cpp:237] Train net output #0: loss = 0.15391 (* 1 = 0.15391 loss)
I0409 22:28:23.313354 25006 sgd_solver.cpp:105] Iteration 8088, lr = 0.00201469
I0409 22:28:24.706972 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:28:28.254933 25006 solver.cpp:218] Iteration 8100 (2.42847 iter/s, 4.94138s/12 iters), loss = 0.0865381
I0409 22:28:28.254987 25006 solver.cpp:237] Train net output #0: loss = 0.086538 (* 1 = 0.086538 loss)
I0409 22:28:28.254998 25006 sgd_solver.cpp:105] Iteration 8100, lr = 0.00200991
I0409 22:28:33.220472 25006 solver.cpp:218] Iteration 8112 (2.41679 iter/s, 4.96526s/12 iters), loss = 0.0684363
I0409 22:28:33.220525 25006 solver.cpp:237] Train net output #0: loss = 0.0684362 (* 1 = 0.0684362 loss)
I0409 22:28:33.220538 25006 sgd_solver.cpp:105] Iteration 8112, lr = 0.00200514
I0409 22:28:38.232287 25006 solver.cpp:218] Iteration 8124 (2.39447 iter/s, 5.01154s/12 iters), loss = 0.262834
I0409 22:28:38.232343 25006 solver.cpp:237] Train net output #0: loss = 0.262834 (* 1 = 0.262834 loss)
I0409 22:28:38.232357 25006 sgd_solver.cpp:105] Iteration 8124, lr = 0.00200038
I0409 22:28:43.188396 25006 solver.cpp:218] Iteration 8136 (2.42139 iter/s, 4.95584s/12 iters), loss = 0.190233
I0409 22:28:43.188454 25006 solver.cpp:237] Train net output #0: loss = 0.190233 (* 1 = 0.190233 loss)
I0409 22:28:43.188467 25006 sgd_solver.cpp:105] Iteration 8136, lr = 0.00199563
I0409 22:28:48.165989 25006 solver.cpp:218] Iteration 8148 (2.41094 iter/s, 4.97732s/12 iters), loss = 0.0765678
I0409 22:28:48.166040 25006 solver.cpp:237] Train net output #0: loss = 0.0765677 (* 1 = 0.0765677 loss)
I0409 22:28:48.166054 25006 sgd_solver.cpp:105] Iteration 8148, lr = 0.00199089
I0409 22:28:52.666182 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_8160.caffemodel
I0409 22:28:56.159936 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_8160.solverstate
I0409 22:28:58.332671 25006 solver.cpp:330] Iteration 8160, Testing net (#0)
I0409 22:28:58.332700 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:28:59.611166 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:29:02.966758 25006 solver.cpp:397] Test net output #0: accuracy = 0.275735
I0409 22:29:02.966795 25006 solver.cpp:397] Test net output #1: loss = 5.46209 (* 1 = 5.46209 loss)
I0409 22:29:03.055830 25006 solver.cpp:218] Iteration 8160 (0.805954 iter/s, 14.8892s/12 iters), loss = 0.0725834
I0409 22:29:03.055879 25006 solver.cpp:237] Train net output #0: loss = 0.0725833 (* 1 = 0.0725833 loss)
I0409 22:29:03.055889 25006 sgd_solver.cpp:105] Iteration 8160, lr = 0.00198616
I0409 22:29:07.290452 25006 solver.cpp:218] Iteration 8172 (2.83394 iter/s, 4.23439s/12 iters), loss = 0.154105
I0409 22:29:07.290504 25006 solver.cpp:237] Train net output #0: loss = 0.154105 (* 1 = 0.154105 loss)
I0409 22:29:07.290516 25006 sgd_solver.cpp:105] Iteration 8172, lr = 0.00198145
I0409 22:29:12.293926 25006 solver.cpp:218] Iteration 8184 (2.39846 iter/s, 5.00321s/12 iters), loss = 0.109582
I0409 22:29:12.293992 25006 solver.cpp:237] Train net output #0: loss = 0.109582 (* 1 = 0.109582 loss)
I0409 22:29:12.294003 25006 sgd_solver.cpp:105] Iteration 8184, lr = 0.00197674
I0409 22:29:15.809031 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:29:17.253443 25006 solver.cpp:218] Iteration 8196 (2.41973 iter/s, 4.95923s/12 iters), loss = 0.170893
I0409 22:29:17.253502 25006 solver.cpp:237] Train net output #0: loss = 0.170893 (* 1 = 0.170893 loss)
I0409 22:29:17.253515 25006 sgd_solver.cpp:105] Iteration 8196, lr = 0.00197205
I0409 22:29:22.325997 25006 solver.cpp:218] Iteration 8208 (2.3658 iter/s, 5.07227s/12 iters), loss = 0.126687
I0409 22:29:22.326042 25006 solver.cpp:237] Train net output #0: loss = 0.126687 (* 1 = 0.126687 loss)
I0409 22:29:22.326051 25006 sgd_solver.cpp:105] Iteration 8208, lr = 0.00196737
I0409 22:29:27.435523 25006 solver.cpp:218] Iteration 8220 (2.34867 iter/s, 5.10927s/12 iters), loss = 0.103114
I0409 22:29:27.435631 25006 solver.cpp:237] Train net output #0: loss = 0.103114 (* 1 = 0.103114 loss)
I0409 22:29:27.435643 25006 sgd_solver.cpp:105] Iteration 8220, lr = 0.0019627
I0409 22:29:32.404386 25006 solver.cpp:218] Iteration 8232 (2.41519 iter/s, 4.96854s/12 iters), loss = 0.102497
I0409 22:29:32.404439 25006 solver.cpp:237] Train net output #0: loss = 0.102496 (* 1 = 0.102496 loss)
I0409 22:29:32.404453 25006 sgd_solver.cpp:105] Iteration 8232, lr = 0.00195804
I0409 22:29:37.441541 25006 solver.cpp:218] Iteration 8244 (2.38242 iter/s, 5.03689s/12 iters), loss = 0.0337062
I0409 22:29:37.441589 25006 solver.cpp:237] Train net output #0: loss = 0.0337061 (* 1 = 0.0337061 loss)
I0409 22:29:37.441604 25006 sgd_solver.cpp:105] Iteration 8244, lr = 0.00195339
I0409 22:29:42.465689 25006 solver.cpp:218] Iteration 8256 (2.38859 iter/s, 5.02389s/12 iters), loss = 0.0535846
I0409 22:29:42.465741 25006 solver.cpp:237] Train net output #0: loss = 0.0535845 (* 1 = 0.0535845 loss)
I0409 22:29:42.465754 25006 sgd_solver.cpp:105] Iteration 8256, lr = 0.00194875
I0409 22:29:44.460752 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_8262.caffemodel
I0409 22:29:52.763303 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_8262.solverstate
I0409 22:29:56.045686 25006 solver.cpp:330] Iteration 8262, Testing net (#0)
I0409 22:29:56.045714 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:29:57.287473 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:30:00.517707 25006 solver.cpp:397] Test net output #0: accuracy = 0.269608
I0409 22:30:00.517793 25006 solver.cpp:397] Test net output #1: loss = 5.6053 (* 1 = 5.6053 loss)
I0409 22:30:02.336163 25006 solver.cpp:218] Iteration 8268 (0.603937 iter/s, 19.8696s/12 iters), loss = 0.0726525
I0409 22:30:02.336223 25006 solver.cpp:237] Train net output #0: loss = 0.0726524 (* 1 = 0.0726524 loss)
I0409 22:30:02.336236 25006 sgd_solver.cpp:105] Iteration 8268, lr = 0.00194412
I0409 22:30:07.322248 25006 solver.cpp:218] Iteration 8280 (2.40683 iter/s, 4.98581s/12 iters), loss = 0.0610484
I0409 22:30:07.322300 25006 solver.cpp:237] Train net output #0: loss = 0.0610483 (* 1 = 0.0610483 loss)
I0409 22:30:07.322312 25006 sgd_solver.cpp:105] Iteration 8280, lr = 0.00193951
I0409 22:30:12.715095 25006 solver.cpp:218] Iteration 8292 (2.22529 iter/s, 5.39256s/12 iters), loss = 0.0686685
I0409 22:30:12.715144 25006 solver.cpp:237] Train net output #0: loss = 0.0686684 (* 1 = 0.0686684 loss)
I0409 22:30:12.715154 25006 sgd_solver.cpp:105] Iteration 8292, lr = 0.0019349
I0409 22:30:13.407466 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:30:17.711967 25006 solver.cpp:218] Iteration 8304 (2.40163 iter/s, 4.99661s/12 iters), loss = 0.100962
I0409 22:30:17.712024 25006 solver.cpp:237] Train net output #0: loss = 0.100962 (* 1 = 0.100962 loss)
I0409 22:30:17.712038 25006 sgd_solver.cpp:105] Iteration 8304, lr = 0.00193031
I0409 22:30:20.604032 25006 blocking_queue.cpp:49] Waiting for data
I0409 22:30:22.753089 25006 solver.cpp:218] Iteration 8316 (2.38055 iter/s, 5.04085s/12 iters), loss = 0.127197
I0409 22:30:22.753131 25006 solver.cpp:237] Train net output #0: loss = 0.127197 (* 1 = 0.127197 loss)
I0409 22:30:22.753141 25006 sgd_solver.cpp:105] Iteration 8316, lr = 0.00192573
I0409 22:30:27.782142 25006 solver.cpp:218] Iteration 8328 (2.38626 iter/s, 5.02879s/12 iters), loss = 0.0933901
I0409 22:30:27.782200 25006 solver.cpp:237] Train net output #0: loss = 0.0933901 (* 1 = 0.0933901 loss)
I0409 22:30:27.782213 25006 sgd_solver.cpp:105] Iteration 8328, lr = 0.00192115
I0409 22:30:32.893409 25006 solver.cpp:218] Iteration 8340 (2.34788 iter/s, 5.11099s/12 iters), loss = 0.20673
I0409 22:30:32.893523 25006 solver.cpp:237] Train net output #0: loss = 0.20673 (* 1 = 0.20673 loss)
I0409 22:30:32.893537 25006 sgd_solver.cpp:105] Iteration 8340, lr = 0.00191659
I0409 22:30:37.946802 25006 solver.cpp:218] Iteration 8352 (2.3748 iter/s, 5.05306s/12 iters), loss = 0.0904284
I0409 22:30:37.946853 25006 solver.cpp:237] Train net output #0: loss = 0.0904283 (* 1 = 0.0904283 loss)
I0409 22:30:37.946864 25006 sgd_solver.cpp:105] Iteration 8352, lr = 0.00191204
I0409 22:30:42.496157 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_8364.caffemodel
I0409 22:30:53.133888 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_8364.solverstate
I0409 22:30:59.174036 25006 solver.cpp:330] Iteration 8364, Testing net (#0)
I0409 22:30:59.174065 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:31:00.366698 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:31:03.641446 25006 solver.cpp:397] Test net output #0: accuracy = 0.268995
I0409 22:31:03.641585 25006 solver.cpp:397] Test net output #1: loss = 5.64263 (* 1 = 5.64263 loss)
I0409 22:31:03.731865 25006 solver.cpp:218] Iteration 8364 (0.465406 iter/s, 25.784s/12 iters), loss = 0.204289
I0409 22:31:03.731923 25006 solver.cpp:237] Train net output #0: loss = 0.204289 (* 1 = 0.204289 loss)
I0409 22:31:03.731936 25006 sgd_solver.cpp:105] Iteration 8364, lr = 0.0019075
I0409 22:31:07.926120 25006 solver.cpp:218] Iteration 8376 (2.86122 iter/s, 4.19402s/12 iters), loss = 0.128588
I0409 22:31:07.926163 25006 solver.cpp:237] Train net output #0: loss = 0.128588 (* 1 = 0.128588 loss)
I0409 22:31:07.926175 25006 sgd_solver.cpp:105] Iteration 8376, lr = 0.00190297
I0409 22:31:12.923017 25006 solver.cpp:218] Iteration 8388 (2.40161 iter/s, 4.99664s/12 iters), loss = 0.0520522
I0409 22:31:12.923061 25006 solver.cpp:237] Train net output #0: loss = 0.0520521 (* 1 = 0.0520521 loss)
I0409 22:31:12.923075 25006 sgd_solver.cpp:105] Iteration 8388, lr = 0.00189846
I0409 22:31:15.735119 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:31:17.917862 25006 solver.cpp:218] Iteration 8400 (2.4026 iter/s, 4.99458s/12 iters), loss = 0.0763835
I0409 22:31:17.917918 25006 solver.cpp:237] Train net output #0: loss = 0.0763834 (* 1 = 0.0763834 loss)
I0409 22:31:17.917932 25006 sgd_solver.cpp:105] Iteration 8400, lr = 0.00189395
I0409 22:31:22.980053 25006 solver.cpp:218] Iteration 8412 (2.37065 iter/s, 5.06191s/12 iters), loss = 0.118711
I0409 22:31:22.980106 25006 solver.cpp:237] Train net output #0: loss = 0.118711 (* 1 = 0.118711 loss)
I0409 22:31:22.980118 25006 sgd_solver.cpp:105] Iteration 8412, lr = 0.00188945
I0409 22:31:27.983006 25006 solver.cpp:218] Iteration 8424 (2.39871 iter/s, 5.00269s/12 iters), loss = 0.107819
I0409 22:31:27.983057 25006 solver.cpp:237] Train net output #0: loss = 0.107819 (* 1 = 0.107819 loss)
I0409 22:31:27.983068 25006 sgd_solver.cpp:105] Iteration 8424, lr = 0.00188497
I0409 22:31:32.986285 25006 solver.cpp:218] Iteration 8436 (2.39856 iter/s, 5.00301s/12 iters), loss = 0.0934219
I0409 22:31:32.986344 25006 solver.cpp:237] Train net output #0: loss = 0.0934218 (* 1 = 0.0934218 loss)
I0409 22:31:32.986357 25006 sgd_solver.cpp:105] Iteration 8436, lr = 0.00188049
I0409 22:31:38.051975 25006 solver.cpp:218] Iteration 8448 (2.36901 iter/s, 5.06541s/12 iters), loss = 0.063605
I0409 22:31:38.052135 25006 solver.cpp:237] Train net output #0: loss = 0.0636049 (* 1 = 0.0636049 loss)
I0409 22:31:38.052150 25006 sgd_solver.cpp:105] Iteration 8448, lr = 0.00187603
I0409 22:31:43.023206 25006 solver.cpp:218] Iteration 8460 (2.41407 iter/s, 4.97086s/12 iters), loss = 0.0520086
I0409 22:31:43.023258 25006 solver.cpp:237] Train net output #0: loss = 0.0520084 (* 1 = 0.0520084 loss)
I0409 22:31:43.023269 25006 sgd_solver.cpp:105] Iteration 8460, lr = 0.00187157
I0409 22:31:45.042810 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_8466.caffemodel
I0409 22:31:48.788431 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_8466.solverstate
I0409 22:31:52.394838 25006 solver.cpp:330] Iteration 8466, Testing net (#0)
I0409 22:31:52.394865 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:31:53.560420 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:31:56.965242 25006 solver.cpp:397] Test net output #0: accuracy = 0.265319
I0409 22:31:56.965281 25006 solver.cpp:397] Test net output #1: loss = 5.73137 (* 1 = 5.73137 loss)
I0409 22:31:58.908481 25006 solver.cpp:218] Iteration 8472 (0.75545 iter/s, 15.8846s/12 iters), loss = 0.121386
I0409 22:31:58.908540 25006 solver.cpp:237] Train net output #0: loss = 0.121386 (* 1 = 0.121386 loss)
I0409 22:31:58.908555 25006 sgd_solver.cpp:105] Iteration 8472, lr = 0.00186713
I0409 22:32:04.062157 25006 solver.cpp:218] Iteration 8484 (2.32856 iter/s, 5.15339s/12 iters), loss = 0.0533975
I0409 22:32:04.062206 25006 solver.cpp:237] Train net output #0: loss = 0.0533974 (* 1 = 0.0533974 loss)
I0409 22:32:04.062216 25006 sgd_solver.cpp:105] Iteration 8484, lr = 0.0018627
I0409 22:32:09.060981 25006 solver.cpp:218] Iteration 8496 (2.40069 iter/s, 4.99856s/12 iters), loss = 0.0396055
I0409 22:32:09.061100 25006 solver.cpp:237] Train net output #0: loss = 0.0396054 (* 1 = 0.0396054 loss)
I0409 22:32:09.061112 25006 sgd_solver.cpp:105] Iteration 8496, lr = 0.00185827
I0409 22:32:09.099678 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:32:14.159909 25006 solver.cpp:218] Iteration 8508 (2.35359 iter/s, 5.0986s/12 iters), loss = 0.125496
I0409 22:32:14.159948 25006 solver.cpp:237] Train net output #0: loss = 0.125496 (* 1 = 0.125496 loss)
I0409 22:32:14.159957 25006 sgd_solver.cpp:105] Iteration 8508, lr = 0.00185386
I0409 22:32:19.111263 25006 solver.cpp:218] Iteration 8520 (2.4237 iter/s, 4.9511s/12 iters), loss = 0.177266
I0409 22:32:19.111307 25006 solver.cpp:237] Train net output #0: loss = 0.177266 (* 1 = 0.177266 loss)
I0409 22:32:19.111317 25006 sgd_solver.cpp:105] Iteration 8520, lr = 0.00184946
I0409 22:32:24.047276 25006 solver.cpp:218] Iteration 8532 (2.43124 iter/s, 4.93575s/12 iters), loss = 0.0815766
I0409 22:32:24.047330 25006 solver.cpp:237] Train net output #0: loss = 0.0815765 (* 1 = 0.0815765 loss)
I0409 22:32:24.047343 25006 sgd_solver.cpp:105] Iteration 8532, lr = 0.00184507
I0409 22:32:28.957152 25006 solver.cpp:218] Iteration 8544 (2.44418 iter/s, 4.90961s/12 iters), loss = 0.0694151
I0409 22:32:28.957203 25006 solver.cpp:237] Train net output #0: loss = 0.069415 (* 1 = 0.069415 loss)
I0409 22:32:28.957216 25006 sgd_solver.cpp:105] Iteration 8544, lr = 0.00184069
I0409 22:32:33.875056 25006 solver.cpp:218] Iteration 8556 (2.44019 iter/s, 4.91764s/12 iters), loss = 0.249156
I0409 22:32:33.875097 25006 solver.cpp:237] Train net output #0: loss = 0.249156 (* 1 = 0.249156 loss)
I0409 22:32:33.875106 25006 sgd_solver.cpp:105] Iteration 8556, lr = 0.00183632
I0409 22:32:38.463277 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_8568.caffemodel
I0409 22:32:40.641719 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_8568.solverstate
I0409 22:32:42.287585 25006 solver.cpp:330] Iteration 8568, Testing net (#0)
I0409 22:32:42.287611 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:32:43.500624 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:32:46.864248 25006 solver.cpp:397] Test net output #0: accuracy = 0.268995
I0409 22:32:46.864289 25006 solver.cpp:397] Test net output #1: loss = 5.59396 (* 1 = 5.59396 loss)
I0409 22:32:46.953352 25006 solver.cpp:218] Iteration 8568 (0.917592 iter/s, 13.0777s/12 iters), loss = 0.152108
I0409 22:32:46.953404 25006 solver.cpp:237] Train net output #0: loss = 0.152107 (* 1 = 0.152107 loss)
I0409 22:32:46.953414 25006 sgd_solver.cpp:105] Iteration 8568, lr = 0.00183196
I0409 22:32:51.288156 25006 solver.cpp:218] Iteration 8580 (2.76845 iter/s, 4.33456s/12 iters), loss = 0.0452824
I0409 22:32:51.288206 25006 solver.cpp:237] Train net output #0: loss = 0.0452823 (* 1 = 0.0452823 loss)
I0409 22:32:51.288218 25006 sgd_solver.cpp:105] Iteration 8580, lr = 0.00182761
I0409 22:32:56.258770 25006 solver.cpp:218] Iteration 8592 (2.41432 iter/s, 4.97035s/12 iters), loss = 0.0916002
I0409 22:32:56.258824 25006 solver.cpp:237] Train net output #0: loss = 0.0916001 (* 1 = 0.0916001 loss)
I0409 22:32:56.258836 25006 sgd_solver.cpp:105] Iteration 8592, lr = 0.00182327
I0409 22:32:58.422262 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:33:01.272084 25006 solver.cpp:218] Iteration 8604 (2.39376 iter/s, 5.01304s/12 iters), loss = 0.0483322
I0409 22:33:01.272140 25006 solver.cpp:237] Train net output #0: loss = 0.0483321 (* 1 = 0.0483321 loss)
I0409 22:33:01.272153 25006 sgd_solver.cpp:105] Iteration 8604, lr = 0.00181894
I0409 22:33:06.279192 25006 solver.cpp:218] Iteration 8616 (2.39672 iter/s, 5.00684s/12 iters), loss = 0.0571574
I0409 22:33:06.279244 25006 solver.cpp:237] Train net output #0: loss = 0.0571573 (* 1 = 0.0571573 loss)
I0409 22:33:06.279253 25006 sgd_solver.cpp:105] Iteration 8616, lr = 0.00181462
I0409 22:33:11.237346 25006 solver.cpp:218] Iteration 8628 (2.42038 iter/s, 4.95789s/12 iters), loss = 0.154568
I0409 22:33:11.237444 25006 solver.cpp:237] Train net output #0: loss = 0.154568 (* 1 = 0.154568 loss)
I0409 22:33:11.237455 25006 sgd_solver.cpp:105] Iteration 8628, lr = 0.00181031
I0409 22:33:16.296311 25006 solver.cpp:218] Iteration 8640 (2.37217 iter/s, 5.05865s/12 iters), loss = 0.0844565
I0409 22:33:16.296355 25006 solver.cpp:237] Train net output #0: loss = 0.0844564 (* 1 = 0.0844564 loss)
I0409 22:33:16.296363 25006 sgd_solver.cpp:105] Iteration 8640, lr = 0.00180602
I0409 22:33:21.296293 25006 solver.cpp:218] Iteration 8652 (2.40013 iter/s, 4.99973s/12 iters), loss = 0.0975366
I0409 22:33:21.296326 25006 solver.cpp:237] Train net output #0: loss = 0.0975365 (* 1 = 0.0975365 loss)
I0409 22:33:21.296334 25006 sgd_solver.cpp:105] Iteration 8652, lr = 0.00180173
I0409 22:33:26.326730 25006 solver.cpp:218] Iteration 8664 (2.3856 iter/s, 5.03018s/12 iters), loss = 0.0566142
I0409 22:33:26.326779 25006 solver.cpp:237] Train net output #0: loss = 0.056614 (* 1 = 0.056614 loss)
I0409 22:33:26.326790 25006 sgd_solver.cpp:105] Iteration 8664, lr = 0.00179745
I0409 22:33:28.341218 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_8670.caffemodel
I0409 22:33:43.221210 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_8670.solverstate
I0409 22:33:54.676036 25006 solver.cpp:330] Iteration 8670, Testing net (#0)
I0409 22:33:54.676070 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:33:55.635450 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:33:59.031178 25006 solver.cpp:397] Test net output #0: accuracy = 0.267157
I0409 22:33:59.031225 25006 solver.cpp:397] Test net output #1: loss = 5.58036 (* 1 = 5.58036 loss)
I0409 22:34:00.905823 25006 solver.cpp:218] Iteration 8676 (0.347045 iter/s, 34.5776s/12 iters), loss = 0.0780327
I0409 22:34:00.905885 25006 solver.cpp:237] Train net output #0: loss = 0.0780326 (* 1 = 0.0780326 loss)
I0409 22:34:00.905900 25006 sgd_solver.cpp:105] Iteration 8676, lr = 0.00179318
I0409 22:34:05.830796 25006 solver.cpp:218] Iteration 8688 (2.43669 iter/s, 4.92471s/12 iters), loss = 0.122942
I0409 22:34:05.830833 25006 solver.cpp:237] Train net output #0: loss = 0.122942 (* 1 = 0.122942 loss)
I0409 22:34:05.830845 25006 sgd_solver.cpp:105] Iteration 8688, lr = 0.00178893
I0409 22:34:10.102604 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:34:10.775080 25006 solver.cpp:218] Iteration 8700 (2.42717 iter/s, 4.94404s/12 iters), loss = 0.0361996
I0409 22:34:10.775128 25006 solver.cpp:237] Train net output #0: loss = 0.0361995 (* 1 = 0.0361995 loss)
I0409 22:34:10.775141 25006 sgd_solver.cpp:105] Iteration 8700, lr = 0.00178468
I0409 22:34:15.693790 25006 solver.cpp:218] Iteration 8712 (2.43979 iter/s, 4.91845s/12 iters), loss = 0.0849813
I0409 22:34:15.693913 25006 solver.cpp:237] Train net output #0: loss = 0.0849812 (* 1 = 0.0849812 loss)
I0409 22:34:15.693928 25006 sgd_solver.cpp:105] Iteration 8712, lr = 0.00178044
I0409 22:34:20.773066 25006 solver.cpp:218] Iteration 8724 (2.3627 iter/s, 5.07894s/12 iters), loss = 0.0316799
I0409 22:34:20.773113 25006 solver.cpp:237] Train net output #0: loss = 0.0316798 (* 1 = 0.0316798 loss)
I0409 22:34:20.773121 25006 sgd_solver.cpp:105] Iteration 8724, lr = 0.00177621
I0409 22:34:25.897186 25006 solver.cpp:218] Iteration 8736 (2.34199 iter/s, 5.12386s/12 iters), loss = 0.176477
I0409 22:34:25.897226 25006 solver.cpp:237] Train net output #0: loss = 0.176477 (* 1 = 0.176477 loss)
I0409 22:34:25.897235 25006 sgd_solver.cpp:105] Iteration 8736, lr = 0.001772
I0409 22:34:30.946487 25006 solver.cpp:218] Iteration 8748 (2.37669 iter/s, 5.04903s/12 iters), loss = 0.0776436
I0409 22:34:30.946545 25006 solver.cpp:237] Train net output #0: loss = 0.0776435 (* 1 = 0.0776435 loss)
I0409 22:34:30.946559 25006 sgd_solver.cpp:105] Iteration 8748, lr = 0.00176779
I0409 22:34:35.918607 25006 solver.cpp:218] Iteration 8760 (2.41359 iter/s, 4.97185s/12 iters), loss = 0.0498544
I0409 22:34:35.918655 25006 solver.cpp:237] Train net output #0: loss = 0.0498543 (* 1 = 0.0498543 loss)
I0409 22:34:35.918665 25006 sgd_solver.cpp:105] Iteration 8760, lr = 0.00176359
I0409 22:34:40.459443 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_8772.caffemodel
I0409 22:34:44.229481 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_8772.solverstate
I0409 22:34:46.363785 25006 solver.cpp:330] Iteration 8772, Testing net (#0)
I0409 22:34:46.363893 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:34:47.371922 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:34:50.861049 25006 solver.cpp:397] Test net output #0: accuracy = 0.261029
I0409 22:34:50.861099 25006 solver.cpp:397] Test net output #1: loss = 5.55421 (* 1 = 5.55421 loss)
I0409 22:34:50.948216 25006 solver.cpp:218] Iteration 8772 (0.79846 iter/s, 15.0289s/12 iters), loss = 0.134067
I0409 22:34:50.948269 25006 solver.cpp:237] Train net output #0: loss = 0.134067 (* 1 = 0.134067 loss)
I0409 22:34:50.948282 25006 sgd_solver.cpp:105] Iteration 8772, lr = 0.00175941
I0409 22:34:55.337785 25006 solver.cpp:218] Iteration 8784 (2.73391 iter/s, 4.38932s/12 iters), loss = 0.0572344
I0409 22:34:55.337844 25006 solver.cpp:237] Train net output #0: loss = 0.0572343 (* 1 = 0.0572343 loss)
I0409 22:34:55.337857 25006 sgd_solver.cpp:105] Iteration 8784, lr = 0.00175523
I0409 22:35:00.346782 25006 solver.cpp:218] Iteration 8796 (2.39582 iter/s, 5.00872s/12 iters), loss = 0.0550063
I0409 22:35:00.346834 25006 solver.cpp:237] Train net output #0: loss = 0.0550062 (* 1 = 0.0550062 loss)
I0409 22:35:00.346846 25006 sgd_solver.cpp:105] Iteration 8796, lr = 0.00175106
I0409 22:35:01.796448 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:35:05.460781 25006 solver.cpp:218] Iteration 8808 (2.34663 iter/s, 5.11372s/12 iters), loss = 0.0553047
I0409 22:35:05.460841 25006 solver.cpp:237] Train net output #0: loss = 0.0553046 (* 1 = 0.0553046 loss)
I0409 22:35:05.460855 25006 sgd_solver.cpp:105] Iteration 8808, lr = 0.0017469
I0409 22:35:10.541326 25006 solver.cpp:218] Iteration 8820 (2.36208 iter/s, 5.08027s/12 iters), loss = 0.0733393
I0409 22:35:10.541383 25006 solver.cpp:237] Train net output #0: loss = 0.0733392 (* 1 = 0.0733392 loss)
I0409 22:35:10.541396 25006 sgd_solver.cpp:105] Iteration 8820, lr = 0.00174276
I0409 22:35:15.506027 25006 solver.cpp:218] Iteration 8832 (2.41719 iter/s, 4.96443s/12 iters), loss = 0.0267894
I0409 22:35:15.506084 25006 solver.cpp:237] Train net output #0: loss = 0.0267893 (* 1 = 0.0267893 loss)
I0409 22:35:15.506098 25006 sgd_solver.cpp:105] Iteration 8832, lr = 0.00173862
I0409 22:35:20.441572 25006 solver.cpp:218] Iteration 8844 (2.43147 iter/s, 4.93528s/12 iters), loss = 0.333612
I0409 22:35:20.441648 25006 solver.cpp:237] Train net output #0: loss = 0.333612 (* 1 = 0.333612 loss)
I0409 22:35:20.441660 25006 sgd_solver.cpp:105] Iteration 8844, lr = 0.00173449
I0409 22:35:25.377516 25006 solver.cpp:218] Iteration 8856 (2.43129 iter/s, 4.93565s/12 iters), loss = 0.0667375
I0409 22:35:25.377573 25006 solver.cpp:237] Train net output #0: loss = 0.0667375 (* 1 = 0.0667375 loss)
I0409 22:35:25.377584 25006 sgd_solver.cpp:105] Iteration 8856, lr = 0.00173037
I0409 22:35:30.379187 25006 solver.cpp:218] Iteration 8868 (2.39933 iter/s, 5.0014s/12 iters), loss = 0.146447
I0409 22:35:30.379245 25006 solver.cpp:237] Train net output #0: loss = 0.146447 (* 1 = 0.146447 loss)
I0409 22:35:30.379258 25006 sgd_solver.cpp:105] Iteration 8868, lr = 0.00172626
I0409 22:35:32.427261 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_8874.caffemodel
I0409 22:35:38.448746 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_8874.solverstate
I0409 22:35:40.729228 25006 solver.cpp:330] Iteration 8874, Testing net (#0)
I0409 22:35:40.729251 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:35:41.719429 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:35:45.327488 25006 solver.cpp:397] Test net output #0: accuracy = 0.264706
I0409 22:35:45.327533 25006 solver.cpp:397] Test net output #1: loss = 5.56386 (* 1 = 5.56386 loss)
I0409 22:35:47.301872 25006 solver.cpp:218] Iteration 8880 (0.709139 iter/s, 16.9219s/12 iters), loss = 0.0893629
I0409 22:35:47.301930 25006 solver.cpp:237] Train net output #0: loss = 0.0893628 (* 1 = 0.0893628 loss)
I0409 22:35:47.301942 25006 sgd_solver.cpp:105] Iteration 8880, lr = 0.00172217
I0409 22:35:52.371433 25006 solver.cpp:218] Iteration 8892 (2.3672 iter/s, 5.06928s/12 iters), loss = 0.0599476
I0409 22:35:52.371551 25006 solver.cpp:237] Train net output #0: loss = 0.0599475 (* 1 = 0.0599475 loss)
I0409 22:35:52.371563 25006 sgd_solver.cpp:105] Iteration 8892, lr = 0.00171808
I0409 22:35:55.953207 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:35:57.366654 25006 solver.cpp:218] Iteration 8904 (2.40245 iter/s, 4.99489s/12 iters), loss = 0.0863173
I0409 22:35:57.366696 25006 solver.cpp:237] Train net output #0: loss = 0.0863172 (* 1 = 0.0863172 loss)
I0409 22:35:57.366706 25006 sgd_solver.cpp:105] Iteration 8904, lr = 0.001714
I0409 22:36:02.395589 25006 solver.cpp:218] Iteration 8916 (2.38631 iter/s, 5.02868s/12 iters), loss = 0.125407
I0409 22:36:02.395635 25006 solver.cpp:237] Train net output #0: loss = 0.125407 (* 1 = 0.125407 loss)
I0409 22:36:02.395645 25006 sgd_solver.cpp:105] Iteration 8916, lr = 0.00170993
I0409 22:36:07.350142 25006 solver.cpp:218] Iteration 8928 (2.42214 iter/s, 4.95429s/12 iters), loss = 0.101872
I0409 22:36:07.350193 25006 solver.cpp:237] Train net output #0: loss = 0.101872 (* 1 = 0.101872 loss)
I0409 22:36:07.350204 25006 sgd_solver.cpp:105] Iteration 8928, lr = 0.00170587
I0409 22:36:12.319628 25006 solver.cpp:218] Iteration 8940 (2.41487 iter/s, 4.96922s/12 iters), loss = 0.0919337
I0409 22:36:12.319680 25006 solver.cpp:237] Train net output #0: loss = 0.0919336 (* 1 = 0.0919336 loss)
I0409 22:36:12.319694 25006 sgd_solver.cpp:105] Iteration 8940, lr = 0.00170182
I0409 22:36:17.309573 25006 solver.cpp:218] Iteration 8952 (2.40496 iter/s, 4.98968s/12 iters), loss = 0.268714
I0409 22:36:17.309612 25006 solver.cpp:237] Train net output #0: loss = 0.268714 (* 1 = 0.268714 loss)
I0409 22:36:17.309619 25006 sgd_solver.cpp:105] Iteration 8952, lr = 0.00169778
I0409 22:36:22.290886 25006 solver.cpp:218] Iteration 8964 (2.40913 iter/s, 4.98106s/12 iters), loss = 0.0623722
I0409 22:36:22.290935 25006 solver.cpp:237] Train net output #0: loss = 0.0623721 (* 1 = 0.0623721 loss)
I0409 22:36:22.290946 25006 sgd_solver.cpp:105] Iteration 8964, lr = 0.00169375
I0409 22:36:26.812201 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_8976.caffemodel
I0409 22:36:39.496127 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_8976.solverstate
I0409 22:36:42.732946 25006 solver.cpp:330] Iteration 8976, Testing net (#0)
I0409 22:36:42.732966 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:36:43.566299 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:36:47.072381 25006 solver.cpp:397] Test net output #0: accuracy = 0.276348
I0409 22:36:47.072413 25006 solver.cpp:397] Test net output #1: loss = 5.59008 (* 1 = 5.59008 loss)
I0409 22:36:47.161317 25006 solver.cpp:218] Iteration 8976 (0.482521 iter/s, 24.8694s/12 iters), loss = 0.155649
I0409 22:36:47.161373 25006 solver.cpp:237] Train net output #0: loss = 0.155649 (* 1 = 0.155649 loss)
I0409 22:36:47.161384 25006 sgd_solver.cpp:105] Iteration 8976, lr = 0.00168973
I0409 22:36:51.438041 25006 solver.cpp:218] Iteration 8988 (2.80605 iter/s, 4.27647s/12 iters), loss = 0.112251
I0409 22:36:51.438100 25006 solver.cpp:237] Train net output #0: loss = 0.112251 (* 1 = 0.112251 loss)
I0409 22:36:51.438113 25006 sgd_solver.cpp:105] Iteration 8988, lr = 0.00168571
I0409 22:36:54.713094 25006 blocking_queue.cpp:49] Waiting for data
I0409 22:36:56.415616 25006 solver.cpp:218] Iteration 9000 (2.41095 iter/s, 4.9773s/12 iters), loss = 0.107514
I0409 22:36:56.415668 25006 solver.cpp:237] Train net output #0: loss = 0.107514 (* 1 = 0.107514 loss)
I0409 22:36:56.415680 25006 sgd_solver.cpp:105] Iteration 9000, lr = 0.00168171
I0409 22:36:57.135815 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:37:01.395093 25006 solver.cpp:218] Iteration 9012 (2.41002 iter/s, 4.97921s/12 iters), loss = 0.0429282
I0409 22:37:01.395143 25006 solver.cpp:237] Train net output #0: loss = 0.0429281 (* 1 = 0.0429281 loss)
I0409 22:37:01.395153 25006 sgd_solver.cpp:105] Iteration 9012, lr = 0.00167772
I0409 22:37:06.385099 25006 solver.cpp:218] Iteration 9024 (2.40494 iter/s, 4.98974s/12 iters), loss = 0.124733
I0409 22:37:06.385154 25006 solver.cpp:237] Train net output #0: loss = 0.124733 (* 1 = 0.124733 loss)
I0409 22:37:06.385165 25006 sgd_solver.cpp:105] Iteration 9024, lr = 0.00167374
I0409 22:37:11.375262 25006 solver.cpp:218] Iteration 9036 (2.40486 iter/s, 4.98989s/12 iters), loss = 0.0915062
I0409 22:37:11.375301 25006 solver.cpp:237] Train net output #0: loss = 0.0915061 (* 1 = 0.0915061 loss)
I0409 22:37:11.375310 25006 sgd_solver.cpp:105] Iteration 9036, lr = 0.00166976
I0409 22:37:16.412207 25006 solver.cpp:218] Iteration 9048 (2.38252 iter/s, 5.03669s/12 iters), loss = 0.117027
I0409 22:37:16.412261 25006 solver.cpp:237] Train net output #0: loss = 0.117026 (* 1 = 0.117026 loss)
I0409 22:37:16.412273 25006 sgd_solver.cpp:105] Iteration 9048, lr = 0.0016658
I0409 22:37:21.516129 25006 solver.cpp:218] Iteration 9060 (2.35126 iter/s, 5.10365s/12 iters), loss = 0.124413
I0409 22:37:21.516172 25006 solver.cpp:237] Train net output #0: loss = 0.124413 (* 1 = 0.124413 loss)
I0409 22:37:21.516181 25006 sgd_solver.cpp:105] Iteration 9060, lr = 0.00166184
I0409 22:37:26.709744 25006 solver.cpp:218] Iteration 9072 (2.31065 iter/s, 5.19335s/12 iters), loss = 0.138954
I0409 22:37:26.709791 25006 solver.cpp:237] Train net output #0: loss = 0.138954 (* 1 = 0.138954 loss)
I0409 22:37:26.709803 25006 sgd_solver.cpp:105] Iteration 9072, lr = 0.0016579
I0409 22:37:28.849210 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_9078.caffemodel
I0409 22:37:36.076901 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_9078.solverstate
I0409 22:37:40.274472 25006 solver.cpp:330] Iteration 9078, Testing net (#0)
I0409 22:37:40.274499 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:37:41.346745 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:37:44.932934 25006 solver.cpp:397] Test net output #0: accuracy = 0.266544
I0409 22:37:44.932981 25006 solver.cpp:397] Test net output #1: loss = 5.8088 (* 1 = 5.8088 loss)
I0409 22:37:46.786867 25006 solver.cpp:218] Iteration 9084 (0.597721 iter/s, 20.0763s/12 iters), loss = 0.0775044
I0409 22:37:46.786924 25006 solver.cpp:237] Train net output #0: loss = 0.0775042 (* 1 = 0.0775042 loss)
I0409 22:37:46.786936 25006 sgd_solver.cpp:105] Iteration 9084, lr = 0.00165396
I0409 22:37:51.756045 25006 solver.cpp:218] Iteration 9096 (2.41502 iter/s, 4.9689s/12 iters), loss = 0.0731473
I0409 22:37:51.756110 25006 solver.cpp:237] Train net output #0: loss = 0.0731472 (* 1 = 0.0731472 loss)
I0409 22:37:51.756124 25006 sgd_solver.cpp:105] Iteration 9096, lr = 0.00165003
I0409 22:37:54.617152 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:37:56.627952 25006 solver.cpp:218] Iteration 9108 (2.46324 iter/s, 4.87163s/12 iters), loss = 0.106504
I0409 22:37:56.628011 25006 solver.cpp:237] Train net output #0: loss = 0.106504 (* 1 = 0.106504 loss)
I0409 22:37:56.628024 25006 sgd_solver.cpp:105] Iteration 9108, lr = 0.00164612
I0409 22:38:01.513659 25006 solver.cpp:218] Iteration 9120 (2.45628 iter/s, 4.88544s/12 iters), loss = 0.102914
I0409 22:38:01.513737 25006 solver.cpp:237] Train net output #0: loss = 0.102914 (* 1 = 0.102914 loss)
I0409 22:38:01.513748 25006 sgd_solver.cpp:105] Iteration 9120, lr = 0.00164221
I0409 22:38:06.379827 25006 solver.cpp:218] Iteration 9132 (2.46615 iter/s, 4.86588s/12 iters), loss = 0.129393
I0409 22:38:06.379886 25006 solver.cpp:237] Train net output #0: loss = 0.129393 (* 1 = 0.129393 loss)
I0409 22:38:06.379897 25006 sgd_solver.cpp:105] Iteration 9132, lr = 0.00163831
I0409 22:38:11.499213 25006 solver.cpp:218] Iteration 9144 (2.34416 iter/s, 5.1191s/12 iters), loss = 0.0858623
I0409 22:38:11.499265 25006 solver.cpp:237] Train net output #0: loss = 0.0858622 (* 1 = 0.0858622 loss)
I0409 22:38:11.499277 25006 sgd_solver.cpp:105] Iteration 9144, lr = 0.00163442
I0409 22:38:16.573887 25006 solver.cpp:218] Iteration 9156 (2.36481 iter/s, 5.0744s/12 iters), loss = 0.0334542
I0409 22:38:16.573932 25006 solver.cpp:237] Train net output #0: loss = 0.033454 (* 1 = 0.033454 loss)
I0409 22:38:16.573942 25006 sgd_solver.cpp:105] Iteration 9156, lr = 0.00163054
I0409 22:38:21.578405 25006 solver.cpp:218] Iteration 9168 (2.39796 iter/s, 5.00426s/12 iters), loss = 0.0991065
I0409 22:38:21.578454 25006 solver.cpp:237] Train net output #0: loss = 0.0991064 (* 1 = 0.0991064 loss)
I0409 22:38:21.578464 25006 sgd_solver.cpp:105] Iteration 9168, lr = 0.00162667
I0409 22:38:26.096081 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_9180.caffemodel
I0409 22:38:29.362586 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_9180.solverstate
I0409 22:38:33.203387 25006 solver.cpp:330] Iteration 9180, Testing net (#0)
I0409 22:38:33.203485 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:38:34.056092 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:38:37.665633 25006 solver.cpp:397] Test net output #0: accuracy = 0.278799
I0409 22:38:37.665681 25006 solver.cpp:397] Test net output #1: loss = 5.77403 (* 1 = 5.77403 loss)
I0409 22:38:37.754868 25006 solver.cpp:218] Iteration 9180 (0.741851 iter/s, 16.1757s/12 iters), loss = 0.142284
I0409 22:38:37.754927 25006 solver.cpp:237] Train net output #0: loss = 0.142284 (* 1 = 0.142284 loss)
I0409 22:38:37.754940 25006 sgd_solver.cpp:105] Iteration 9180, lr = 0.00162281
I0409 22:38:42.140347 25006 solver.cpp:218] Iteration 9192 (2.73646 iter/s, 4.38523s/12 iters), loss = 0.13541
I0409 22:38:42.140408 25006 solver.cpp:237] Train net output #0: loss = 0.13541 (* 1 = 0.13541 loss)
I0409 22:38:42.140424 25006 sgd_solver.cpp:105] Iteration 9192, lr = 0.00161895
I0409 22:38:47.352381 25006 solver.cpp:218] Iteration 9204 (2.30249 iter/s, 5.21175s/12 iters), loss = 0.070993
I0409 22:38:47.352421 25006 solver.cpp:237] Train net output #0: loss = 0.0709928 (* 1 = 0.0709928 loss)
I0409 22:38:47.352429 25006 sgd_solver.cpp:105] Iteration 9204, lr = 0.00161511
I0409 22:38:47.434799 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:38:52.369264 25006 solver.cpp:218] Iteration 9216 (2.39205 iter/s, 5.01662s/12 iters), loss = 0.0473672
I0409 22:38:52.369326 25006 solver.cpp:237] Train net output #0: loss = 0.047367 (* 1 = 0.047367 loss)
I0409 22:38:52.369339 25006 sgd_solver.cpp:105] Iteration 9216, lr = 0.00161128
I0409 22:38:57.356251 25006 solver.cpp:218] Iteration 9228 (2.40639 iter/s, 4.98672s/12 iters), loss = 0.109343
I0409 22:38:57.356293 25006 solver.cpp:237] Train net output #0: loss = 0.109343 (* 1 = 0.109343 loss)
I0409 22:38:57.356304 25006 sgd_solver.cpp:105] Iteration 9228, lr = 0.00160745
I0409 22:39:02.347290 25006 solver.cpp:218] Iteration 9240 (2.40443 iter/s, 4.99078s/12 iters), loss = 0.0271437
I0409 22:39:02.347335 25006 solver.cpp:237] Train net output #0: loss = 0.0271435 (* 1 = 0.0271435 loss)
I0409 22:39:02.347344 25006 sgd_solver.cpp:105] Iteration 9240, lr = 0.00160363
I0409 22:39:07.303647 25006 solver.cpp:218] Iteration 9252 (2.42126 iter/s, 4.9561s/12 iters), loss = 0.103684
I0409 22:39:07.303747 25006 solver.cpp:237] Train net output #0: loss = 0.103684 (* 1 = 0.103684 loss)
I0409 22:39:07.303759 25006 sgd_solver.cpp:105] Iteration 9252, lr = 0.00159983
I0409 22:39:12.252065 25006 solver.cpp:218] Iteration 9264 (2.42517 iter/s, 4.9481s/12 iters), loss = 0.0528835
I0409 22:39:12.252117 25006 solver.cpp:237] Train net output #0: loss = 0.0528833 (* 1 = 0.0528833 loss)
I0409 22:39:12.252128 25006 sgd_solver.cpp:105] Iteration 9264, lr = 0.00159603
I0409 22:39:17.258991 25006 solver.cpp:218] Iteration 9276 (2.39681 iter/s, 5.00666s/12 iters), loss = 0.135671
I0409 22:39:17.259032 25006 solver.cpp:237] Train net output #0: loss = 0.135671 (* 1 = 0.135671 loss)
I0409 22:39:17.259042 25006 sgd_solver.cpp:105] Iteration 9276, lr = 0.00159224
I0409 22:39:19.290812 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_9282.caffemodel
I0409 22:39:21.490900 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_9282.solverstate
I0409 22:39:23.109302 25006 solver.cpp:330] Iteration 9282, Testing net (#0)
I0409 22:39:23.109320 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:39:23.847373 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:39:27.673259 25006 solver.cpp:397] Test net output #0: accuracy = 0.273284
I0409 22:39:27.673314 25006 solver.cpp:397] Test net output #1: loss = 5.76658 (* 1 = 5.76658 loss)
I0409 22:39:29.600411 25006 solver.cpp:218] Iteration 9288 (0.972379 iter/s, 12.3409s/12 iters), loss = 0.104623
I0409 22:39:29.600471 25006 solver.cpp:237] Train net output #0: loss = 0.104623 (* 1 = 0.104623 loss)
I0409 22:39:29.600484 25006 sgd_solver.cpp:105] Iteration 9288, lr = 0.00158846
I0409 22:39:34.636695 25006 solver.cpp:218] Iteration 9300 (2.38284 iter/s, 5.03601s/12 iters), loss = 0.0998986
I0409 22:39:34.636741 25006 solver.cpp:237] Train net output #0: loss = 0.0998984 (* 1 = 0.0998984 loss)
I0409 22:39:34.636750 25006 sgd_solver.cpp:105] Iteration 9300, lr = 0.00158469
I0409 22:39:37.056095 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:39:39.970532 25006 solver.cpp:218] Iteration 9312 (2.24991 iter/s, 5.33356s/12 iters), loss = 0.0962362
I0409 22:39:39.970690 25006 solver.cpp:237] Train net output #0: loss = 0.0962361 (* 1 = 0.0962361 loss)
I0409 22:39:39.970702 25006 sgd_solver.cpp:105] Iteration 9312, lr = 0.00158092
I0409 22:39:44.927757 25006 solver.cpp:218] Iteration 9324 (2.42089 iter/s, 4.95686s/12 iters), loss = 0.0829108
I0409 22:39:44.927819 25006 solver.cpp:237] Train net output #0: loss = 0.0829106 (* 1 = 0.0829106 loss)
I0409 22:39:44.927829 25006 sgd_solver.cpp:105] Iteration 9324, lr = 0.00157717
I0409 22:39:49.804805 25006 solver.cpp:218] Iteration 9336 (2.46064 iter/s, 4.87677s/12 iters), loss = 0.0780464
I0409 22:39:49.804869 25006 solver.cpp:237] Train net output #0: loss = 0.0780462 (* 1 = 0.0780462 loss)
I0409 22:39:49.804881 25006 sgd_solver.cpp:105] Iteration 9336, lr = 0.00157343
I0409 22:39:54.684911 25006 solver.cpp:218] Iteration 9348 (2.4591 iter/s, 4.87983s/12 iters), loss = 0.135921
I0409 22:39:54.684968 25006 solver.cpp:237] Train net output #0: loss = 0.13592 (* 1 = 0.13592 loss)
I0409 22:39:54.684981 25006 sgd_solver.cpp:105] Iteration 9348, lr = 0.00156969
I0409 22:39:59.690979 25006 solver.cpp:218] Iteration 9360 (2.39722 iter/s, 5.00579s/12 iters), loss = 0.036986
I0409 22:39:59.691051 25006 solver.cpp:237] Train net output #0: loss = 0.0369859 (* 1 = 0.0369859 loss)
I0409 22:39:59.691068 25006 sgd_solver.cpp:105] Iteration 9360, lr = 0.00156596
I0409 22:40:04.644340 25006 solver.cpp:218] Iteration 9372 (2.42274 iter/s, 4.95308s/12 iters), loss = 0.105193
I0409 22:40:04.644407 25006 solver.cpp:237] Train net output #0: loss = 0.105193 (* 1 = 0.105193 loss)
I0409 22:40:04.644419 25006 sgd_solver.cpp:105] Iteration 9372, lr = 0.00156225
I0409 22:40:09.579035 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_9384.caffemodel
I0409 22:40:16.396546 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_9384.solverstate
I0409 22:40:21.621479 25006 solver.cpp:330] Iteration 9384, Testing net (#0)
I0409 22:40:21.621510 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:40:22.402789 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:40:26.226006 25006 solver.cpp:397] Test net output #0: accuracy = 0.268382
I0409 22:40:26.226068 25006 solver.cpp:397] Test net output #1: loss = 5.65073 (* 1 = 5.65073 loss)
I0409 22:40:26.315310 25006 solver.cpp:218] Iteration 9384 (0.553761 iter/s, 21.67s/12 iters), loss = 0.140681
I0409 22:40:26.315371 25006 solver.cpp:237] Train net output #0: loss = 0.140681 (* 1 = 0.140681 loss)
I0409 22:40:26.315385 25006 sgd_solver.cpp:105] Iteration 9384, lr = 0.00155854
I0409 22:40:30.435003 25006 solver.cpp:218] Iteration 9396 (2.91301 iter/s, 4.11945s/12 iters), loss = 0.107103
I0409 22:40:30.435051 25006 solver.cpp:237] Train net output #0: loss = 0.107103 (* 1 = 0.107103 loss)
I0409 22:40:30.435063 25006 sgd_solver.cpp:105] Iteration 9396, lr = 0.00155484
I0409 22:40:34.754438 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:40:35.410202 25006 solver.cpp:218] Iteration 9408 (2.41209 iter/s, 4.97494s/12 iters), loss = 0.0319489
I0409 22:40:35.410249 25006 solver.cpp:237] Train net output #0: loss = 0.0319487 (* 1 = 0.0319487 loss)
I0409 22:40:35.410261 25006 sgd_solver.cpp:105] Iteration 9408, lr = 0.00155114
I0409 22:40:40.294925 25006 solver.cpp:218] Iteration 9420 (2.45677 iter/s, 4.88445s/12 iters), loss = 0.0983134
I0409 22:40:40.294976 25006 solver.cpp:237] Train net output #0: loss = 0.0983133 (* 1 = 0.0983133 loss)
I0409 22:40:40.294988 25006 sgd_solver.cpp:105] Iteration 9420, lr = 0.00154746
I0409 22:40:45.251863 25006 solver.cpp:218] Iteration 9432 (2.42098 iter/s, 4.95668s/12 iters), loss = 0.0952654
I0409 22:40:45.251912 25006 solver.cpp:237] Train net output #0: loss = 0.0952652 (* 1 = 0.0952652 loss)
I0409 22:40:45.251926 25006 sgd_solver.cpp:105] Iteration 9432, lr = 0.00154379
I0409 22:40:50.209424 25006 solver.cpp:218] Iteration 9444 (2.42068 iter/s, 4.95729s/12 iters), loss = 0.0478516
I0409 22:40:50.209575 25006 solver.cpp:237] Train net output #0: loss = 0.0478515 (* 1 = 0.0478515 loss)
I0409 22:40:50.209594 25006 sgd_solver.cpp:105] Iteration 9444, lr = 0.00154012
I0409 22:40:55.213073 25006 solver.cpp:218] Iteration 9456 (2.39842 iter/s, 5.00329s/12 iters), loss = 0.0884052
I0409 22:40:55.213121 25006 solver.cpp:237] Train net output #0: loss = 0.088405 (* 1 = 0.088405 loss)
I0409 22:40:55.213131 25006 sgd_solver.cpp:105] Iteration 9456, lr = 0.00153647
I0409 22:41:00.188995 25006 solver.cpp:218] Iteration 9468 (2.41174 iter/s, 4.97566s/12 iters), loss = 0.0689394
I0409 22:41:00.189044 25006 solver.cpp:237] Train net output #0: loss = 0.0689392 (* 1 = 0.0689392 loss)
I0409 22:41:00.189054 25006 sgd_solver.cpp:105] Iteration 9468, lr = 0.00153282
I0409 22:41:05.372642 25006 solver.cpp:218] Iteration 9480 (2.31509 iter/s, 5.18338s/12 iters), loss = 0.122977
I0409 22:41:05.372690 25006 solver.cpp:237] Train net output #0: loss = 0.122977 (* 1 = 0.122977 loss)
I0409 22:41:05.372701 25006 sgd_solver.cpp:105] Iteration 9480, lr = 0.00152918
I0409 22:41:07.499408 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_9486.caffemodel
I0409 22:41:09.700592 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_9486.solverstate
I0409 22:41:11.369146 25006 solver.cpp:330] Iteration 9486, Testing net (#0)
I0409 22:41:11.369176 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:41:12.097805 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:41:15.845710 25006 solver.cpp:397] Test net output #0: accuracy = 0.270833
I0409 22:41:15.845759 25006 solver.cpp:397] Test net output #1: loss = 5.59256 (* 1 = 5.59256 loss)
I0409 22:41:17.655503 25006 solver.cpp:218] Iteration 9492 (0.977016 iter/s, 12.2823s/12 iters), loss = 0.0341171
I0409 22:41:17.655545 25006 solver.cpp:237] Train net output #0: loss = 0.0341169 (* 1 = 0.0341169 loss)
I0409 22:41:17.655555 25006 sgd_solver.cpp:105] Iteration 9492, lr = 0.00152555
I0409 22:41:22.786870 25006 solver.cpp:218] Iteration 9504 (2.33868 iter/s, 5.1311s/12 iters), loss = 0.0757274
I0409 22:41:22.786960 25006 solver.cpp:237] Train net output #0: loss = 0.0757273 (* 1 = 0.0757273 loss)
I0409 22:41:22.786970 25006 sgd_solver.cpp:105] Iteration 9504, lr = 0.00152193
I0409 22:41:24.385218 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:41:28.175890 25006 solver.cpp:218] Iteration 9516 (2.22688 iter/s, 5.3887s/12 iters), loss = 0.068841
I0409 22:41:28.175933 25006 solver.cpp:237] Train net output #0: loss = 0.0688408 (* 1 = 0.0688408 loss)
I0409 22:41:28.175943 25006 sgd_solver.cpp:105] Iteration 9516, lr = 0.00151831
I0409 22:41:33.217342 25006 solver.cpp:218] Iteration 9528 (2.38039 iter/s, 5.04119s/12 iters), loss = 0.0391527
I0409 22:41:33.217396 25006 solver.cpp:237] Train net output #0: loss = 0.0391525 (* 1 = 0.0391525 loss)
I0409 22:41:33.217407 25006 sgd_solver.cpp:105] Iteration 9528, lr = 0.00151471
I0409 22:41:38.519598 25006 solver.cpp:218] Iteration 9540 (2.26331 iter/s, 5.30198s/12 iters), loss = 0.0408216
I0409 22:41:38.519647 25006 solver.cpp:237] Train net output #0: loss = 0.0408215 (* 1 = 0.0408215 loss)
I0409 22:41:38.519657 25006 sgd_solver.cpp:105] Iteration 9540, lr = 0.00151111
I0409 22:41:43.582352 25006 solver.cpp:218] Iteration 9552 (2.37038 iter/s, 5.06249s/12 iters), loss = 0.130085
I0409 22:41:43.582396 25006 solver.cpp:237] Train net output #0: loss = 0.130085 (* 1 = 0.130085 loss)
I0409 22:41:43.582407 25006 sgd_solver.cpp:105] Iteration 9552, lr = 0.00150752
I0409 22:41:48.549643 25006 solver.cpp:218] Iteration 9564 (2.41593 iter/s, 4.96704s/12 iters), loss = 0.0317397
I0409 22:41:48.549685 25006 solver.cpp:237] Train net output #0: loss = 0.0317396 (* 1 = 0.0317396 loss)
I0409 22:41:48.549693 25006 sgd_solver.cpp:105] Iteration 9564, lr = 0.00150395
I0409 22:41:53.505779 25006 solver.cpp:218] Iteration 9576 (2.42136 iter/s, 4.95589s/12 iters), loss = 0.0861794
I0409 22:41:53.505872 25006 solver.cpp:237] Train net output #0: loss = 0.0861793 (* 1 = 0.0861793 loss)
I0409 22:41:53.505882 25006 sgd_solver.cpp:105] Iteration 9576, lr = 0.00150037
I0409 22:41:58.017006 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_9588.caffemodel
I0409 22:42:07.999111 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_9588.solverstate
I0409 22:42:12.163506 25006 solver.cpp:330] Iteration 9588, Testing net (#0)
I0409 22:42:12.163532 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:42:12.863667 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:42:16.625341 25006 solver.cpp:397] Test net output #0: accuracy = 0.267157
I0409 22:42:16.625391 25006 solver.cpp:397] Test net output #1: loss = 5.70371 (* 1 = 5.70371 loss)
I0409 22:42:16.714278 25006 solver.cpp:218] Iteration 9588 (0.517075 iter/s, 23.2075s/12 iters), loss = 0.0342837
I0409 22:42:16.714340 25006 solver.cpp:237] Train net output #0: loss = 0.0342835 (* 1 = 0.0342835 loss)
I0409 22:42:16.714352 25006 sgd_solver.cpp:105] Iteration 9588, lr = 0.00149681
I0409 22:42:21.042699 25006 solver.cpp:218] Iteration 9600 (2.77253 iter/s, 4.32818s/12 iters), loss = 0.0867348
I0409 22:42:21.042750 25006 solver.cpp:237] Train net output #0: loss = 0.0867347 (* 1 = 0.0867347 loss)
I0409 22:42:21.042762 25006 sgd_solver.cpp:105] Iteration 9600, lr = 0.00149326
I0409 22:42:24.655350 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:42:26.036299 25006 solver.cpp:218] Iteration 9612 (2.4032 iter/s, 4.99334s/12 iters), loss = 0.073914
I0409 22:42:26.036340 25006 solver.cpp:237] Train net output #0: loss = 0.0739138 (* 1 = 0.0739138 loss)
I0409 22:42:26.036350 25006 sgd_solver.cpp:105] Iteration 9612, lr = 0.00148971
I0409 22:42:31.122138 25006 solver.cpp:218] Iteration 9624 (2.35962 iter/s, 5.08557s/12 iters), loss = 0.0485795
I0409 22:42:31.122185 25006 solver.cpp:237] Train net output #0: loss = 0.0485794 (* 1 = 0.0485794 loss)
I0409 22:42:31.122193 25006 sgd_solver.cpp:105] Iteration 9624, lr = 0.00148618
I0409 22:42:36.047334 25006 solver.cpp:218] Iteration 9636 (2.43658 iter/s, 4.92493s/12 iters), loss = 0.164285
I0409 22:42:36.047385 25006 solver.cpp:237] Train net output #0: loss = 0.164285 (* 1 = 0.164285 loss)
I0409 22:42:36.047400 25006 sgd_solver.cpp:105] Iteration 9636, lr = 0.00148265
I0409 22:42:40.988742 25006 solver.cpp:218] Iteration 9648 (2.42859 iter/s, 4.94114s/12 iters), loss = 0.0486852
I0409 22:42:40.988790 25006 solver.cpp:237] Train net output #0: loss = 0.048685 (* 1 = 0.048685 loss)
I0409 22:42:40.988801 25006 sgd_solver.cpp:105] Iteration 9648, lr = 0.00147913
I0409 22:42:46.182289 25006 solver.cpp:218] Iteration 9660 (2.31068 iter/s, 5.19328s/12 iters), loss = 0.00864094
I0409 22:42:46.182330 25006 solver.cpp:237] Train net output #0: loss = 0.00864078 (* 1 = 0.00864078 loss)
I0409 22:42:46.182339 25006 sgd_solver.cpp:105] Iteration 9660, lr = 0.00147562
I0409 22:42:51.248299 25006 solver.cpp:218] Iteration 9672 (2.36885 iter/s, 5.06575s/12 iters), loss = 0.153442
I0409 22:42:51.248350 25006 solver.cpp:237] Train net output #0: loss = 0.153442 (* 1 = 0.153442 loss)
I0409 22:42:51.248364 25006 sgd_solver.cpp:105] Iteration 9672, lr = 0.00147211
I0409 22:42:56.212746 25006 solver.cpp:218] Iteration 9684 (2.41732 iter/s, 4.96418s/12 iters), loss = 0.028777
I0409 22:42:56.212924 25006 solver.cpp:237] Train net output #0: loss = 0.0287768 (* 1 = 0.0287768 loss)
I0409 22:42:56.212939 25006 sgd_solver.cpp:105] Iteration 9684, lr = 0.00146862
I0409 22:42:58.204879 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_9690.caffemodel
I0409 22:43:04.565824 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_9690.solverstate
I0409 22:43:08.557729 25006 solver.cpp:330] Iteration 9690, Testing net (#0)
I0409 22:43:08.557752 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:43:09.115736 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:43:11.911737 25006 blocking_queue.cpp:49] Waiting for data
I0409 22:43:12.897614 25006 solver.cpp:397] Test net output #0: accuracy = 0.277574
I0409 22:43:12.897658 25006 solver.cpp:397] Test net output #1: loss = 5.66138 (* 1 = 5.66138 loss)
I0409 22:43:14.883508 25006 solver.cpp:218] Iteration 9696 (0.642749 iter/s, 18.6698s/12 iters), loss = 0.112352
I0409 22:43:14.883576 25006 solver.cpp:237] Train net output #0: loss = 0.112352 (* 1 = 0.112352 loss)
I0409 22:43:14.883594 25006 sgd_solver.cpp:105] Iteration 9696, lr = 0.00146513
I0409 22:43:19.936538 25006 solver.cpp:218] Iteration 9708 (2.37495 iter/s, 5.05274s/12 iters), loss = 0.225802
I0409 22:43:19.936591 25006 solver.cpp:237] Train net output #0: loss = 0.225802 (* 1 = 0.225802 loss)
I0409 22:43:19.936604 25006 sgd_solver.cpp:105] Iteration 9708, lr = 0.00146165
I0409 22:43:20.658547 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:43:24.920862 25006 solver.cpp:218] Iteration 9720 (2.40768 iter/s, 4.98405s/12 iters), loss = 0.0607281
I0409 22:43:24.920912 25006 solver.cpp:237] Train net output #0: loss = 0.0607279 (* 1 = 0.0607279 loss)
I0409 22:43:24.920922 25006 sgd_solver.cpp:105] Iteration 9720, lr = 0.00145818
I0409 22:43:29.927419 25006 solver.cpp:218] Iteration 9732 (2.39698 iter/s, 5.00629s/12 iters), loss = 0.0839009
I0409 22:43:29.927521 25006 solver.cpp:237] Train net output #0: loss = 0.0839007 (* 1 = 0.0839007 loss)
I0409 22:43:29.927534 25006 sgd_solver.cpp:105] Iteration 9732, lr = 0.00145472
I0409 22:43:34.885563 25006 solver.cpp:218] Iteration 9744 (2.42041 iter/s, 4.95783s/12 iters), loss = 0.0995246
I0409 22:43:34.885602 25006 solver.cpp:237] Train net output #0: loss = 0.0995244 (* 1 = 0.0995244 loss)
I0409 22:43:34.885612 25006 sgd_solver.cpp:105] Iteration 9744, lr = 0.00145127
I0409 22:43:39.810328 25006 solver.cpp:218] Iteration 9756 (2.43679 iter/s, 4.9245s/12 iters), loss = 0.0578778
I0409 22:43:39.810391 25006 solver.cpp:237] Train net output #0: loss = 0.0578777 (* 1 = 0.0578777 loss)
I0409 22:43:39.810402 25006 sgd_solver.cpp:105] Iteration 9756, lr = 0.00144782
I0409 22:43:44.820916 25006 solver.cpp:218] Iteration 9768 (2.39506 iter/s, 5.01031s/12 iters), loss = 0.0402917
I0409 22:43:44.820963 25006 solver.cpp:237] Train net output #0: loss = 0.0402915 (* 1 = 0.0402915 loss)
I0409 22:43:44.820973 25006 sgd_solver.cpp:105] Iteration 9768, lr = 0.00144438
I0409 22:43:49.841984 25006 solver.cpp:218] Iteration 9780 (2.39006 iter/s, 5.02079s/12 iters), loss = 0.0733618
I0409 22:43:49.842031 25006 solver.cpp:237] Train net output #0: loss = 0.0733616 (* 1 = 0.0733616 loss)
I0409 22:43:49.842042 25006 sgd_solver.cpp:105] Iteration 9780, lr = 0.00144095
I0409 22:43:54.416447 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_9792.caffemodel
I0409 22:43:56.665614 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_9792.solverstate
I0409 22:43:58.328691 25006 solver.cpp:330] Iteration 9792, Testing net (#0)
I0409 22:43:58.328722 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:43:58.934338 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:44:02.796624 25006 solver.cpp:397] Test net output #0: accuracy = 0.275123
I0409 22:44:02.796743 25006 solver.cpp:397] Test net output #1: loss = 5.67189 (* 1 = 5.67189 loss)
I0409 22:44:02.885910 25006 solver.cpp:218] Iteration 9792 (0.92001 iter/s, 13.0433s/12 iters), loss = 0.0289406
I0409 22:44:02.885984 25006 solver.cpp:237] Train net output #0: loss = 0.0289405 (* 1 = 0.0289405 loss)
I0409 22:44:02.885994 25006 sgd_solver.cpp:105] Iteration 9792, lr = 0.00143753
I0409 22:44:06.960261 25006 solver.cpp:218] Iteration 9804 (2.94542 iter/s, 4.07412s/12 iters), loss = 0.026573
I0409 22:44:06.960315 25006 solver.cpp:237] Train net output #0: loss = 0.0265728 (* 1 = 0.0265728 loss)
I0409 22:44:06.960327 25006 sgd_solver.cpp:105] Iteration 9804, lr = 0.00143412
I0409 22:44:09.892452 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:44:11.939361 25006 solver.cpp:218] Iteration 9816 (2.41021 iter/s, 4.97883s/12 iters), loss = 0.0622664
I0409 22:44:11.939419 25006 solver.cpp:237] Train net output #0: loss = 0.0622662 (* 1 = 0.0622662 loss)
I0409 22:44:11.939432 25006 sgd_solver.cpp:105] Iteration 9816, lr = 0.00143072
I0409 22:44:17.154762 25006 solver.cpp:218] Iteration 9828 (2.301 iter/s, 5.21511s/12 iters), loss = 0.12219
I0409 22:44:17.154820 25006 solver.cpp:237] Train net output #0: loss = 0.12219 (* 1 = 0.12219 loss)
I0409 22:44:17.154834 25006 sgd_solver.cpp:105] Iteration 9828, lr = 0.00142732
I0409 22:44:22.378233 25006 solver.cpp:218] Iteration 9840 (2.29745 iter/s, 5.22319s/12 iters), loss = 0.124394
I0409 22:44:22.378281 25006 solver.cpp:237] Train net output #0: loss = 0.124394 (* 1 = 0.124394 loss)
I0409 22:44:22.378293 25006 sgd_solver.cpp:105] Iteration 9840, lr = 0.00142393
I0409 22:44:27.370981 25006 solver.cpp:218] Iteration 9852 (2.40361 iter/s, 4.99249s/12 iters), loss = 0.0563979
I0409 22:44:27.371032 25006 solver.cpp:237] Train net output #0: loss = 0.0563977 (* 1 = 0.0563977 loss)
I0409 22:44:27.371045 25006 sgd_solver.cpp:105] Iteration 9852, lr = 0.00142055
I0409 22:44:32.341302 25006 solver.cpp:218] Iteration 9864 (2.41446 iter/s, 4.97005s/12 iters), loss = 0.154108
I0409 22:44:32.341361 25006 solver.cpp:237] Train net output #0: loss = 0.154108 (* 1 = 0.154108 loss)
I0409 22:44:32.341373 25006 sgd_solver.cpp:105] Iteration 9864, lr = 0.00141718
I0409 22:44:37.351639 25006 solver.cpp:218] Iteration 9876 (2.39518 iter/s, 5.01006s/12 iters), loss = 0.0222644
I0409 22:44:37.351723 25006 solver.cpp:237] Train net output #0: loss = 0.0222642 (* 1 = 0.0222642 loss)
I0409 22:44:37.351737 25006 sgd_solver.cpp:105] Iteration 9876, lr = 0.00141381
I0409 22:44:42.301282 25006 solver.cpp:218] Iteration 9888 (2.42456 iter/s, 4.94934s/12 iters), loss = 0.163245
I0409 22:44:42.301337 25006 solver.cpp:237] Train net output #0: loss = 0.163244 (* 1 = 0.163244 loss)
I0409 22:44:42.301348 25006 sgd_solver.cpp:105] Iteration 9888, lr = 0.00141045
I0409 22:44:44.315486 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_9894.caffemodel
I0409 22:44:49.760890 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_9894.solverstate
I0409 22:44:52.584781 25006 solver.cpp:330] Iteration 9894, Testing net (#0)
I0409 22:44:52.584808 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:44:53.155510 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:44:57.318647 25006 solver.cpp:397] Test net output #0: accuracy = 0.277574
I0409 22:44:57.318687 25006 solver.cpp:397] Test net output #1: loss = 5.68966 (* 1 = 5.68966 loss)
I0409 22:44:59.195472 25006 solver.cpp:218] Iteration 9900 (0.710335 iter/s, 16.8934s/12 iters), loss = 0.0814843
I0409 22:44:59.195534 25006 solver.cpp:237] Train net output #0: loss = 0.0814841 (* 1 = 0.0814841 loss)
I0409 22:44:59.195547 25006 sgd_solver.cpp:105] Iteration 9900, lr = 0.00140711
I0409 22:45:04.272426 25006 solver.cpp:218] Iteration 9912 (2.36375 iter/s, 5.07668s/12 iters), loss = 0.0822835
I0409 22:45:04.272475 25006 solver.cpp:237] Train net output #0: loss = 0.0822833 (* 1 = 0.0822833 loss)
I0409 22:45:04.272485 25006 sgd_solver.cpp:105] Iteration 9912, lr = 0.00140377
I0409 22:45:04.371023 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:45:09.248342 25006 solver.cpp:218] Iteration 9924 (2.41175 iter/s, 4.97565s/12 iters), loss = 0.0173469
I0409 22:45:09.249258 25006 solver.cpp:237] Train net output #0: loss = 0.0173467 (* 1 = 0.0173467 loss)
I0409 22:45:09.249269 25006 sgd_solver.cpp:105] Iteration 9924, lr = 0.00140043
I0409 22:45:14.344480 25006 solver.cpp:218] Iteration 9936 (2.35525 iter/s, 5.095s/12 iters), loss = 0.064298
I0409 22:45:14.344529 25006 solver.cpp:237] Train net output #0: loss = 0.0642978 (* 1 = 0.0642978 loss)
I0409 22:45:14.344542 25006 sgd_solver.cpp:105] Iteration 9936, lr = 0.00139711
I0409 22:45:19.436730 25006 solver.cpp:218] Iteration 9948 (2.35665 iter/s, 5.09197s/12 iters), loss = 0.0987668
I0409 22:45:19.436800 25006 solver.cpp:237] Train net output #0: loss = 0.0987666 (* 1 = 0.0987666 loss)
I0409 22:45:19.436815 25006 sgd_solver.cpp:105] Iteration 9948, lr = 0.00139379
I0409 22:45:24.567487 25006 solver.cpp:218] Iteration 9960 (2.33897 iter/s, 5.13047s/12 iters), loss = 0.125941
I0409 22:45:24.567538 25006 solver.cpp:237] Train net output #0: loss = 0.12594 (* 1 = 0.12594 loss)
I0409 22:45:24.567548 25006 sgd_solver.cpp:105] Iteration 9960, lr = 0.00139048
I0409 22:45:29.656651 25006 solver.cpp:218] Iteration 9972 (2.35808 iter/s, 5.08889s/12 iters), loss = 0.0518003
I0409 22:45:29.656702 25006 solver.cpp:237] Train net output #0: loss = 0.0518001 (* 1 = 0.0518001 loss)
I0409 22:45:29.656713 25006 sgd_solver.cpp:105] Iteration 9972, lr = 0.00138718
I0409 22:45:34.638463 25006 solver.cpp:218] Iteration 9984 (2.40889 iter/s, 4.98154s/12 iters), loss = 0.0535928
I0409 22:45:34.638511 25006 solver.cpp:237] Train net output #0: loss = 0.0535926 (* 1 = 0.0535926 loss)
I0409 22:45:34.638523 25006 sgd_solver.cpp:105] Iteration 9984, lr = 0.00138389
I0409 22:45:39.143893 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_9996.caffemodel
I0409 22:45:48.362352 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_9996.solverstate
I0409 22:45:51.664741 25006 solver.cpp:330] Iteration 9996, Testing net (#0)
I0409 22:45:51.664769 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:45:52.178076 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:45:56.197166 25006 solver.cpp:397] Test net output #0: accuracy = 0.287377
I0409 22:45:56.197214 25006 solver.cpp:397] Test net output #1: loss = 5.62657 (* 1 = 5.62657 loss)
I0409 22:45:56.286000 25006 solver.cpp:218] Iteration 9996 (0.55436 iter/s, 21.6466s/12 iters), loss = 0.019449
I0409 22:45:56.286052 25006 solver.cpp:237] Train net output #0: loss = 0.0194488 (* 1 = 0.0194488 loss)
I0409 22:45:56.286064 25006 sgd_solver.cpp:105] Iteration 9996, lr = 0.0013806
I0409 22:46:00.541098 25006 solver.cpp:218] Iteration 10008 (2.82031 iter/s, 4.25485s/12 iters), loss = 0.0828464
I0409 22:46:00.541160 25006 solver.cpp:237] Train net output #0: loss = 0.0828462 (* 1 = 0.0828462 loss)
I0409 22:46:00.541173 25006 sgd_solver.cpp:105] Iteration 10008, lr = 0.00137732
I0409 22:46:02.773221 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:46:05.500285 25006 solver.cpp:218] Iteration 10020 (2.41988 iter/s, 4.95892s/12 iters), loss = 0.105258
I0409 22:46:05.500335 25006 solver.cpp:237] Train net output #0: loss = 0.105257 (* 1 = 0.105257 loss)
I0409 22:46:05.500347 25006 sgd_solver.cpp:105] Iteration 10020, lr = 0.00137405
I0409 22:46:10.663172 25006 solver.cpp:218] Iteration 10032 (2.3244 iter/s, 5.16262s/12 iters), loss = 0.0956678
I0409 22:46:10.663219 25006 solver.cpp:237] Train net output #0: loss = 0.0956676 (* 1 = 0.0956676 loss)
I0409 22:46:10.663231 25006 sgd_solver.cpp:105] Iteration 10032, lr = 0.00137079
I0409 22:46:15.675555 25006 solver.cpp:218] Iteration 10044 (2.3942 iter/s, 5.01212s/12 iters), loss = 0.080296
I0409 22:46:15.675606 25006 solver.cpp:237] Train net output #0: loss = 0.0802957 (* 1 = 0.0802957 loss)
I0409 22:46:15.675617 25006 sgd_solver.cpp:105] Iteration 10044, lr = 0.00136754
I0409 22:46:20.611016 25006 solver.cpp:218] Iteration 10056 (2.43151 iter/s, 4.9352s/12 iters), loss = 0.0941451
I0409 22:46:20.611145 25006 solver.cpp:237] Train net output #0: loss = 0.0941448 (* 1 = 0.0941448 loss)
I0409 22:46:20.611160 25006 sgd_solver.cpp:105] Iteration 10056, lr = 0.00136429
I0409 22:46:25.682487 25006 solver.cpp:218] Iteration 10068 (2.36634 iter/s, 5.07113s/12 iters), loss = 0.161337
I0409 22:46:25.682528 25006 solver.cpp:237] Train net output #0: loss = 0.161336 (* 1 = 0.161336 loss)
I0409 22:46:25.682536 25006 sgd_solver.cpp:105] Iteration 10068, lr = 0.00136105
I0409 22:46:30.620851 25006 solver.cpp:218] Iteration 10080 (2.43008 iter/s, 4.93811s/12 iters), loss = 0.12288
I0409 22:46:30.620898 25006 solver.cpp:237] Train net output #0: loss = 0.12288 (* 1 = 0.12288 loss)
I0409 22:46:30.620909 25006 sgd_solver.cpp:105] Iteration 10080, lr = 0.00135782
I0409 22:46:35.767450 25006 solver.cpp:218] Iteration 10092 (2.33176 iter/s, 5.14632s/12 iters), loss = 0.0987126
I0409 22:46:35.767510 25006 solver.cpp:237] Train net output #0: loss = 0.0987124 (* 1 = 0.0987124 loss)
I0409 22:46:35.767524 25006 sgd_solver.cpp:105] Iteration 10092, lr = 0.0013546
I0409 22:46:37.795768 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_10098.caffemodel
I0409 22:46:41.476552 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_10098.solverstate
I0409 22:46:44.261502 25006 solver.cpp:330] Iteration 10098, Testing net (#0)
I0409 22:46:44.261524 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:46:44.676645 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:46:48.750241 25006 solver.cpp:397] Test net output #0: accuracy = 0.278186
I0409 22:46:48.750284 25006 solver.cpp:397] Test net output #1: loss = 5.63059 (* 1 = 5.63059 loss)
I0409 22:46:50.524756 25006 solver.cpp:218] Iteration 10104 (0.813194 iter/s, 14.7566s/12 iters), loss = 0.0501022
I0409 22:46:50.524811 25006 solver.cpp:237] Train net output #0: loss = 0.050102 (* 1 = 0.050102 loss)
I0409 22:46:50.524823 25006 sgd_solver.cpp:105] Iteration 10104, lr = 0.00135138
I0409 22:46:54.869093 25010 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:46:55.497550 25006 solver.cpp:218] Iteration 10116 (2.41327 iter/s, 4.97252s/12 iters), loss = 0.0716723
I0409 22:46:55.497611 25006 solver.cpp:237] Train net output #0: loss = 0.0716721 (* 1 = 0.0716721 loss)
I0409 22:46:55.497624 25006 sgd_solver.cpp:105] Iteration 10116, lr = 0.00134817
I0409 22:47:00.496429 25006 solver.cpp:218] Iteration 10128 (2.40067 iter/s, 4.9986s/12 iters), loss = 0.0707206
I0409 22:47:00.496479 25006 solver.cpp:237] Train net output #0: loss = 0.0707203 (* 1 = 0.0707203 loss)
I0409 22:47:00.496488 25006 sgd_solver.cpp:105] Iteration 10128, lr = 0.00134497
I0409 22:47:05.484256 25006 solver.cpp:218] Iteration 10140 (2.40599 iter/s, 4.98756s/12 iters), loss = 0.0284899
I0409 22:47:05.484302 25006 solver.cpp:237] Train net output #0: loss = 0.0284897 (* 1 = 0.0284897 loss)
I0409 22:47:05.484310 25006 sgd_solver.cpp:105] Iteration 10140, lr = 0.00134178
I0409 22:47:10.541887 25006 solver.cpp:218] Iteration 10152 (2.37278 iter/s, 5.05737s/12 iters), loss = 0.0195806
I0409 22:47:10.541935 25006 solver.cpp:237] Train net output #0: loss = 0.0195804 (* 1 = 0.0195804 loss)
I0409 22:47:10.541946 25006 sgd_solver.cpp:105] Iteration 10152, lr = 0.00133859
I0409 22:47:15.499590 25006 solver.cpp:218] Iteration 10164 (2.4206 iter/s, 4.95744s/12 iters), loss = 0.0961295
I0409 22:47:15.499630 25006 solver.cpp:237] Train net output #0: loss = 0.0961293 (* 1 = 0.0961293 loss)
I0409 22:47:15.499639 25006 sgd_solver.cpp:105] Iteration 10164, lr = 0.00133541
I0409 22:47:20.447201 25006 solver.cpp:218] Iteration 10176 (2.42554 iter/s, 4.94735s/12 iters), loss = 0.0566418
I0409 22:47:20.447260 25006 solver.cpp:237] Train net output #0: loss = 0.0566416 (* 1 = 0.0566416 loss)
I0409 22:47:20.447273 25006 sgd_solver.cpp:105] Iteration 10176, lr = 0.00133224
I0409 22:47:25.420591 25006 solver.cpp:218] Iteration 10188 (2.41297 iter/s, 4.97312s/12 iters), loss = 0.0618153
I0409 22:47:25.420735 25006 solver.cpp:237] Train net output #0: loss = 0.061815 (* 1 = 0.061815 loss)
I0409 22:47:25.420750 25006 sgd_solver.cpp:105] Iteration 10188, lr = 0.00132908
I0409 22:47:29.925628 25006 solver.cpp:447] Snapshotting to binary proto file snapshot_iter_10200.caffemodel
I0409 22:47:42.450809 25006 sgd_solver.cpp:273] Snapshotting solver state to binary proto file snapshot_iter_10200.solverstate
I0409 22:47:48.733346 25006 solver.cpp:310] Iteration 10200, loss = 0.0909517
I0409 22:47:48.733373 25006 solver.cpp:330] Iteration 10200, Testing net (#0)
I0409 22:47:48.733378 25006 net.cpp:676] Ignoring source layer train-data
I0409 22:47:49.135320 25011 data_layer.cpp:73] Restarting data prefetching from start.
I0409 22:47:53.206182 25006 solver.cpp:397] Test net output #0: accuracy = 0.279412
I0409 22:47:53.206223 25006 solver.cpp:397] Test net output #1: loss = 5.61082 (* 1 = 5.61082 loss)
I0409 22:47:53.206231 25006 solver.cpp:315] Optimization Done.
I0409 22:47:53.206238 25006 caffe.cpp:259] Optimization Done.
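
Appended note (not part of the captured log): the learning rates printed by sgd_solver.cpp above follow Caffe's "exp" lr_policy, lr = base_lr * gamma^iter, using the base_lr and gamma from the solver parameters dumped at the top of this log. The short Python sketch below only assumes those two printed values and reproduces the logged rates to roughly four significant figures (gamma is printed with limited precision, so the check is approximate, not exact):

    base_lr = 0.01        # from the solver parameters printed at the top of this log
    gamma = 0.99980193    # ditto; the printed value is rounded, so matches are approximate

    def exp_policy_lr(iteration):
        # Caffe "exp" lr_policy: lr = base_lr * gamma^iter
        return base_lr * gamma ** iteration

    # Spot-check against a few values logged by sgd_solver.cpp in this section.
    for it, logged in [(9492, 0.00152555), (9996, 0.0013806), (10188, 0.00132908)]:
        print(f"iter {it}: computed {exp_policy_lr(it):.6g}, logged {logged:.6g}")

The snapshot_iter_10200.caffemodel / .solverstate pair written just before the final test is the artifact DIGITS keeps for this run; if training needed to continue past max_iter, the standard caffe CLI can resume from the solverstate via its --snapshot flag.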