train_mnist_99.58.log
I0806 09:45:15.961009 11748 caffe.cpp:113] Use GPU with device ID 0
I0806 09:45:16.327029 11748 common.cpp:24] System entropy source not available, using fallback algorithm to generate seed instead.
I0806 09:45:16.327029 11748 caffe.cpp:121] Starting Optimization
I0806 09:45:16.327029 11748 solver.cpp:32] Initializing solver from parameters:
test_iter: 100
test_interval: 1000
base_lr: 0.04
display: 100
max_iter: 15000
lr_policy: "step"
gamma: 0.1428571
momentum: 0.8
weight_decay: 0.0001
stepsize: 6000
snapshot: 5000
snapshot_prefix: "lenet"
solver_mode: GPU
net: "lenet_train_test.prototxt"
I0806 09:45:16.328030 11748 solver.cpp:70] Creating training net from net file: lenet_train_test.prototxt
I0806 09:45:16.328030 11748 net.cpp:287] The NetState phase (0) differed from the phase (1) specified by a rule in layer mnist
I0806 09:45:16.328030 11748 net.cpp:287] The NetState phase (0) differed from the phase (1) specified by a rule in layer accuracy
I0806 09:45:16.328030 11748 net.cpp:42] Initializing net from parameters:
name: "LeNet"
state {
phase: TRAIN
}
layer {
name: "mnist"
type: "Data"
top: "data"
top: "label"
include {
phase: TRAIN
}
transform_param {
scale: 0.00390625
}
data_param {
source: "mnist-train-leveldb"
batch_size: 64
backend: LEVELDB
}
}
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 30
kernel_size: 7
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu1"
type: "ReLU"
bottom: "conv1"
top: "conv1"
}
layer {
name: "conv1_bn"
type: "BN"
bottom: "conv1"
top: "conv1_bn"
param {
lr_mult: 1
decay_mult: 0
}
param {
lr_mult: 1
decay_mult: 0
}
bn_param {
scale_filler {
type: "constant"
value: 1
}
shift_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1_bn"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "conv2"
type: "Convolution"
bottom: "pool1"
top: "conv2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 70
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "conv2"
top: "conv2"
}
layer {
name: "conv2_bn"
type: "BN"
bottom: "conv2"
top: "conv2_bn"
param {
lr_mult: 1
decay_mult: 0
}
param {
lr_mult: 1
decay_mult: 0
}
bn_param {
scale_filler {
type: "constant"
value: 1
}
shift_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "drop1"
type: "Dropout"
bottom: "conv2_bn"
top: "conv2_bn"
dropout_param {
dropout_ratio: 0.1
}
}
layer {
name: "conv2_1"
type: "Convolution"
bottom: "conv2_bn"
top: "conv2_1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 70
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "conv2_1"
top: "conv2_1"
}
layer {
name: "conv2_bn_1"
type: "BN"
bottom: "conv2_1"
top: "conv2_bn_1"
param {
lr_mult: 1
decay_mult: 0
}
param {
lr_mult: 1
decay_mult: 0
}
bn_param {
scale_filler {
type: "constant"
value: 1
}
shift_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "drop1_1"
type: "Dropout"
bottom: "conv2_bn_1"
top: "conv2_bn_1"
dropout_param {
dropout_ratio: 0.1
}
}
layer {
name: "pool2"
type: "Pooling"
bottom: "conv2_bn_1"
top: "pool2"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "ip1"
type: "InnerProduct"
bottom: "pool2"
top: "ip1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 300
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu3"
type: "ReLU"
bottom: "ip1"
top: "ip1"
}
layer {
name: "drop2"
type: "Dropout"
bottom: "ip1"
top: "ip1"
dropout_param {
dropout_ratio: 0.3
}
}
layer {
name: "ip3"
type: "InnerProduct"
bottom: "ip1"
top: "ip3"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 10
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "ip3"
bottom: "label"
top: "loss"
}
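The "Top shape" lines below follow directly from the layer parameters above. A small sketch of the spatial arithmetic, assuming Caffe's usual conventions (floor division for convolution, ceiling for pooling):

import math

def conv_out(size, kernel, stride=1, pad=0):
    # Convolution output: floor((size + 2*pad - kernel) / stride) + 1
    return (size + 2 * pad - kernel) // stride + 1

def pool_out(size, kernel, stride):
    # Pooling output (no pad): ceil((size - kernel) / stride) + 1
    return math.ceil((size - kernel) / stride) + 1

s = conv_out(28, 7)        # conv1:   28 -> 22
s = pool_out(s, 2, 2)      # pool1:   22 -> 11
s = conv_out(s, 3)         # conv2:   11 -> 9
s = conv_out(s, 3, pad=1)  # conv2_1:  9 -> 9 (pad 1 preserves size)
s = pool_out(s, 2, 2)      # pool2:    9 -> 5
print(s)                   # 5, matching "Top shape: 64 70 5 5" below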
I0806 09:45:16.377032 11748 layer_factory.hpp:74] Creating layer mnist
I0806 09:45:16.378032 11748 net.cpp:90] Creating Layer mnist
I0806 09:45:16.378032 11748 net.cpp:368] mnist -> data
I0806 09:45:16.379032 11748 net.cpp:368] mnist -> label
I0806 09:45:16.379032 11748 net.cpp:120] Setting up mnist
I0806 09:45:16.386034 11748 db.cpp:20] Opened leveldb mnist-train-leveldb
I0806 09:45:16.386034 11748 data_layer.cpp:52] output data size: 64,1,28,28
I0806 09:45:16.387033 11748 net.cpp:127] Top shape: 64 1 28 28 (50176)
I0806 09:45:16.387033 11748 net.cpp:127] Top shape: 64 (64)
I0806 09:45:16.387033 11748 layer_factory.hpp:74] Creating layer conv1
I0806 09:45:16.388033 11748 net.cpp:90] Creating Layer conv1
I0806 09:45:16.388033 11748 net.cpp:410] conv1 <- data
I0806 09:45:16.388033 11748 net.cpp:368] conv1 -> conv1
I0806 09:45:16.388033 11748 net.cpp:120] Setting up conv1
I0806 09:45:16.389034 11748 common.cpp:24] System entropy source not available, using fallback algorithm to generate seed instead.
I0806 09:45:16.457037 11748 net.cpp:127] Top shape: 64 30 22 22 (929280)
I0806 09:45:16.457037 11748 layer_factory.hpp:74] Creating layer relu1
I0806 09:45:16.458037 11748 net.cpp:90] Creating Layer relu1
I0806 09:45:16.458037 11748 net.cpp:410] relu1 <- conv1
I0806 09:45:16.458037 11748 net.cpp:357] relu1 -> conv1 (in-place)
I0806 09:45:16.459038 11748 net.cpp:120] Setting up relu1
I0806 09:45:16.459038 11748 net.cpp:127] Top shape: 64 30 22 22 (929280)
I0806 09:45:16.459038 11748 layer_factory.hpp:74] Creating layer conv1_bn
I0806 09:45:16.460037 11748 net.cpp:90] Creating Layer conv1_bn
I0806 09:45:16.460037 11748 net.cpp:410] conv1_bn <- conv1
I0806 09:45:16.460037 11748 net.cpp:368] conv1_bn -> conv1_bn
I0806 09:45:16.461037 11748 net.cpp:120] Setting up conv1_bn
I0806 09:45:16.461037 11748 net.cpp:127] Top shape: 64 30 22 22 (929280)
I0806 09:45:16.461037 11748 layer_factory.hpp:74] Creating layer pool1
I0806 09:45:16.462038 11748 net.cpp:90] Creating Layer pool1
I0806 09:45:16.462038 11748 net.cpp:410] pool1 <- conv1_bn
I0806 09:45:16.462038 11748 net.cpp:368] pool1 -> pool1
I0806 09:45:16.463037 11748 net.cpp:120] Setting up pool1
I0806 09:45:16.463037 11748 net.cpp:127] Top shape: 64 30 11 11 (232320)
I0806 09:45:16.463037 11748 layer_factory.hpp:74] Creating layer conv2
I0806 09:45:16.464037 11748 net.cpp:90] Creating Layer conv2
I0806 09:45:16.464037 11748 net.cpp:410] conv2 <- pool1
I0806 09:45:16.464037 11748 net.cpp:368] conv2 -> conv2
I0806 09:45:16.464037 11748 net.cpp:120] Setting up conv2
I0806 09:45:16.465037 11748 net.cpp:127] Top shape: 64 70 9 9 (362880)
I0806 09:45:16.466037 11748 layer_factory.hpp:74] Creating layer relu2
I0806 09:45:16.466037 11748 net.cpp:90] Creating Layer relu2
I0806 09:45:16.466037 11748 net.cpp:410] relu2 <- conv2
I0806 09:45:16.466037 11748 net.cpp:357] relu2 -> conv2 (in-place)
I0806 09:45:16.467038 11748 net.cpp:120] Setting up relu2
I0806 09:45:16.467038 11748 net.cpp:127] Top shape: 64 70 9 9 (362880)
I0806 09:45:16.467038 11748 layer_factory.hpp:74] Creating layer conv2_bn
I0806 09:45:16.467038 11748 net.cpp:90] Creating Layer conv2_bn
I0806 09:45:16.468039 11748 net.cpp:410] conv2_bn <- conv2
I0806 09:45:16.468039 11748 net.cpp:368] conv2_bn -> conv2_bn
I0806 09:45:16.468039 11748 net.cpp:120] Setting up conv2_bn
I0806 09:45:16.468039 11748 net.cpp:127] Top shape: 64 70 9 9 (362880)
I0806 09:45:16.469038 11748 layer_factory.hpp:74] Creating layer drop1
I0806 09:45:16.469038 11748 net.cpp:90] Creating Layer drop1
I0806 09:45:16.469038 11748 net.cpp:410] drop1 <- conv2_bn
I0806 09:45:16.469038 11748 net.cpp:357] drop1 -> conv2_bn (in-place)
I0806 09:45:16.470038 11748 net.cpp:120] Setting up drop1
I0806 09:45:16.470038 11748 net.cpp:127] Top shape: 64 70 9 9 (362880)
I0806 09:45:16.470038 11748 layer_factory.hpp:74] Creating layer conv2_1
I0806 09:45:16.470038 11748 net.cpp:90] Creating Layer conv2_1
I0806 09:45:16.471038 11748 net.cpp:410] conv2_1 <- conv2_bn
I0806 09:45:16.471038 11748 net.cpp:368] conv2_1 -> conv2_1
I0806 09:45:16.471038 11748 net.cpp:120] Setting up conv2_1
I0806 09:45:16.472038 11748 net.cpp:127] Top shape: 64 70 9 9 (362880)
I0806 09:45:16.472038 11748 layer_factory.hpp:74] Creating layer relu2
I0806 09:45:16.473038 11748 net.cpp:90] Creating Layer relu2
I0806 09:45:16.473038 11748 net.cpp:410] relu2 <- conv2_1
I0806 09:45:16.473038 11748 net.cpp:357] relu2 -> conv2_1 (in-place)
I0806 09:45:16.473038 11748 net.cpp:120] Setting up relu2
I0806 09:45:16.474038 11748 net.cpp:127] Top shape: 64 70 9 9 (362880)
I0806 09:45:16.474038 11748 layer_factory.hpp:74] Creating layer conv2_bn_1
I0806 09:45:16.474038 11748 net.cpp:90] Creating Layer conv2_bn_1
I0806 09:45:16.475039 11748 net.cpp:410] conv2_bn_1 <- conv2_1
I0806 09:45:16.475039 11748 net.cpp:368] conv2_bn_1 -> conv2_bn_1
I0806 09:45:16.475039 11748 net.cpp:120] Setting up conv2_bn_1
I0806 09:45:16.475039 11748 net.cpp:127] Top shape: 64 70 9 9 (362880)
I0806 09:45:16.476038 11748 layer_factory.hpp:74] Creating layer drop1_1
I0806 09:45:16.476038 11748 net.cpp:90] Creating Layer drop1_1
I0806 09:45:16.476038 11748 net.cpp:410] drop1_1 <- conv2_bn_1
I0806 09:45:16.476038 11748 net.cpp:357] drop1_1 -> conv2_bn_1 (in-place)
I0806 09:45:16.476038 11748 net.cpp:120] Setting up drop1_1
I0806 09:45:16.477038 11748 net.cpp:127] Top shape: 64 70 9 9 (362880)
I0806 09:45:16.477038 11748 layer_factory.hpp:74] Creating layer pool2
I0806 09:45:16.477038 11748 net.cpp:90] Creating Layer pool2
I0806 09:45:16.477038 11748 net.cpp:410] pool2 <- conv2_bn_1
I0806 09:45:16.478039 11748 net.cpp:368] pool2 -> pool2
I0806 09:45:16.478039 11748 net.cpp:120] Setting up pool2
I0806 09:45:16.478039 11748 net.cpp:127] Top shape: 64 70 5 5 (112000)
I0806 09:45:16.479038 11748 layer_factory.hpp:74] Creating layer ip1
I0806 09:45:16.479038 11748 net.cpp:90] Creating Layer ip1
I0806 09:45:16.479038 11748 net.cpp:410] ip1 <- pool2
I0806 09:45:16.480038 11748 net.cpp:368] ip1 -> ip1
I0806 09:45:16.480038 11748 net.cpp:120] Setting up ip1
I0806 09:45:16.484038 11748 net.cpp:127] Top shape: 64 300 (19200)
I0806 09:45:16.484038 11748 layer_factory.hpp:74] Creating layer relu3
I0806 09:45:16.484038 11748 net.cpp:90] Creating Layer relu3
I0806 09:45:16.485039 11748 net.cpp:410] relu3 <- ip1
I0806 09:45:16.485039 11748 net.cpp:357] relu3 -> ip1 (in-place)
I0806 09:45:16.485039 11748 net.cpp:120] Setting up relu3
I0806 09:45:16.486039 11748 net.cpp:127] Top shape: 64 300 (19200)
I0806 09:45:16.486039 11748 layer_factory.hpp:74] Creating layer drop2
I0806 09:45:16.486039 11748 net.cpp:90] Creating Layer drop2
I0806 09:45:16.486039 11748 net.cpp:410] drop2 <- ip1
I0806 09:45:16.487040 11748 net.cpp:357] drop2 -> ip1 (in-place)
I0806 09:45:16.487040 11748 net.cpp:120] Setting up drop2
I0806 09:45:16.487040 11748 net.cpp:127] Top shape: 64 300 (19200)
I0806 09:45:16.487040 11748 layer_factory.hpp:74] Creating layer ip3
I0806 09:45:16.487040 11748 net.cpp:90] Creating Layer ip3
I0806 09:45:16.488039 11748 net.cpp:410] ip3 <- ip1
I0806 09:45:16.488039 11748 net.cpp:368] ip3 -> ip3
I0806 09:45:16.488039 11748 net.cpp:120] Setting up ip3
I0806 09:45:16.488039 11748 net.cpp:127] Top shape: 64 10 (640)
I0806 09:45:16.489039 11748 layer_factory.hpp:74] Creating layer loss
I0806 09:45:16.489039 11748 net.cpp:90] Creating Layer loss
I0806 09:45:16.489039 11748 net.cpp:410] loss <- ip3
I0806 09:45:16.489039 11748 net.cpp:410] loss <- label
I0806 09:45:16.490039 11748 net.cpp:368] loss -> loss
I0806 09:45:16.490039 11748 net.cpp:120] Setting up loss
I0806 09:45:16.490039 11748 layer_factory.hpp:74] Creating layer loss
I0806 09:45:16.491039 11748 net.cpp:127] Top shape: (1)
I0806 09:45:16.491039 11748 net.cpp:129] with loss weight 1
I0806 09:45:16.491039 11748 net.cpp:192] loss needs backward computation.
I0806 09:45:16.491039 11748 net.cpp:192] ip3 needs backward computation.
I0806 09:45:16.492039 11748 net.cpp:192] drop2 needs backward computation.
I0806 09:45:16.492039 11748 net.cpp:192] relu3 needs backward computation.
I0806 09:45:16.492039 11748 net.cpp:192] ip1 needs backward computation.
I0806 09:45:16.492039 11748 net.cpp:192] pool2 needs backward computation.
I0806 09:45:16.493039 11748 net.cpp:192] drop1_1 needs backward computation.
I0806 09:45:16.493039 11748 net.cpp:192] conv2_bn_1 needs backward computation.
I0806 09:45:16.493039 11748 net.cpp:192] relu2 needs backward computation.
I0806 09:45:16.493039 11748 net.cpp:192] conv2_1 needs backward computation.
I0806 09:45:16.494040 11748 net.cpp:192] drop1 needs backward computation.
I0806 09:45:16.494040 11748 net.cpp:192] conv2_bn needs backward computation.
I0806 09:45:16.494040 11748 net.cpp:192] relu2 needs backward computation.
I0806 09:45:16.494040 11748 net.cpp:192] conv2 needs backward computation.
I0806 09:45:16.494040 11748 net.cpp:192] pool1 needs backward computation.
I0806 09:45:16.495039 11748 net.cpp:192] conv1_bn needs backward computation.
I0806 09:45:16.495039 11748 net.cpp:192] relu1 needs backward computation.
I0806 09:45:16.495039 11748 net.cpp:192] conv1 needs backward computation.
I0806 09:45:16.495039 11748 net.cpp:194] mnist does not need backward computation.
I0806 09:45:16.496039 11748 net.cpp:235] This network produces output loss
I0806 09:45:16.496039 11748 net.cpp:482] Collecting Learning Rate and Weight Decay.
I0806 09:45:16.496039 11748 net.cpp:247] Network initialization done.
I0806 09:45:16.497040 11748 net.cpp:248] Memory required for data: 24574724
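The memory figure above is the sum of every "Top shape" element count logged above (in-place tops included) times 4 bytes per single-precision float. A quick check:

counts = [
    50176, 64,                        # mnist: data, label
    929280, 929280, 929280, 232320,   # conv1, relu1, conv1_bn, pool1
    362880, 362880, 362880, 362880,   # conv2, relu2, conv2_bn, drop1
    362880, 362880, 362880, 362880,   # conv2_1, relu2, conv2_bn_1, drop1_1
    112000, 19200, 19200, 19200,      # pool2, ip1, relu3, drop2
    640, 1,                           # ip3, loss
]
print(sum(counts) * 4)  # 24574724, matching the line above

The test net's 38406808 bytes, reported further down, checks out the same way.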
I0806 09:45:16.497040 11748 solver.cpp:154] Creating test net (#0) specified by net file: lenet_train_test.prototxt
I0806 09:45:16.498039 11748 net.cpp:287] The NetState phase (1) differed from the phase (0) specified by a rule in layer mnist
I0806 09:45:16.498039 11748 net.cpp:42] Initializing net from parameters:
name: "LeNet"
state {
phase: TEST
}
layer {
name: "mnist"
type: "Data"
top: "data"
top: "label"
include {
phase: TEST
}
transform_param {
scale: 0.00390625
}
data_param {
source: "mnist-test-leveldb"
batch_size: 100
backend: LEVELDB
}
}
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 30
kernel_size: 7
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu1"
type: "ReLU"
bottom: "conv1"
top: "conv1"
}
layer {
name: "conv1_bn"
type: "BN"
bottom: "conv1"
top: "conv1_bn"
param {
lr_mult: 1
decay_mult: 0
}
param {
lr_mult: 1
decay_mult: 0
}
bn_param {
scale_filler {
type: "constant"
value: 1
}
shift_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1_bn"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "conv2"
type: "Convolution"
bottom: "pool1"
top: "conv2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 70
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "conv2"
top: "conv2"
}
layer {
name: "conv2_bn"
type: "BN"
bottom: "conv2"
top: "conv2_bn"
param {
lr_mult: 1
decay_mult: 0
}
param {
lr_mult: 1
decay_mult: 0
}
bn_param {
scale_filler {
type: "constant"
value: 1
}
shift_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "drop1"
type: "Dropout"
bottom: "conv2_bn"
top: "conv2_bn"
dropout_param {
dropout_ratio: 0.1
}
}
layer {
name: "conv2_1"
type: "Convolution"
bottom: "conv2_bn"
top: "conv2_1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 70
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "conv2_1"
top: "conv2_1"
}
layer {
name: "conv2_bn_1"
type: "BN"
bottom: "conv2_1"
top: "conv2_bn_1"
param {
lr_mult: 1
decay_mult: 0
}
param {
lr_mult: 1
decay_mult: 0
}
bn_param {
scale_filler {
type: "constant"
value: 1
}
shift_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "drop1_1"
type: "Dropout"
bottom: "conv2_bn_1"
top: "conv2_bn_1"
dropout_param {
dropout_ratio: 0.1
}
}
layer {
name: "pool2"
type: "Pooling"
bottom: "conv2_bn_1"
top: "pool2"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "ip1"
type: "InnerProduct"
bottom: "pool2"
top: "ip1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 300
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu3"
type: "ReLU"
bottom: "ip1"
top: "ip1"
}
layer {
name: "drop2"
type: "Dropout"
bottom: "ip1"
top: "ip1"
dropout_param {
dropout_ratio: 0.3
}
}
layer {
name: "ip3"
type: "InnerProduct"
bottom: "ip1"
top: "ip3"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 10
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "accuracy"
type: "Accuracy"
bottom: "ip3"
bottom: "label"
top: "accuracy"
include {
phase: TEST
}
}
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "ip3"
bottom: "label"
top: "loss"
}
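Worth noting about the test configuration above: test_iter (100, from the solver) times the test batch_size (100, here) covers exactly the 10,000-image MNIST test set, so each "Testing net" pass below scores the full set once.

test_iter, batch_size = 100, 100   # from the solver and data_param above
print(test_iter * batch_size)      # 10000 = size of the MNIST test set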
I0806 09:45:16.529042 11748 layer_factory.hpp:74] Creating layer mnist
I0806 09:45:16.530041 11748 net.cpp:90] Creating Layer mnist
I0806 09:45:16.530041 11748 net.cpp:368] mnist -> data
I0806 09:45:16.530041 11748 net.cpp:368] mnist -> label
I0806 09:45:16.531041 11748 net.cpp:120] Setting up mnist
I0806 09:45:16.538043 11748 db.cpp:20] Opened leveldb mnist-test-leveldb
I0806 09:45:16.539042 11748 data_layer.cpp:52] output data size: 100,1,28,28
I0806 09:45:16.539042 11748 net.cpp:127] Top shape: 100 1 28 28 (78400)
I0806 09:45:16.540042 11748 net.cpp:127] Top shape: 100 (100)
I0806 09:45:16.540042 11748 layer_factory.hpp:74] Creating layer label_mnist_1_split
I0806 09:45:16.540042 11748 net.cpp:90] Creating Layer label_mnist_1_split
I0806 09:45:16.541043 11748 net.cpp:410] label_mnist_1_split <- label
I0806 09:45:16.541043 11748 net.cpp:368] label_mnist_1_split -> label_mnist_1_split_0
I0806 09:45:16.541043 11748 net.cpp:368] label_mnist_1_split -> label_mnist_1_split_1
I0806 09:45:16.542042 11748 net.cpp:120] Setting up label_mnist_1_split
I0806 09:45:16.542042 11748 net.cpp:127] Top shape: 100 (100)
I0806 09:45:16.542042 11748 net.cpp:127] Top shape: 100 (100)
I0806 09:45:16.542042 11748 layer_factory.hpp:74] Creating layer conv1
I0806 09:45:16.543042 11748 net.cpp:90] Creating Layer conv1
I0806 09:45:16.543042 11748 net.cpp:410] conv1 <- data
I0806 09:45:16.543042 11748 net.cpp:368] conv1 -> conv1
I0806 09:45:16.543042 11748 net.cpp:120] Setting up conv1
I0806 09:45:16.544042 11748 net.cpp:127] Top shape: 100 30 22 22 (1452000)
I0806 09:45:16.545042 11748 layer_factory.hpp:74] Creating layer relu1
I0806 09:45:16.545042 11748 net.cpp:90] Creating Layer relu1
I0806 09:45:16.545042 11748 net.cpp:410] relu1 <- conv1
I0806 09:45:16.545042 11748 net.cpp:357] relu1 -> conv1 (in-place)
I0806 09:45:16.546042 11748 net.cpp:120] Setting up relu1
I0806 09:45:16.546042 11748 net.cpp:127] Top shape: 100 30 22 22 (1452000)
I0806 09:45:16.547042 11748 layer_factory.hpp:74] Creating layer conv1_bn
I0806 09:45:16.547042 11748 net.cpp:90] Creating Layer conv1_bn
I0806 09:45:16.547042 11748 net.cpp:410] conv1_bn <- conv1
I0806 09:45:16.547042 11748 net.cpp:368] conv1_bn -> conv1_bn
I0806 09:45:16.548043 11748 net.cpp:120] Setting up conv1_bn
I0806 09:45:16.548043 11748 net.cpp:127] Top shape: 100 30 22 22 (1452000)
I0806 09:45:16.548043 11748 layer_factory.hpp:74] Creating layer pool1
I0806 09:45:16.548043 11748 net.cpp:90] Creating Layer pool1
I0806 09:45:16.549042 11748 net.cpp:410] pool1 <- conv1_bn
I0806 09:45:16.549042 11748 net.cpp:368] pool1 -> pool1
I0806 09:45:16.549042 11748 net.cpp:120] Setting up pool1
I0806 09:45:16.549042 11748 net.cpp:127] Top shape: 100 30 11 11 (363000)
I0806 09:45:16.550042 11748 layer_factory.hpp:74] Creating layer conv2
I0806 09:45:16.550042 11748 net.cpp:90] Creating Layer conv2
I0806 09:45:16.550042 11748 net.cpp:410] conv2 <- pool1
I0806 09:45:16.550042 11748 net.cpp:368] conv2 -> conv2
I0806 09:45:16.550042 11748 net.cpp:120] Setting up conv2
I0806 09:45:16.551043 11748 net.cpp:127] Top shape: 100 70 9 9 (567000)
I0806 09:45:16.551043 11748 layer_factory.hpp:74] Creating layer relu2
I0806 09:45:16.552042 11748 net.cpp:90] Creating Layer relu2
I0806 09:45:16.552042 11748 net.cpp:410] relu2 <- conv2
I0806 09:45:16.553042 11748 net.cpp:357] relu2 -> conv2 (in-place)
I0806 09:45:16.553042 11748 net.cpp:120] Setting up relu2
I0806 09:45:16.553042 11748 net.cpp:127] Top shape: 100 70 9 9 (567000)
I0806 09:45:16.554042 11748 layer_factory.hpp:74] Creating layer conv2_bn
I0806 09:45:16.554042 11748 net.cpp:90] Creating Layer conv2_bn
I0806 09:45:16.554042 11748 net.cpp:410] conv2_bn <- conv2
I0806 09:45:16.554042 11748 net.cpp:368] conv2_bn -> conv2_bn
I0806 09:45:16.555043 11748 net.cpp:120] Setting up conv2_bn
I0806 09:45:16.555043 11748 net.cpp:127] Top shape: 100 70 9 9 (567000)
I0806 09:45:16.555043 11748 layer_factory.hpp:74] Creating layer drop1
I0806 09:45:16.556043 11748 net.cpp:90] Creating Layer drop1
I0806 09:45:16.556043 11748 net.cpp:410] drop1 <- conv2_bn
I0806 09:45:16.556043 11748 net.cpp:357] drop1 -> conv2_bn (in-place)
I0806 09:45:16.556043 11748 net.cpp:120] Setting up drop1
I0806 09:45:16.557044 11748 net.cpp:127] Top shape: 100 70 9 9 (567000)
I0806 09:45:16.557044 11748 layer_factory.hpp:74] Creating layer conv2_1
I0806 09:45:16.557044 11748 net.cpp:90] Creating Layer conv2_1
I0806 09:45:16.557044 11748 net.cpp:410] conv2_1 <- conv2_bn
I0806 09:45:16.558043 11748 net.cpp:368] conv2_1 -> conv2_1
I0806 09:45:16.558043 11748 net.cpp:120] Setting up conv2_1
I0806 09:45:16.559043 11748 net.cpp:127] Top shape: 100 70 9 9 (567000)
I0806 09:45:16.559043 11748 layer_factory.hpp:74] Creating layer relu2
I0806 09:45:16.559043 11748 net.cpp:90] Creating Layer relu2
I0806 09:45:16.560044 11748 net.cpp:410] relu2 <- conv2_1
I0806 09:45:16.560044 11748 net.cpp:357] relu2 -> conv2_1 (in-place)
I0806 09:45:16.560044 11748 net.cpp:120] Setting up relu2
I0806 09:45:16.560044 11748 net.cpp:127] Top shape: 100 70 9 9 (567000)
I0806 09:45:16.561043 11748 layer_factory.hpp:74] Creating layer conv2_bn_1
I0806 09:45:16.561043 11748 net.cpp:90] Creating Layer conv2_bn_1
I0806 09:45:16.561043 11748 net.cpp:410] conv2_bn_1 <- conv2_1
I0806 09:45:16.562043 11748 net.cpp:368] conv2_bn_1 -> conv2_bn_1
I0806 09:45:16.562043 11748 net.cpp:120] Setting up conv2_bn_1
I0806 09:45:16.562043 11748 net.cpp:127] Top shape: 100 70 9 9 (567000)
I0806 09:45:16.562043 11748 layer_factory.hpp:74] Creating layer drop1_1
I0806 09:45:16.562043 11748 net.cpp:90] Creating Layer drop1_1
I0806 09:45:16.563043 11748 net.cpp:410] drop1_1 <- conv2_bn_1
I0806 09:45:16.563043 11748 net.cpp:357] drop1_1 -> conv2_bn_1 (in-place)
I0806 09:45:16.563043 11748 net.cpp:120] Setting up drop1_1
I0806 09:45:16.563043 11748 net.cpp:127] Top shape: 100 70 9 9 (567000)
I0806 09:45:16.564043 11748 layer_factory.hpp:74] Creating layer pool2
I0806 09:45:16.564043 11748 net.cpp:90] Creating Layer pool2
I0806 09:45:16.564043 11748 net.cpp:410] pool2 <- conv2_bn_1
I0806 09:45:16.565043 11748 net.cpp:368] pool2 -> pool2
I0806 09:45:16.565043 11748 net.cpp:120] Setting up pool2
I0806 09:45:16.565043 11748 net.cpp:127] Top shape: 100 70 5 5 (175000)
I0806 09:45:16.566043 11748 layer_factory.hpp:74] Creating layer ip1
I0806 09:45:16.566043 11748 net.cpp:90] Creating Layer ip1
I0806 09:45:16.566043 11748 net.cpp:410] ip1 <- pool2
I0806 09:45:16.566043 11748 net.cpp:368] ip1 -> ip1
I0806 09:45:16.567044 11748 net.cpp:120] Setting up ip1
I0806 09:45:16.570044 11748 net.cpp:127] Top shape: 100 300 (30000)
I0806 09:45:16.571043 11748 layer_factory.hpp:74] Creating layer relu3
I0806 09:45:16.571043 11748 net.cpp:90] Creating Layer relu3
I0806 09:45:16.571043 11748 net.cpp:410] relu3 <- ip1
I0806 09:45:16.572044 11748 net.cpp:357] relu3 -> ip1 (in-place)
I0806 09:45:16.572044 11748 net.cpp:120] Setting up relu3
I0806 09:45:16.572044 11748 net.cpp:127] Top shape: 100 300 (30000)
I0806 09:45:16.573045 11748 layer_factory.hpp:74] Creating layer drop2
I0806 09:45:16.573045 11748 net.cpp:90] Creating Layer drop2
I0806 09:45:16.573045 11748 net.cpp:410] drop2 <- ip1
I0806 09:45:16.573045 11748 net.cpp:357] drop2 -> ip1 (in-place)
I0806 09:45:16.574044 11748 net.cpp:120] Setting up drop2
I0806 09:45:16.574044 11748 net.cpp:127] Top shape: 100 300 (30000)
I0806 09:45:16.574044 11748 layer_factory.hpp:74] Creating layer ip3
I0806 09:45:16.574044 11748 net.cpp:90] Creating Layer ip3
I0806 09:45:16.575044 11748 net.cpp:410] ip3 <- ip1
I0806 09:45:16.575044 11748 net.cpp:368] ip3 -> ip3
I0806 09:45:16.575044 11748 net.cpp:120] Setting up ip3
I0806 09:45:16.575044 11748 net.cpp:127] Top shape: 100 10 (1000)
I0806 09:45:16.576045 11748 layer_factory.hpp:74] Creating layer ip3_ip3_0_split
I0806 09:45:16.576045 11748 net.cpp:90] Creating Layer ip3_ip3_0_split
I0806 09:45:16.576045 11748 net.cpp:410] ip3_ip3_0_split <- ip3
I0806 09:45:16.576045 11748 net.cpp:368] ip3_ip3_0_split -> ip3_ip3_0_split_0
I0806 09:45:16.577044 11748 net.cpp:368] ip3_ip3_0_split -> ip3_ip3_0_split_1
I0806 09:45:16.577044 11748 net.cpp:120] Setting up ip3_ip3_0_split
I0806 09:45:16.577044 11748 net.cpp:127] Top shape: 100 10 (1000)
I0806 09:45:16.577044 11748 net.cpp:127] Top shape: 100 10 (1000)
I0806 09:45:16.578044 11748 layer_factory.hpp:74] Creating layer accuracy
I0806 09:45:16.578044 11748 net.cpp:90] Creating Layer accuracy
I0806 09:45:16.578044 11748 net.cpp:410] accuracy <- ip3_ip3_0_split_0
I0806 09:45:16.578044 11748 net.cpp:410] accuracy <- label_mnist_1_split_0
I0806 09:45:16.579044 11748 net.cpp:368] accuracy -> accuracy
I0806 09:45:16.579044 11748 net.cpp:120] Setting up accuracy
I0806 09:45:16.579044 11748 net.cpp:127] Top shape: (1)
I0806 09:45:16.579044 11748 layer_factory.hpp:74] Creating layer loss
I0806 09:45:16.580044 11748 net.cpp:90] Creating Layer loss
I0806 09:45:16.580044 11748 net.cpp:410] loss <- ip3_ip3_0_split_1
I0806 09:45:16.580044 11748 net.cpp:410] loss <- label_mnist_1_split_1
I0806 09:45:16.580044 11748 net.cpp:368] loss -> loss
I0806 09:45:16.580044 11748 net.cpp:120] Setting up loss
I0806 09:45:16.581044 11748 layer_factory.hpp:74] Creating layer loss
I0806 09:45:16.581044 11748 net.cpp:127] Top shape: (1)
I0806 09:45:16.581044 11748 net.cpp:129] with loss weight 1
I0806 09:45:16.581044 11748 net.cpp:192] loss needs backward computation.
I0806 09:45:16.582044 11748 net.cpp:194] accuracy does not need backward computation.
I0806 09:45:16.582044 11748 net.cpp:192] ip3_ip3_0_split needs backward computation.
I0806 09:45:16.582044 11748 net.cpp:192] ip3 needs backward computation.
I0806 09:45:16.582044 11748 net.cpp:192] drop2 needs backward computation.
I0806 09:45:16.583045 11748 net.cpp:192] relu3 needs backward computation.
I0806 09:45:16.583045 11748 net.cpp:192] ip1 needs backward computation.
I0806 09:45:16.583045 11748 net.cpp:192] pool2 needs backward computation.
I0806 09:45:16.583045 11748 net.cpp:192] drop1_1 needs backward computation.
I0806 09:45:16.584044 11748 net.cpp:192] conv2_bn_1 needs backward computation.
I0806 09:45:16.584044 11748 net.cpp:192] relu2 needs backward computation.
I0806 09:45:16.584044 11748 net.cpp:192] conv2_1 needs backward computation.
I0806 09:45:16.585044 11748 net.cpp:192] drop1 needs backward computation.
I0806 09:45:16.585044 11748 net.cpp:192] conv2_bn needs backward computation.
I0806 09:45:16.585044 11748 net.cpp:192] relu2 needs backward computation.
I0806 09:45:16.585044 11748 net.cpp:192] conv2 needs backward computation.
I0806 09:45:16.586045 11748 net.cpp:192] pool1 needs backward computation.
I0806 09:45:16.586045 11748 net.cpp:192] conv1_bn needs backward computation.
I0806 09:45:16.586045 11748 net.cpp:192] relu1 needs backward computation.
I0806 09:45:16.586045 11748 net.cpp:192] conv1 needs backward computation.
I0806 09:45:16.587044 11748 net.cpp:194] label_mnist_1_split does not need backward computation.
I0806 09:45:16.587044 11748 net.cpp:194] mnist does not need backward computation.
I0806 09:45:16.587044 11748 net.cpp:235] This network produces output accuracy
I0806 09:45:16.588044 11748 net.cpp:235] This network produces output loss
I0806 09:45:16.588044 11748 net.cpp:482] Collecting Learning Rate and Weight Decay.
I0806 09:45:16.588044 11748 net.cpp:247] Network initialization done.
I0806 09:45:16.588044 11748 net.cpp:248] Memory required for data: 38406808
I0806 09:45:16.589045 11748 solver.cpp:42] Solver scaffolding done.
I0806 09:45:16.589045 11748 solver.cpp:250] Solving LeNet
I0806 09:45:16.589045 11748 solver.cpp:251] Learning Rate Policy: step
I0806 09:45:16.591045 11748 solver.cpp:294] Iteration 0, Testing net (#0)
I0806 09:45:17.155077 11748 solver.cpp:343] Test net output #0: accuracy = 0.0602
I0806 09:45:17.155077 11748 solver.cpp:343] Test net output #1: loss = 82.0789 (* 1 = 82.0789 loss)
I0806 09:45:17.179078 11748 solver.cpp:214] Iteration 0, loss = 3.0437
I0806 09:45:17.180078 11748 solver.cpp:229] Train net output #0: loss = 3.0437 (* 1 = 3.0437 loss)
I0806 09:45:17.181078 11748 solver.cpp:486] Iteration 0, lr = 0.04
I0806 09:45:18.149134 11748 solver.cpp:214] Iteration 100, loss = 0.113036
I0806 09:45:18.149134 11748 solver.cpp:229] Train net output #0: loss = 0.113036 (* 1 = 0.113036 loss)
I0806 09:45:18.150135 11748 solver.cpp:486] Iteration 100, lr = 0.04
I0806 09:45:19.121189 11748 solver.cpp:214] Iteration 200, loss = 0.0645544
I0806 09:45:19.122189 11748 solver.cpp:229] Train net output #0: loss = 0.0645544 (* 1 = 0.0645544 loss)
I0806 09:45:19.122189 11748 solver.cpp:486] Iteration 200, lr = 0.04
I0806 09:45:20.095245 11748 solver.cpp:214] Iteration 300, loss = 0.101794
I0806 09:45:20.095245 11748 solver.cpp:229] Train net output #0: loss = 0.101794 (* 1 = 0.101794 loss)
I0806 09:45:20.096246 11748 solver.cpp:486] Iteration 300, lr = 0.04
I0806 09:45:21.066301 11748 solver.cpp:214] Iteration 400, loss = 0.0546454
I0806 09:45:21.067301 11748 solver.cpp:229] Train net output #0: loss = 0.0546455 (* 1 = 0.0546455 loss)
I0806 09:45:21.067301 11748 solver.cpp:486] Iteration 400, lr = 0.04
I0806 09:45:22.036356 11748 solver.cpp:214] Iteration 500, loss = 0.0764314
I0806 09:45:22.036356 11748 solver.cpp:229] Train net output #0: loss = 0.0764315 (* 1 = 0.0764315 loss)
I0806 09:45:22.037356 11748 solver.cpp:486] Iteration 500, lr = 0.04
I0806 09:45:23.016412 11748 solver.cpp:214] Iteration 600, loss = 0.0923337
I0806 09:45:23.017412 11748 solver.cpp:229] Train net output #0: loss = 0.0923337 (* 1 = 0.0923337 loss)
I0806 09:45:23.017412 11748 solver.cpp:486] Iteration 600, lr = 0.04
I0806 09:45:23.988468 11748 solver.cpp:214] Iteration 700, loss = 0.10121
I0806 09:45:23.989469 11748 solver.cpp:229] Train net output #0: loss = 0.10121 (* 1 = 0.10121 loss)
I0806 09:45:23.990468 11748 solver.cpp:486] Iteration 700, lr = 0.04
I0806 09:45:24.971524 11748 solver.cpp:214] Iteration 800, loss = 0.257519
I0806 09:45:24.971524 11748 solver.cpp:229] Train net output #0: loss = 0.25752 (* 1 = 0.25752 loss)
I0806 09:45:24.972524 11748 solver.cpp:486] Iteration 800, lr = 0.04
I0806 09:45:25.956580 11748 solver.cpp:214] Iteration 900, loss = 0.112153
I0806 09:45:25.957581 11748 solver.cpp:229] Train net output #0: loss = 0.112154 (* 1 = 0.112154 loss)
I0806 09:45:25.958580 11748 solver.cpp:486] Iteration 900, lr = 0.04
I0806 09:45:26.944638 11748 solver.cpp:294] Iteration 1000, Testing net (#0)
I0806 09:45:27.495668 11748 solver.cpp:343] Test net output #0: accuracy = 0.9851
I0806 09:45:27.495668 11748 solver.cpp:343] Test net output #1: loss = 0.0464547 (* 1 = 0.0464547 loss)
I0806 09:45:27.501669 11748 solver.cpp:214] Iteration 1000, loss = 0.0401987
I0806 09:45:27.501669 11748 solver.cpp:229] Train net output #0: loss = 0.0401988 (* 1 = 0.0401988 loss)
I0806 09:45:27.502670 11748 solver.cpp:486] Iteration 1000, lr = 0.04
I0806 09:45:28.481725 11748 solver.cpp:214] Iteration 1100, loss = 0.00360373
I0806 09:45:28.481725 11748 solver.cpp:229] Train net output #0: loss = 0.00360381 (* 1 = 0.00360381 loss)
I0806 09:45:28.482725 11748 solver.cpp:486] Iteration 1100, lr = 0.04
I0806 09:45:29.468781 11748 solver.cpp:214] Iteration 1200, loss = 0.115456
I0806 09:45:29.471781 11748 solver.cpp:229] Train net output #0: loss = 0.115456 (* 1 = 0.115456 loss)
I0806 09:45:29.478782 11748 solver.cpp:486] Iteration 1200, lr = 0.04
I0806 09:45:30.453838 11748 solver.cpp:214] Iteration 1300, loss = 0.0086775
I0806 09:45:30.454838 11748 solver.cpp:229] Train net output #0: loss = 0.0086776 (* 1 = 0.0086776 loss)
I0806 09:45:30.454838 11748 solver.cpp:486] Iteration 1300, lr = 0.04
I0806 09:45:31.430893 11748 solver.cpp:214] Iteration 1400, loss = 0.00986404
I0806 09:45:31.430893 11748 solver.cpp:229] Train net output #0: loss = 0.00986412 (* 1 = 0.00986412 loss)
I0806 09:45:31.431893 11748 solver.cpp:486] Iteration 1400, lr = 0.04
I0806 09:45:32.408949 11748 solver.cpp:214] Iteration 1500, loss = 0.0921006
I0806 09:45:32.409950 11748 solver.cpp:229] Train net output #0: loss = 0.0921007 (* 1 = 0.0921007 loss)
I0806 09:45:32.409950 11748 solver.cpp:486] Iteration 1500, lr = 0.04
I0806 09:45:33.378005 11748 solver.cpp:214] Iteration 1600, loss = 0.111908
I0806 09:45:33.379005 11748 solver.cpp:229] Train net output #0: loss = 0.111908 (* 1 = 0.111908 loss)
I0806 09:45:33.381006 11748 solver.cpp:486] Iteration 1600, lr = 0.04
I0806 09:45:34.351060 11748 solver.cpp:214] Iteration 1700, loss = 0.0105853
I0806 09:45:34.352061 11748 solver.cpp:229] Train net output #0: loss = 0.0105854 (* 1 = 0.0105854 loss)
I0806 09:45:34.352061 11748 solver.cpp:486] Iteration 1700, lr = 0.04
I0806 09:45:35.321116 11748 solver.cpp:214] Iteration 1800, loss = 0.0114328
I0806 09:45:35.322116 11748 solver.cpp:229] Train net output #0: loss = 0.0114329 (* 1 = 0.0114329 loss)
I0806 09:45:35.322116 11748 solver.cpp:486] Iteration 1800, lr = 0.04
I0806 09:45:36.299172 11748 solver.cpp:214] Iteration 1900, loss = 0.152364
I0806 09:45:36.300173 11748 solver.cpp:229] Train net output #0: loss = 0.152365 (* 1 = 0.152365 loss)
I0806 09:45:36.300173 11748 solver.cpp:486] Iteration 1900, lr = 0.04
I0806 09:45:37.268228 11748 solver.cpp:294] Iteration 2000, Testing net (#0)
I0806 09:45:37.825259 11748 solver.cpp:343] Test net output #0: accuracy = 0.9906
I0806 09:45:37.826259 11748 solver.cpp:343] Test net output #1: loss = 0.0314774 (* 1 = 0.0314774 loss)
I0806 09:45:37.832260 11748 solver.cpp:214] Iteration 2000, loss = 0.00243739
I0806 09:45:37.833261 11748 solver.cpp:229] Train net output #0: loss = 0.00243742 (* 1 = 0.00243742 loss)
I0806 09:45:37.833261 11748 solver.cpp:486] Iteration 2000, lr = 0.04
I0806 09:45:38.817317 11748 solver.cpp:214] Iteration 2100, loss = 0.0282853
I0806 09:45:38.818316 11748 solver.cpp:229] Train net output #0: loss = 0.0282853 (* 1 = 0.0282853 loss)
I0806 09:45:38.819316 11748 solver.cpp:486] Iteration 2100, lr = 0.04
I0806 09:45:39.798372 11748 solver.cpp:214] Iteration 2200, loss = 0.00365397
I0806 09:45:39.798372 11748 solver.cpp:229] Train net output #0: loss = 0.003654 (* 1 = 0.003654 loss)
I0806 09:45:39.799372 11748 solver.cpp:486] Iteration 2200, lr = 0.04