[name: "/device:CPU:0"
device_type: "CPU"
memory_limit: 268435456
locality {
}
incarnation: 551970982242999557
, name: "/device:GPU:0"
device_type: "GPU"
memory_limit: 2403991552
locality {
bus_id: 1
links {
link {
device_id: 1
type: "StreamExecutor"
strength: 1
}
}
}
incarnation: 10096734097526406606
physical_device_desc: "device: 0, name: GeForce GTX TITAN X, pci bus id: 0000:02:00.0, compute capability: 5.2"
, name: "/device:GPU:1"
device_type: "GPU"
memory_limit: 2421358592
locality {
bus_id: 1
links {
link {
type: "StreamExecutor"
strength: 1
}
}
}
incarnation: 3303530385180522172
physical_device_desc: "device: 1, name: GeForce GTX TITAN X, pci bus id: 0000:03:00.0, compute capability: 5.2"
, name: "/device:GPU:2"
device_type: "GPU"
memory_limit: 2421358592
locality {
bus_id: 2
numa_node: 1
links {
link {
device_id: 3
type: "StreamExecutor"
strength: 1
}
}
}
incarnation: 6077438548209564868
physical_device_desc: "device: 2, name: GeForce GTX TITAN X, pci bus id: 0000:82:00.0, compute capability: 5.2"
, name: "/device:GPU:3"
device_type: "GPU"
memory_limit: 2421358592
locality {
bus_id: 2
numa_node: 1
links {
link {
device_id: 2
type: "StreamExecutor"
strength: 1
}
}
}
incarnation: 7810493206943392654
physical_device_desc: "device: 3, name: GeForce GTX TITAN X, pci bus id: 0000:83:00.0, compute capability: 5.2"
]
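For reference, a device list in this protobuf format is what TensorFlow's device_lib helper prints; the sketch below is an assumption about how the listing above was generated, not taken from the original script.

# Minimal sketch (assumption): print the visible CPU/GPU devices in the same
# DeviceAttributes format as the block above.
from tensorflow.python.client import device_lib

print(device_lib.list_local_devices())  # CPU:0 plus the four GeForce GTX TITAN X GPUs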
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 32, 32, 128) 3584
_________________________________________________________________
conv2d_1 (Conv2D) (None, 30, 30, 128) 147584
_________________________________________________________________
max_pooling2d (MaxPooling2D) (None, 15, 15, 128) 0
_________________________________________________________________
dropout (Dropout) (None, 15, 15, 128) 0
_________________________________________________________________
conv2d_2 (Conv2D) (None, 15, 15, 256) 295168
_________________________________________________________________
conv2d_3 (Conv2D) (None, 13, 13, 256) 590080
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 6, 6, 256) 0
_________________________________________________________________
dropout_1 (Dropout) (None, 6, 6, 256) 0
_________________________________________________________________
conv2d_4 (Conv2D) (None, 6, 6, 512) 1180160
_________________________________________________________________
conv2d_5 (Conv2D) (None, 4, 4, 512) 2359808
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 2, 2, 512) 0
_________________________________________________________________
dropout_2 (Dropout) (None, 2, 2, 512) 0
_________________________________________________________________
flatten (Flatten) (None, 2048) 0
_________________________________________________________________
dense (Dense) (None, 1000) 2049000
_________________________________________________________________
dropout_3 (Dropout) (None, 1000) 0
_________________________________________________________________
dense_1 (Dense) (None, 1000) 1001000
_________________________________________________________________
dropout_4 (Dropout) (None, 1000) 0
_________________________________________________________________
dense_2 (Dense) (None, 100) 100100
=================================================================
Total params: 7,726,484
Trainable params: 7,726,484
Non-trainable params: 0
_________________________________________________________________
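The summary above fully determines the layer stack, so the model can be rebuilt as sketched below. The 3x3 kernels and the 'same'/'valid' padding pattern follow from the printed output shapes and parameter counts; the ReLU/softmax activations and the 0.25/0.5 dropout rates are assumptions not recorded in this log.

# Sketch of a Sequential model reproducing the summary above (CIFAR-100-sized input).
# Activations and dropout rates are assumptions; shapes and parameter counts match the log.
from tensorflow.keras import layers, models

def build_model(num_classes=100):
    model = models.Sequential([
        layers.Conv2D(128, 3, padding='same', activation='relu', input_shape=(32, 32, 3)),
        layers.Conv2D(128, 3, activation='relu'),
        layers.MaxPooling2D(),
        layers.Dropout(0.25),
        layers.Conv2D(256, 3, padding='same', activation='relu'),
        layers.Conv2D(256, 3, activation='relu'),
        layers.MaxPooling2D(),
        layers.Dropout(0.25),
        layers.Conv2D(512, 3, padding='same', activation='relu'),
        layers.Conv2D(512, 3, activation='relu'),
        layers.MaxPooling2D(),
        layers.Dropout(0.25),
        layers.Flatten(),
        layers.Dense(1000, activation='relu'),
        layers.Dropout(0.5),
        layers.Dense(1000, activation='relu'),
        layers.Dropout(0.5),
        layers.Dense(num_classes, activation='softmax'),
    ])
    return model

build_model().summary()  # reproduces the layer/shape/param table shown above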
Epoch 1/50
313/313 - 96s - loss: 4.5627 - accuracy: 0.0126 - val_loss: 4.4776 - val_accuracy: 0.0242
Epoch 2/50
313/313 - 46s - loss: 4.3533 - accuracy: 0.0287 - val_loss: 4.2234 - val_accuracy: 0.0470
Epoch 3/50
313/313 - 46s - loss: 4.1918 - accuracy: 0.0434 - val_loss: 4.0726 - val_accuracy: 0.0636
Epoch 4/50
313/313 - 45s - loss: 4.0665 - accuracy: 0.0612 - val_loss: 3.8846 - val_accuracy: 0.1007
Epoch 5/50
313/313 - 46s - loss: 3.9012 - accuracy: 0.0851 - val_loss: 3.7244 - val_accuracy: 0.1179
Epoch 6/50
313/313 - 45s - loss: 3.7531 - accuracy: 0.1077 - val_loss: 3.5380 - val_accuracy: 0.1524
Epoch 7/50
313/313 - 46s - loss: 3.6198 - accuracy: 0.1289 - val_loss: 3.4467 - val_accuracy: 0.1662
Epoch 8/50
313/313 - 46s - loss: 3.4991 - accuracy: 0.1494 - val_loss: 3.2980 - val_accuracy: 0.1921
Epoch 9/50
313/313 - 45s - loss: 3.3890 - accuracy: 0.1690 - val_loss: 3.1419 - val_accuracy: 0.2233
Epoch 10/50
313/313 - 46s - loss: 3.2652 - accuracy: 0.1891 - val_loss: 3.0157 - val_accuracy: 0.2478
Epoch 11/50
313/313 - 46s - loss: 3.1812 - accuracy: 0.2075 - val_loss: 2.8746 - val_accuracy: 0.2739
Epoch 12/50
313/313 - 46s - loss: 3.0862 - accuracy: 0.2230 - val_loss: 2.8107 - val_accuracy: 0.2834
Epoch 13/50
313/313 - 46s - loss: 3.0082 - accuracy: 0.2382 - val_loss: 2.7658 - val_accuracy: 0.2955
Epoch 14/50
313/313 - 45s - loss: 2.9284 - accuracy: 0.2527 - val_loss: 2.6229 - val_accuracy: 0.3169
Epoch 15/50
313/313 - 46s - loss: 2.8563 - accuracy: 0.2674 - val_loss: 2.6023 - val_accuracy: 0.3186
Epoch 16/50
313/313 - 46s - loss: 2.7810 - accuracy: 0.2837 - val_loss: 2.5003 - val_accuracy: 0.3452
Epoch 17/50
313/313 - 46s - loss: 2.7136 - accuracy: 0.2972 - val_loss: 2.4621 - val_accuracy: 0.3552
Epoch 18/50
313/313 - 46s - loss: 2.6537 - accuracy: 0.3099 - val_loss: 2.4113 - val_accuracy: 0.3647
Epoch 19/50
313/313 - 46s - loss: 2.5879 - accuracy: 0.3239 - val_loss: 2.3573 - val_accuracy: 0.3791
Epoch 20/50
313/313 - 45s - loss: 2.5320 - accuracy: 0.3356 - val_loss: 2.2350 - val_accuracy: 0.4064
Epoch 21/50
313/313 - 45s - loss: 2.4732 - accuracy: 0.3495 - val_loss: 2.2538 - val_accuracy: 0.3983
Epoch 22/50
313/313 - 46s - loss: 2.4372 - accuracy: 0.3578 - val_loss: 2.1552 - val_accuracy: 0.4205
Epoch 23/50
313/313 - 46s - loss: 2.3875 - accuracy: 0.3668 - val_loss: 2.2039 - val_accuracy: 0.4107
Epoch 24/50
313/313 - 46s - loss: 2.3463 - accuracy: 0.3759 - val_loss: 2.1000 - val_accuracy: 0.4335
Epoch 25/50
313/313 - 45s - loss: 2.3019 - accuracy: 0.3867 - val_loss: 2.1185 - val_accuracy: 0.4270
Epoch 26/50
313/313 - 46s - loss: 2.2693 - accuracy: 0.3960 - val_loss: 2.0701 - val_accuracy: 0.4421
Epoch 27/50
313/313 - 45s - loss: 2.2145 - accuracy: 0.4058 - val_loss: 2.0762 - val_accuracy: 0.4418
Epoch 28/50
313/313 - 46s - loss: 2.1881 - accuracy: 0.4101 - val_loss: 2.0070 - val_accuracy: 0.4548
Epoch 29/50
313/313 - 46s - loss: 2.1517 - accuracy: 0.4195 - val_loss: 1.9347 - val_accuracy: 0.4758
Epoch 30/50
313/313 - 46s - loss: 2.1113 - accuracy: 0.4312 - val_loss: 1.9287 - val_accuracy: 0.4716
Epoch 31/50
313/313 - 46s - loss: 2.0943 - accuracy: 0.4334 - val_loss: 1.9585 - val_accuracy: 0.4705
Epoch 32/50
313/313 - 46s - loss: 2.0462 - accuracy: 0.4440 - val_loss: 1.8643 - val_accuracy: 0.4879
Epoch 33/50
313/313 - 46s - loss: 2.0195 - accuracy: 0.4480 - val_loss: 1.9210 - val_accuracy: 0.4754
Epoch 34/50
313/313 - 46s - loss: 1.9908 - accuracy: 0.4555 - val_loss: 1.9023 - val_accuracy: 0.4871
Epoch 35/50
313/313 - 46s - loss: 1.9564 - accuracy: 0.4649 - val_loss: 1.8884 - val_accuracy: 0.4862
Epoch 36/50
313/313 - 46s - loss: 1.9434 - accuracy: 0.4695 - val_loss: 1.8998 - val_accuracy: 0.4893
Epoch 37/50
313/313 - 46s - loss: 1.9033 - accuracy: 0.4749 - val_loss: 1.8693 - val_accuracy: 0.4901
Epoch 38/50
313/313 - 45s - loss: 1.8821 - accuracy: 0.4847 - val_loss: 1.7958 - val_accuracy: 0.5109
Epoch 39/50
313/313 - 45s - loss: 1.8671 - accuracy: 0.4882 - val_loss: 1.8209 - val_accuracy: 0.5053
Epoch 40/50
313/313 - 46s - loss: 1.8225 - accuracy: 0.4968 - val_loss: 1.7467 - val_accuracy: 0.5191
Epoch 41/50
313/313 - 46s - loss: 1.8061 - accuracy: 0.5010 - val_loss: 1.8137 - val_accuracy: 0.5071
Epoch 42/50
313/313 - 46s - loss: 1.7910 - accuracy: 0.5038 - val_loss: 1.7452 - val_accuracy: 0.5245
Epoch 43/50
313/313 - 46s - loss: 1.7693 - accuracy: 0.5097 - val_loss: 1.7398 - val_accuracy: 0.5270
Epoch 44/50
313/313 - 46s - loss: 1.7477 - accuracy: 0.5134 - val_loss: 1.7259 - val_accuracy: 0.5319
Epoch 45/50
313/313 - 46s - loss: 1.7176 - accuracy: 0.5219 - val_loss: 1.7031 - val_accuracy: 0.5341
Epoch 46/50
313/313 - 46s - loss: 1.6958 - accuracy: 0.5271 - val_loss: 1.6630 - val_accuracy: 0.5456
Epoch 47/50
313/313 - 46s - loss: 1.6713 - accuracy: 0.5337 - val_loss: 1.6906 - val_accuracy: 0.5412
Epoch 48/50
313/313 - 46s - loss: 1.6568 - accuracy: 0.5319 - val_loss: 1.6614 - val_accuracy: 0.5447
Epoch 49/50
313/313 - 46s - loss: 1.6329 - accuracy: 0.5405 - val_loss: 1.6378 - val_accuracy: 0.5501
Epoch 50/50
313/313 - 46s - loss: 1.6241 - accuracy: 0.5420 - val_loss: 1.6548 - val_accuracy: 0.5485
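One-line-per-epoch output of the form "313/313 - 46s - loss: ..." is what Keras prints for fit(..., verbose=2). The sketch below, reusing the build_model() constructor sketched after the first model summary, shows a compile/fit call consistent with this log; the batch size of 128, the 40,000/10,000 CIFAR-100 train/validation split, and the Adam optimizer are assumptions inferred from the 313 steps per epoch, not confirmed by the output.

# Sketch only: batch size, optimizer, and the 40,000/10,000 split are assumptions
# consistent with 313 steps per epoch; build_model() is the constructor sketched above.
import tensorflow as tf

(x_train, y_train), _ = tf.keras.datasets.cifar100.load_data()
x_train = x_train.astype('float32') / 255.0
x_val, y_val = x_train[40000:], y_train[40000:]
x_train, y_train = x_train[:40000], y_train[:40000]

model = build_model()
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
model.fit(x_train, y_train,
          batch_size=128, epochs=50, verbose=2,  # verbose=2: one line per epoch, as in this log
          validation_data=(x_val, y_val))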
Model: "sequential_1"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_6 (Conv2D) (None, 32, 32, 128) 3584
_________________________________________________________________
conv2d_7 (Conv2D) (None, 30, 30, 128) 147584
_________________________________________________________________
max_pooling2d_3 (MaxPooling2 (None, 15, 15, 128) 0
_________________________________________________________________
dropout_5 (Dropout) (None, 15, 15, 128) 0
_________________________________________________________________
conv2d_8 (Conv2D) (None, 15, 15, 256) 295168
_________________________________________________________________
conv2d_9 (Conv2D) (None, 13, 13, 256) 590080
_________________________________________________________________
max_pooling2d_4 (MaxPooling2 (None, 6, 6, 256) 0
_________________________________________________________________
dropout_6 (Dropout) (None, 6, 6, 256) 0
_________________________________________________________________
conv2d_10 (Conv2D) (None, 6, 6, 512) 1180160
_________________________________________________________________
conv2d_11 (Conv2D) (None, 4, 4, 512) 2359808
_________________________________________________________________
max_pooling2d_5 (MaxPooling2 (None, 2, 2, 512) 0
_________________________________________________________________
dropout_7 (Dropout) (None, 2, 2, 512) 0
_________________________________________________________________
flatten_1 (Flatten) (None, 2048) 0
_________________________________________________________________
dense_3 (Dense) (None, 1000) 2049000
_________________________________________________________________
dropout_8 (Dropout) (None, 1000) 0
_________________________________________________________________
dense_4 (Dense) (None, 1000) 1001000
_________________________________________________________________
dropout_9 (Dropout) (None, 1000) 0
_________________________________________________________________
dense_5 (Dense) (None, 100) 100100
=================================================================
Total params: 7,726,484
Trainable params: 7,726,484
Non-trainable params: 0
_________________________________________________________________
Epoch 1/50
313/313 - 48s - loss: 4.5584 - accuracy: 0.0126 - val_loss: 4.4796 - val_accuracy: 0.0134
Epoch 2/50
313/313 - 47s - loss: 4.4743 - accuracy: 0.0186 - val_loss: 4.4494 - val_accuracy: 0.0215
Epoch 3/50
313/313 - 47s - loss: 4.3786 - accuracy: 0.0221 - val_loss: 4.2716 - val_accuracy: 0.0344
Epoch 4/50
313/313 - 47s - loss: 4.2566 - accuracy: 0.0356 - val_loss: 4.0839 - val_accuracy: 0.0631
Epoch 5/50
313/313 - 47s - loss: 4.0394 - accuracy: 0.0633 - val_loss: 3.8183 - val_accuracy: 0.0950
Epoch 6/50
313/313 - 47s - loss: 3.8410 - accuracy: 0.0867 - val_loss: 3.6076 - val_accuracy: 0.1366
Epoch 7/50
313/313 - 47s - loss: 3.6798 - accuracy: 0.1128 - val_loss: 3.4160 - val_accuracy: 0.1677
Epoch 8/50
313/313 - 47s - loss: 3.5429 - accuracy: 0.1362 - val_loss: 3.2690 - val_accuracy: 0.1973
Epoch 9/50
313/313 - 47s - loss: 3.4179 - accuracy: 0.1617 - val_loss: 3.1576 - val_accuracy: 0.2117
Epoch 10/50
313/313 - 47s - loss: 3.3070 - accuracy: 0.1777 - val_loss: 3.0498 - val_accuracy: 0.2314
Epoch 11/50
313/313 - 47s - loss: 3.1974 - accuracy: 0.1986 - val_loss: 3.0040 - val_accuracy: 0.2406
Epoch 12/50
313/313 - 47s - loss: 3.1082 - accuracy: 0.2145 - val_loss: 2.8307 - val_accuracy: 0.2669
Epoch 13/50
313/313 - 47s - loss: 3.0067 - accuracy: 0.2332 - val_loss: 2.8213 - val_accuracy: 0.2792
Epoch 14/50
313/313 - 47s - loss: 2.9266 - accuracy: 0.2502 - val_loss: 2.6327 - val_accuracy: 0.3095
Epoch 15/50
313/313 - 47s - loss: 2.8429 - accuracy: 0.2682 - val_loss: 2.5886 - val_accuracy: 0.3191
Epoch 16/50
313/313 - 47s - loss: 2.7766 - accuracy: 0.2823 - val_loss: 2.5471 - val_accuracy: 0.3342
Epoch 17/50
313/313 - 47s - loss: 2.7050 - accuracy: 0.2955 - val_loss: 2.5059 - val_accuracy: 0.3442
Epoch 18/50
313/313 - 47s - loss: 2.6392 - accuracy: 0.3104 - val_loss: 2.3964 - val_accuracy: 0.3651
Epoch 19/50
313/313 - 47s - loss: 2.5827 - accuracy: 0.3195 - val_loss: 2.3141 - val_accuracy: 0.3833
Epoch 20/50
313/313 - 47s - loss: 2.5325 - accuracy: 0.3331 - val_loss: 2.3015 - val_accuracy: 0.3860
Epoch 21/50
313/313 - 47s - loss: 2.4803 - accuracy: 0.3439 - val_loss: 2.2351 - val_accuracy: 0.4010
Epoch 22/50
313/313 - 47s - loss: 2.4344 - accuracy: 0.3546 - val_loss: 2.2397 - val_accuracy: 0.4020
Epoch 23/50
313/313 - 47s - loss: 2.3818 - accuracy: 0.3666 - val_loss: 2.1886 - val_accuracy: 0.4073
Epoch 24/50
313/313 - 47s - loss: 2.3402 - accuracy: 0.3723 - val_loss: 2.1051 - val_accuracy: 0.4313
Epoch 25/50
313/313 - 47s - loss: 2.3104 - accuracy: 0.3822 - val_loss: 2.1155 - val_accuracy: 0.4293
Epoch 26/50
313/313 - 46s - loss: 2.2599 - accuracy: 0.3930 - val_loss: 2.1235 - val_accuracy: 0.4264
Epoch 27/50
313/313 - 42s - loss: 2.2235 - accuracy: 0.4006 - val_loss: 2.0431 - val_accuracy: 0.4461
Epoch 28/50
313/313 - 42s - loss: 2.1803 - accuracy: 0.4094 - val_loss: 1.9612 - val_accuracy: 0.4619
Epoch 29/50
313/313 - 42s - loss: 2.1470 - accuracy: 0.4174 - val_loss: 1.9434 - val_accuracy: 0.4653
Epoch 30/50
313/313 - 42s - loss: 2.1140 - accuracy: 0.4246 - val_loss: 2.0052 - val_accuracy: 0.4469
Epoch 31/50
313/313 - 42s - loss: 2.0884 - accuracy: 0.4307 - val_loss: 1.9213 - val_accuracy: 0.4689
Epoch 32/50
313/313 - 42s - loss: 2.0461 - accuracy: 0.4407 - val_loss: 1.8896 - val_accuracy: 0.4783
Epoch 33/50
313/313 - 42s - loss: 2.0140 - accuracy: 0.4507 - val_loss: 1.8598 - val_accuracy: 0.4894
Epoch 34/50
313/313 - 42s - loss: 1.9997 - accuracy: 0.4519 - val_loss: 1.8367 - val_accuracy: 0.4933
Epoch 35/50
313/313 - 41s - loss: 1.9621 - accuracy: 0.4594 - val_loss: 1.8556 - val_accuracy: 0.4889
Epoch 36/50
313/313 - 47s - loss: 1.9385 - accuracy: 0.4669 - val_loss: 1.8460 - val_accuracy: 0.4942
Epoch 37/50
313/313 - 47s - loss: 1.9129 - accuracy: 0.4707 - val_loss: 1.7726 - val_accuracy: 0.5137
Epoch 38/50
313/313 - 47s - loss: 1.8866 - accuracy: 0.4796 - val_loss: 1.7878 - val_accuracy: 0.5081
Epoch 39/50
313/313 - 47s - loss: 1.8640 - accuracy: 0.4854 - val_loss: 1.7799 - val_accuracy: 0.5097
Epoch 40/50
313/313 - 47s - loss: 1.8305 - accuracy: 0.4922 - val_loss: 1.7394 - val_accuracy: 0.5148
Epoch 41/50
313/313 - 47s - loss: 1.8080 - accuracy: 0.4964 - val_loss: 1.7427 - val_accuracy: 0.5149
Epoch 42/50
313/313 - 47s - loss: 1.7862 - accuracy: 0.5024 - val_loss: 1.6957 - val_accuracy: 0.5267
Epoch 43/50
313/313 - 47s - loss: 1.7802 - accuracy: 0.5052 - val_loss: 1.7259 - val_accuracy: 0.5193
Epoch 44/50
313/313 - 47s - loss: 1.7491 - accuracy: 0.5086 - val_loss: 1.6566 - val_accuracy: 0.5408
Epoch 45/50
313/313 - 47s - loss: 1.7234 - accuracy: 0.5179 - val_loss: 1.7564 - val_accuracy: 0.5240
Epoch 46/50
313/313 - 47s - loss: 1.6975 - accuracy: 0.5231 - val_loss: 1.6727 - val_accuracy: 0.5369
Epoch 47/50
313/313 - 47s - loss: 1.6830 - accuracy: 0.5263 - val_loss: 1.6689 - val_accuracy: 0.5413
Epoch 48/50
313/313 - 47s - loss: 1.6689 - accuracy: 0.5304 - val_loss: 1.6735 - val_accuracy: 0.5355
Epoch 49/50
313/313 - 47s - loss: 1.6423 - accuracy: 0.5379 - val_loss: 1.6160 - val_accuracy: 0.5530
Epoch 50/50
313/313 - 47s - loss: 1.6231 - accuracy: 0.5444 - val_loss: 1.6521 - val_accuracy: 0.5419
Model: "sequential_2"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_12 (Conv2D) (None, 32, 32, 128) 3584
_________________________________________________________________
conv2d_13 (Conv2D) (None, 30, 30, 128) 147584
_________________________________________________________________
max_pooling2d_6 (MaxPooling2 (None, 15, 15, 128) 0
_________________________________________________________________
dropout_10 (Dropout) (None, 15, 15, 128) 0
_________________________________________________________________
conv2d_14 (Conv2D) (None, 15, 15, 256) 295168
_________________________________________________________________
conv2d_15 (Conv2D) (None, 13, 13, 256) 590080
_________________________________________________________________
max_pooling2d_7 (MaxPooling2 (None, 6, 6, 256) 0
_________________________________________________________________
dropout_11 (Dropout) (None, 6, 6, 256) 0
_________________________________________________________________
conv2d_16 (Conv2D) (None, 6, 6, 512) 1180160
_________________________________________________________________
conv2d_17 (Conv2D) (None, 4, 4, 512) 2359808
_________________________________________________________________
max_pooling2d_8 (MaxPooling2 (None, 2, 2, 512) 0
_________________________________________________________________
dropout_12 (Dropout) (None, 2, 2, 512) 0
_________________________________________________________________
flatten_2 (Flatten) (None, 2048) 0
_________________________________________________________________
dense_6 (Dense) (None, 1000) 2049000
_________________________________________________________________
dropout_13 (Dropout) (None, 1000) 0
_________________________________________________________________
dense_7 (Dense) (None, 1000) 1001000
_________________________________________________________________
dropout_14 (Dropout) (None, 1000) 0
_________________________________________________________________
dense_8 (Dense) (None, 100) 100100
=================================================================
Total params: 7,726,484
Trainable params: 7,726,484
Non-trainable params: 0
_________________________________________________________________
Epoch 1/50
313/313 - 49s - loss: 4.5638 - accuracy: 0.0127 - val_loss: 4.4975 - val_accuracy: 0.0256
Epoch 2/50
313/313 - 48s - loss: 4.3571 - accuracy: 0.0284 - val_loss: 4.2125 - val_accuracy: 0.0429
Epoch 3/50
313/313 - 48s - loss: 4.1796 - accuracy: 0.0448 - val_loss: 4.0339 - val_accuracy: 0.0641
Epoch 4/50
313/313 - 48s - loss: 4.0166 - accuracy: 0.0677 - val_loss: 3.7705 - val_accuracy: 0.1132
Epoch 5/50
313/313 - 48s - loss: 3.8618 - accuracy: 0.0870 - val_loss: 3.6712 - val_accuracy: 0.1269
Epoch 6/50
313/313 - 48s - loss: 3.7235 - accuracy: 0.1065 - val_loss: 3.4551 - val_accuracy: 0.1635
Epoch 7/50
313/313 - 48s - loss: 3.5704 - accuracy: 0.1359 - val_loss: 3.4059 - val_accuracy: 0.1652
Epoch 8/50
313/313 - 48s - loss: 3.4461 - accuracy: 0.1581 - val_loss: 3.1828 - val_accuracy: 0.2130
Epoch 9/50
313/313 - 48s - loss: 3.3260 - accuracy: 0.1786 - val_loss: 3.0753 - val_accuracy: 0.2325
Epoch 10/50
313/313 - 48s - loss: 3.2184 - accuracy: 0.1975 - val_loss: 3.0270 - val_accuracy: 0.2428
Epoch 11/50
313/313 - 48s - loss: 3.1130 - accuracy: 0.2181 - val_loss: 2.8363 - val_accuracy: 0.2770
Epoch 12/50
313/313 - 48s - loss: 3.0314 - accuracy: 0.2336 - val_loss: 2.7716 - val_accuracy: 0.2897
Epoch 13/50
313/313 - 48s - loss: 2.9501 - accuracy: 0.2478 - val_loss: 2.6754 - val_accuracy: 0.3068
Epoch 14/50
313/313 - 48s - loss: 2.8722 - accuracy: 0.2659 - val_loss: 2.6243 - val_accuracy: 0.3196
Epoch 15/50
313/313 - 48s - loss: 2.7969 - accuracy: 0.2824 - val_loss: 2.5756 - val_accuracy: 0.3257
Epoch 16/50
313/313 - 48s - loss: 2.7318 - accuracy: 0.2937 - val_loss: 2.5086 - val_accuracy: 0.3405
Epoch 17/50
313/313 - 48s - loss: 2.6645 - accuracy: 0.3051 - val_loss: 2.3874 - val_accuracy: 0.3676
Epoch 18/50
313/313 - 48s - loss: 2.5996 - accuracy: 0.3188 - val_loss: 2.3881 - val_accuracy: 0.3712
Epoch 19/50
313/313 - 48s - loss: 2.5470 - accuracy: 0.3309 - val_loss: 2.3073 - val_accuracy: 0.3840
Epoch 20/50
313/313 - 48s - loss: 2.4907 - accuracy: 0.3442 - val_loss: 2.3091 - val_accuracy: 0.3851
Epoch 21/50
313/313 - 48s - loss: 2.4504 - accuracy: 0.3539 - val_loss: 2.2197 - val_accuracy: 0.4020
Epoch 22/50
313/313 - 48s - loss: 2.4035 - accuracy: 0.3603 - val_loss: 2.1829 - val_accuracy: 0.4092
Epoch 23/50
313/313 - 48s - loss: 2.3421 - accuracy: 0.3776 - val_loss: 2.1024 - val_accuracy: 0.4275
Epoch 24/50
313/313 - 48s - loss: 2.3116 - accuracy: 0.3830 - val_loss: 2.1657 - val_accuracy: 0.4205
Epoch 25/50
313/313 - 48s - loss: 2.2660 - accuracy: 0.3946 - val_loss: 2.1332 - val_accuracy: 0.4270
Epoch 26/50
313/313 - 48s - loss: 2.2320 - accuracy: 0.4002 - val_loss: 2.0643 - val_accuracy: 0.4476
Epoch 27/50
313/313 - 48s - loss: 2.1934 - accuracy: 0.4103 - val_loss: 2.0547 - val_accuracy: 0.4440
Epoch 28/50
313/313 - 48s - loss: 2.1632 - accuracy: 0.4186 - val_loss: 2.0554 - val_accuracy: 0.4455
Epoch 29/50
313/313 - 48s - loss: 2.1199 - accuracy: 0.4275 - val_loss: 2.0494 - val_accuracy: 0.4468
Epoch 30/50
313/313 - 48s - loss: 2.0955 - accuracy: 0.4342 - val_loss: 1.9326 - val_accuracy: 0.4739
Epoch 31/50
313/313 - 48s - loss: 2.0543 - accuracy: 0.4407 - val_loss: 1.8611 - val_accuracy: 0.4889
Epoch 32/50
313/313 - 48s - loss: 2.0329 - accuracy: 0.4483 - val_loss: 1.8743 - val_accuracy: 0.4896
Epoch 33/50
313/313 - 48s - loss: 2.0032 - accuracy: 0.4523 - val_loss: 1.8867 - val_accuracy: 0.4824
Epoch 34/50
313/313 - 48s - loss: 1.9792 - accuracy: 0.4583 - val_loss: 1.8219 - val_accuracy: 0.4995
Epoch 35/50
313/313 - 48s - loss: 1.9442 - accuracy: 0.4661 - val_loss: 1.8730 - val_accuracy: 0.4928
Epoch 36/50
313/313 - 48s - loss: 1.9225 - accuracy: 0.4709 - val_loss: 1.8170 - val_accuracy: 0.5047
Epoch 37/50
313/313 - 48s - loss: 1.8915 - accuracy: 0.4817 - val_loss: 1.8489 - val_accuracy: 0.4952
Epoch 38/50
313/313 - 48s - loss: 1.8611 - accuracy: 0.4875 - val_loss: 1.7860 - val_accuracy: 0.5089
Epoch 39/50
313/313 - 48s - loss: 1.8438 - accuracy: 0.4920 - val_loss: 1.7283 - val_accuracy: 0.5260
Epoch 40/50
313/313 - 48s - loss: 1.8238 - accuracy: 0.4947 - val_loss: 1.7185 - val_accuracy: 0.5244
Epoch 41/50
313/313 - 48s - loss: 1.8003 - accuracy: 0.5028 - val_loss: 1.7957 - val_accuracy: 0.5122
Epoch 42/50
313/313 - 48s - loss: 1.7856 - accuracy: 0.5048 - val_loss: 1.7412 - val_accuracy: 0.5214
Epoch 43/50
313/313 - 48s - loss: 1.7515 - accuracy: 0.5134 - val_loss: 1.7712 - val_accuracy: 0.5212
Epoch 44/50
313/313 - 48s - loss: 1.7360 - accuracy: 0.5157 - val_loss: 1.6801 - val_accuracy: 0.5348
Epoch 45/50
313/313 - 48s - loss: 1.7040 - accuracy: 0.5239 - val_loss: 1.6963 - val_accuracy: 0.5369
Epoch 46/50
313/313 - 48s - loss: 1.6852 - accuracy: 0.5288 - val_loss: 1.7367 - val_accuracy: 0.5254
Epoch 47/50
313/313 - 48s - loss: 1.6784 - accuracy: 0.5310 - val_loss: 1.6457 - val_accuracy: 0.5480
Epoch 48/50
313/313 - 48s - loss: 1.6445 - accuracy: 0.5396 - val_loss: 1.6656 - val_accuracy: 0.5445
Epoch 49/50
313/313 - 48s - loss: 1.6261 - accuracy: 0.5426 - val_loss: 1.6421 - val_accuracy: 0.5468
Epoch 50/50
313/313 - 48s - loss: 1.6057 - accuracy: 0.5502 - val_loss: 1.6429 - val_accuracy: 0.5515
Model: "sequential_3"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_18 (Conv2D) (None, 32, 32, 128) 3584
_________________________________________________________________
conv2d_19 (Conv2D) (None, 30, 30, 128) 147584
_________________________________________________________________
max_pooling2d_9 (MaxPooling2 (None, 15, 15, 128) 0
_________________________________________________________________
dropout_15 (Dropout) (None, 15, 15, 128) 0
_________________________________________________________________
conv2d_20 (Conv2D) (None, 15, 15, 256) 295168
_________________________________________________________________
conv2d_21 (Conv2D) (None, 13, 13, 256) 590080
_________________________________________________________________
max_pooling2d_10 (MaxPooling (None, 6, 6, 256) 0
_________________________________________________________________
dropout_16 (Dropout) (None, 6, 6, 256) 0
_________________________________________________________________
conv2d_22 (Conv2D) (None, 6, 6, 512) 1180160
_________________________________________________________________
conv2d_23 (Conv2D) (None, 4, 4, 512) 2359808
_________________________________________________________________
max_pooling2d_11 (MaxPooling (None, 2, 2, 512) 0
_________________________________________________________________
dropout_17 (Dropout) (None, 2, 2, 512) 0
_________________________________________________________________
flatten_3 (Flatten) (None, 2048) 0
_________________________________________________________________
dense_9 (Dense) (None, 1000) 2049000
_________________________________________________________________
dropout_18 (Dropout) (None, 1000) 0
_________________________________________________________________
dense_10 (Dense) (None, 1000) 1001000
_________________________________________________________________
dropout_19 (Dropout) (None, 1000) 0
_________________________________________________________________
dense_11 (Dense) (None, 100) 100100
=================================================================
Total params: 7,726,484
Trainable params: 7,726,484
Non-trainable params: 0
_________________________________________________________________
Epoch 1/50
313/313 - 49s - loss: 4.5748 - accuracy: 0.0111 - val_loss: 4.5022 - val_accuracy: 0.0182
Epoch 2/50
313/313 - 48s - loss: 4.4805 - accuracy: 0.0181 - val_loss: 4.4517 - val_accuracy: 0.0213
Epoch 3/50
313/313 - 48s - loss: 4.3593 - accuracy: 0.0231 - val_loss: 4.2750 - val_accuracy: 0.0380
Epoch 4/50
313/313 - 48s - loss: 4.2229 - accuracy: 0.0402 - val_loss: 4.0272 - val_accuracy: 0.0678
Epoch 5/50
313/313 - 48s - loss: 4.0063 - accuracy: 0.0674 - val_loss: 3.7564 - val_accuracy: 0.1087
Epoch 6/50
313/313 - 48s - loss: 3.8181 - accuracy: 0.0922 - val_loss: 3.5876 - val_accuracy: 0.1345
Epoch 7/50
313/313 - 48s - loss: 3.6761 - accuracy: 0.1143 - val_loss: 3.4217 - val_accuracy: 0.1666
Epoch 8/50
313/313 - 48s - loss: 3.5350 - accuracy: 0.1387 - val_loss: 3.2677 - val_accuracy: 0.1916
Epoch 9/50
313/313 - 48s - loss: 3.4115 - accuracy: 0.1595 - val_loss: 3.2097 - val_accuracy: 0.2049
Epoch 10/50
313/313 - 48s - loss: 3.2909 - accuracy: 0.1817 - val_loss: 3.0126 - val_accuracy: 0.2406
Epoch 11/50
313/313 - 48s - loss: 3.1869 - accuracy: 0.2016 - val_loss: 3.0104 - val_accuracy: 0.2368
Epoch 12/50
313/313 - 48s - loss: 3.1018 - accuracy: 0.2170 - val_loss: 2.8295 - val_accuracy: 0.2739
Epoch 13/50
313/313 - 48s - loss: 3.0131 - accuracy: 0.2353 - val_loss: 2.7254 - val_accuracy: 0.3013
Epoch 14/50
313/313 - 48s - loss: 2.9238 - accuracy: 0.2515 - val_loss: 2.6819 - val_accuracy: 0.3047
Epoch 15/50
313/313 - 48s - loss: 2.8422 - accuracy: 0.2709 - val_loss: 2.5971 - val_accuracy: 0.3207
Epoch 16/50
313/313 - 48s - loss: 2.7657 - accuracy: 0.2836 - val_loss: 2.5050 - val_accuracy: 0.3426
Epoch 17/50
313/313 - 48s - loss: 2.7088 - accuracy: 0.2936 - val_loss: 2.4267 - val_accuracy: 0.3623
Epoch 18/50
313/313 - 48s - loss: 2.6589 - accuracy: 0.3064 - val_loss: 2.3963 - val_accuracy: 0.3672
Epoch 19/50
313/313 - 48s - loss: 2.5847 - accuracy: 0.3198 - val_loss: 2.3523 - val_accuracy: 0.3801
Epoch 20/50
313/313 - 48s - loss: 2.5376 - accuracy: 0.3306 - val_loss: 2.3038 - val_accuracy: 0.3930
Epoch 21/50
313/313 - 48s - loss: 2.4776 - accuracy: 0.3432 - val_loss: 2.2354 - val_accuracy: 0.4038
Epoch 22/50
313/313 - 48s - loss: 2.4353 - accuracy: 0.3534 - val_loss: 2.2137 - val_accuracy: 0.4112
Epoch 23/50
313/313 - 48s - loss: 2.3813 - accuracy: 0.3634 - val_loss: 2.1533 - val_accuracy: 0.4257
Epoch 24/50
313/313 - 48s - loss: 2.3510 - accuracy: 0.3706 - val_loss: 2.1253 - val_accuracy: 0.4247
Epoch 25/50
313/313 - 48s - loss: 2.2968 - accuracy: 0.3808 - val_loss: 2.0546 - val_accuracy: 0.4412
Epoch 26/50
313/313 - 48s - loss: 2.2666 - accuracy: 0.3901 - val_loss: 2.0750 - val_accuracy: 0.4366
Epoch 27/50
313/313 - 48s - loss: 2.2319 - accuracy: 0.3998 - val_loss: 2.0425 - val_accuracy: 0.4458
Epoch 28/50
313/313 - 48s - loss: 2.1839 - accuracy: 0.4117 - val_loss: 1.9782 - val_accuracy: 0.4594
Epoch 29/50
313/313 - 48s - loss: 2.1536 - accuracy: 0.4146 - val_loss: 1.9398 - val_accuracy: 0.4679
Epoch 30/50
313/313 - 48s - loss: 2.1264 - accuracy: 0.4238 - val_loss: 1.9833 - val_accuracy: 0.4595
Epoch 31/50
313/313 - 48s - loss: 2.0869 - accuracy: 0.4315 - val_loss: 1.9122 - val_accuracy: 0.4804
Epoch 32/50
313/313 - 48s - loss: 2.0607 - accuracy: 0.4387 - val_loss: 1.9129 - val_accuracy: 0.4788
Epoch 33/50
313/313 - 48s - loss: 2.0327 - accuracy: 0.4418 - val_loss: 1.8317 - val_accuracy: 0.4957
Epoch 34/50
313/313 - 48s - loss: 1.9979 - accuracy: 0.4521 - val_loss: 1.8930 - val_accuracy: 0.4834
Epoch 35/50
313/313 - 48s - loss: 1.9801 - accuracy: 0.4576 - val_loss: 1.8614 - val_accuracy: 0.4896
Epoch 36/50
313/313 - 48s - loss: 1.9419 - accuracy: 0.4660 - val_loss: 1.7870 - val_accuracy: 0.5066
Epoch 37/50
313/313 - 48s - loss: 1.9230 - accuracy: 0.4689 - val_loss: 1.8757 - val_accuracy: 0.4885
Epoch 38/50
313/313 - 48s - loss: 1.9041 - accuracy: 0.4724 - val_loss: 1.7859 - val_accuracy: 0.5106
Epoch 39/50
313/313 - 48s - loss: 1.8709 - accuracy: 0.4828 - val_loss: 1.7635 - val_accuracy: 0.5124
Epoch 40/50
313/313 - 48s - loss: 1.8486 - accuracy: 0.4865 - val_loss: 1.7504 - val_accuracy: 0.5168
Epoch 41/50
313/313 - 48s - loss: 1.8158 - accuracy: 0.4947 - val_loss: 1.7286 - val_accuracy: 0.5197
Epoch 42/50
313/313 - 48s - loss: 1.7996 - accuracy: 0.4976 - val_loss: 1.7381 - val_accuracy: 0.5231
Epoch 43/50
313/313 - 48s - loss: 1.7788 - accuracy: 0.5044 - val_loss: 1.7133 - val_accuracy: 0.5262
Epoch 44/50
313/313 - 48s - loss: 1.7632 - accuracy: 0.5072 - val_loss: 1.7157 - val_accuracy: 0.5290
Epoch 45/50
313/313 - 48s - loss: 1.7286 - accuracy: 0.5171 - val_loss: 1.7328 - val_accuracy: 0.5215
Epoch 46/50
313/313 - 48s - loss: 1.7139 - accuracy: 0.5203 - val_loss: 1.6498 - val_accuracy: 0.5410
Epoch 47/50
313/313 - 48s - loss: 1.6961 - accuracy: 0.5232 - val_loss: 1.6781 - val_accuracy: 0.5381
Epoch 48/50
313/313 - 48s - loss: 1.6662 - accuracy: 0.5337 - val_loss: 1.6601 - val_accuracy: 0.5441
Epoch 49/50
313/313 - 48s - loss: 1.6557 - accuracy: 0.5358 - val_loss: 1.6954 - val_accuracy: 0.5343
Epoch 50/50
313/313 - 48s - loss: 1.6378 - accuracy: 0.5370 - val_loss: 1.5824 - val_accuracy: 0.5563
Model: "sequential_4"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_24 (Conv2D) (None, 32, 32, 128) 3584
_________________________________________________________________
conv2d_25 (Conv2D) (None, 30, 30, 128) 147584
_________________________________________________________________
max_pooling2d_12 (MaxPooling (None, 15, 15, 128) 0
_________________________________________________________________
dropout_20 (Dropout) (None, 15, 15, 128) 0
_________________________________________________________________
conv2d_26 (Conv2D) (None, 15, 15, 256) 295168
_________________________________________________________________
conv2d_27 (Conv2D) (None, 13, 13, 256) 590080
_________________________________________________________________
max_pooling2d_13 (MaxPooling (None, 6, 6, 256) 0
_________________________________________________________________
dropout_21 (Dropout) (None, 6, 6, 256) 0
_________________________________________________________________
conv2d_28 (Conv2D) (None, 6, 6, 512) 1180160
_________________________________________________________________
conv2d_29 (Conv2D) (None, 4, 4, 512) 2359808
_________________________________________________________________
max_pooling2d_14 (MaxPooling (None, 2, 2, 512) 0
_________________________________________________________________
dropout_22 (Dropout) (None, 2, 2, 512) 0
_________________________________________________________________
flatten_4 (Flatten) (None, 2048) 0
_________________________________________________________________
dense_12 (Dense) (None, 1000) 2049000
_________________________________________________________________
dropout_23 (Dropout) (None, 1000) 0
_________________________________________________________________
dense_13 (Dense) (None, 1000) 1001000
_________________________________________________________________
dropout_24 (Dropout) (None, 1000) 0
_________________________________________________________________
dense_14 (Dense) (None, 100) 100100
=================================================================
Total params: 7,726,484
Trainable params: 7,726,484
Non-trainable params: 0
_________________________________________________________________
Epoch 1/50
313/313 - 49s - loss: 4.5523 - accuracy: 0.0134 - val_loss: 4.4756 - val_accuracy: 0.0193
Epoch 2/50
313/313 - 48s - loss: 4.4611 - accuracy: 0.0191 - val_loss: 4.3372 - val_accuracy: 0.0198
Epoch 3/50
313/313 - 48s - loss: 4.3132 - accuracy: 0.0273 - val_loss: 4.2258 - val_accuracy: 0.0401
Epoch 4/50
313/313 - 48s - loss: 4.1835 - accuracy: 0.0452 - val_loss: 3.9609 - val_accuracy: 0.0801
Epoch 5/50
313/313 - 48s - loss: 3.9705 - accuracy: 0.0706 - val_loss: 3.7852 - val_accuracy: 0.1019
Epoch 6/50
313/313 - 48s - loss: 3.8004 - accuracy: 0.0944 - val_loss: 3.6226 - val_accuracy: 0.1339
Epoch 7/50
313/313 - 48s - loss: 3.6419 - accuracy: 0.1163 - val_loss: 3.4493 - val_accuracy: 0.1596
Epoch 8/50
313/313 - 48s - loss: 3.5093 - accuracy: 0.1408 - val_loss: 3.2853 - val_accuracy: 0.1879
Epoch 9/50
313/313 - 48s - loss: 3.3849 - accuracy: 0.1596 - val_loss: 3.1519 - val_accuracy: 0.2183
Epoch 10/50
313/313 - 48s - loss: 3.2836 - accuracy: 0.1840 - val_loss: 3.0731 - val_accuracy: 0.2175
Epoch 11/50
313/313 - 48s - loss: 3.1782 - accuracy: 0.2033 - val_loss: 2.8912 - val_accuracy: 0.2621
Epoch 12/50
313/313 - 48s - loss: 3.0878 - accuracy: 0.2190 - val_loss: 2.8348 - val_accuracy: 0.2738
Epoch 13/50
313/313 - 48s - loss: 3.0043 - accuracy: 0.2397 - val_loss: 2.7050 - val_accuracy: 0.2988
Epoch 14/50
313/313 - 48s - loss: 2.9175 - accuracy: 0.2544 - val_loss: 2.6330 - val_accuracy: 0.3175
Epoch 15/50
313/313 - 46s - loss: 2.8399 - accuracy: 0.2704 - val_loss: 2.5640 - val_accuracy: 0.3288
Epoch 16/50
313/313 - 43s - loss: 2.7687 - accuracy: 0.2830 - val_loss: 2.5094 - val_accuracy: 0.3400
Epoch 17/50
313/313 - 43s - loss: 2.6931 - accuracy: 0.2989 - val_loss: 2.4587 - val_accuracy: 0.3537
Epoch 18/50
313/313 - 43s - loss: 2.6288 - accuracy: 0.3154 - val_loss: 2.4005 - val_accuracy: 0.3614
Epoch 19/50
313/313 - 43s - loss: 2.5762 - accuracy: 0.3241 - val_loss: 2.2977 - val_accuracy: 0.3881
Epoch 20/50
313/313 - 43s - loss: 2.5268 - accuracy: 0.3335 - val_loss: 2.3077 - val_accuracy: 0.3833
Epoch 21/50
313/313 - 43s - loss: 2.4748 - accuracy: 0.3451 - val_loss: 2.2484 - val_accuracy: 0.4000
Epoch 22/50
313/313 - 43s - loss: 2.4202 - accuracy: 0.3550 - val_loss: 2.1712 - val_accuracy: 0.4135
Epoch 23/50
313/313 - 43s - loss: 2.3776 - accuracy: 0.3683 - val_loss: 2.1494 - val_accuracy: 0.4159
Epoch 24/50
313/313 - 43s - loss: 2.3328 - accuracy: 0.3765 - val_loss: 2.1832 - val_accuracy: 0.4108
Epoch 25/50
313/313 - 48s - loss: 2.2932 - accuracy: 0.3810 - val_loss: 2.0881 - val_accuracy: 0.4307
Epoch 26/50
313/313 - 48s - loss: 2.2571 - accuracy: 0.3913 - val_loss: 2.0531 - val_accuracy: 0.4421
Epoch 27/50
313/313 - 48s - loss: 2.2218 - accuracy: 0.4007 - val_loss: 2.1374 - val_accuracy: 0.4289
Epoch 28/50
313/313 - 48s - loss: 2.1894 - accuracy: 0.4090 - val_loss: 1.9739 - val_accuracy: 0.4608
Epoch 29/50
313/313 - 48s - loss: 2.1478 - accuracy: 0.4177 - val_loss: 1.9253 - val_accuracy: 0.4724
Epoch 30/50
313/313 - 48s - loss: 2.1158 - accuracy: 0.4273 - val_loss: 1.9649 - val_accuracy: 0.4573
Epoch 31/50
313/313 - 48s - loss: 2.0896 - accuracy: 0.4346 - val_loss: 2.0402 - val_accuracy: 0.4503
Epoch 32/50
313/313 - 48s - loss: 2.0512 - accuracy: 0.4408 - val_loss: 1.8843 - val_accuracy: 0.4811
Epoch 33/50
313/313 - 48s - loss: 2.0230 - accuracy: 0.4489 - val_loss: 1.8830 - val_accuracy: 0.4834
Epoch 34/50
313/313 - 48s - loss: 1.9919 - accuracy: 0.4564 - val_loss: 1.9074 - val_accuracy: 0.4827
Epoch 35/50
313/313 - 48s - loss: 1.9671 - accuracy: 0.4623 - val_loss: 1.8758 - val_accuracy: 0.4861
Epoch 36/50
313/313 - 48s - loss: 1.9369 - accuracy: 0.4662 - val_loss: 1.8304 - val_accuracy: 0.4959
Epoch 37/50
313/313 - 48s - loss: 1.9162 - accuracy: 0.4746 - val_loss: 1.8138 - val_accuracy: 0.5008
Epoch 38/50
313/313 - 48s - loss: 1.8926 - accuracy: 0.4800 - val_loss: 1.8278 - val_accuracy: 0.4965
Epoch 39/50
313/313 - 48s - loss: 1.8696 - accuracy: 0.4846 - val_loss: 1.8296 - val_accuracy: 0.4979
Epoch 40/50
313/313 - 48s - loss: 1.8420 - accuracy: 0.4929 - val_loss: 1.7450 - val_accuracy: 0.5178
Epoch 41/50
313/313 - 48s - loss: 1.8072 - accuracy: 0.4994 - val_loss: 1.7600 - val_accuracy: 0.5147
Epoch 42/50
313/313 - 48s - loss: 1.7857 - accuracy: 0.5053 - val_loss: 1.6995 - val_accuracy: 0.5282
Epoch 43/50
313/313 - 48s - loss: 1.7681 - accuracy: 0.5083 - val_loss: 1.7125 - val_accuracy: 0.5256
Epoch 44/50
313/313 - 48s - loss: 1.7487 - accuracy: 0.5133 - val_loss: 1.7166 - val_accuracy: 0.5258
Epoch 45/50
313/313 - 48s - loss: 1.7219 - accuracy: 0.5233 - val_loss: 1.6826 - val_accuracy: 0.5339
Epoch 46/50
313/313 - 48s - loss: 1.6979 - accuracy: 0.5246 - val_loss: 1.6346 - val_accuracy: 0.5425
Epoch 47/50
313/313 - 48s - loss: 1.6914 - accuracy: 0.5260 - val_loss: 1.6819 - val_accuracy: 0.5380
Epoch 48/50
313/313 - 48s - loss: 1.6582 - accuracy: 0.5345 - val_loss: 1.6224 - val_accuracy: 0.5478
Epoch 49/50
313/313 - 48s - loss: 1.6473 - accuracy: 0.5357 - val_loss: 1.6802 - val_accuracy: 0.5372
Epoch 50/50
313/313 - 48s - loss: 1.6243 - accuracy: 0.5412 - val_loss: 1.6264 - val_accuracy: 0.5497
Output format: [Data augmentation-Dropout-l2 reg] (cross-validation runs / average)
[010] ( [0.5447999835014343, 0.5440999865531921, 0.5526999831199646, 0.557200014591217, 0.5454999804496765] / 0.5488599896430969 )
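Per the format line above, [010] encodes the augmentation/dropout/L2 combination used for these runs (here: dropout only), and the trailing value is the mean of the five cross-validation fold accuracies, which can be checked directly:

# The reported average is the plain mean of the five fold accuracies listed above.
fold_accuracies = [0.5447999835014343, 0.5440999865531921, 0.5526999831199646,
                   0.557200014591217, 0.5454999804496765]
print(sum(fold_accuracies) / len(fold_accuracies))  # 0.5488599896430969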