diff --git a/UNet/old_Train_model.sh b/UNet/#Train_model.sh#
similarity index 69%
rename from UNet/old_Train_model.sh
rename to UNet/#Train_model.sh#
index e3c351e697ab96f69bb2c00b0f445ed962a75433..a1d7573ff67c4b081dce70e53860045ac1496ced 100644
--- a/UNet/old_Train_model.sh
+++ b/UNet/#Train_model.sh#
@@ -1,21 +1,23 @@
 #!/usr/local_rwth/bin/zsh
 ### Project account
-#SBATCH --account=thes1075
+#SBATCH --account=rwth0744
 
 ### Cluster Partition
 #SBATCH --partition=c18g
 
 #SBATCH -J training_model
-#SBATCH -o Sim_logs/UNet_V1_%J.log
+#SBATCH -o Sim_logs/UNet_64_V14_%J.log
  
 #SBATCH --gres=gpu:1
 #SBATCH --time=90:00:00
 ### Request memory you need for your job in MB
-#SBATCH --mem-per-cpu=10000
+#SBATCH --mem-per-cpu=15000
 #SBATCH --mem-per-gpu=16000
 module load cuda
 module load python/3.7.11
 pip3 install --user -Iv -q torch==1.10.1
-time python3 ./UNet_V1.py
+#time python3 ./UNet_V12.py
+#time python3 ./UNet_V13.py
+time python3 ./UNet_V14.py
 #print GPU Information
 #$CUDA_ROOT/extras/demo_suite/deviceQuery -noprompt
diff --git a/UNet/.#Train_model.sh b/UNet/.#Train_model.sh
new file mode 120000
index 0000000000000000000000000000000000000000..d0dfaea869caedd8207c2c126db76e1ea917a51f
--- /dev/null
+++ b/UNet/.#Train_model.sh
@@ -0,0 +1 @@
+yk138599@login18-x-1.hpc.itc.rwth-aachen.de.71560:1644816141
\ No newline at end of file
diff --git a/UNet/2_Train_model.sh b/UNet/2_Train_model.sh
index 77deaabbb09bc91efef7c2360d7df51c38288f97..2bd0b5d45b92e05ea5f349bd249a9ae810bd0a19 100644
--- a/UNet/2_Train_model.sh
+++ b/UNet/2_Train_model.sh
@@ -6,7 +6,7 @@
 #SBATCH --partition=c18g
 
 #SBATCH -J training_model
-#SBATCH -o Sim_logs/UNet_V9_1_%J.log
+#SBATCH -o Sim_logs/UNet_V10_%J.log
  
 #SBATCH --gres=gpu:1
 #SBATCH --time=50:00:00
@@ -17,6 +17,6 @@ module load cuda
 module load python/3.7.11
 echo "9.1 k=7 lr=1e-06"
 pip3 install --user -Iv -q torch==1.10.1
-time python3 ./UNet_V9_1.py
+time python3 ./UNet_V10.py
 #print GPU Information
 #$CUDA_ROOT/extras/demo_suite/deviceQuery -noprompt
diff --git a/UNet/Sim_logs/UNet_64_V12_25613707.log b/UNet/Sim_logs/UNet_64_V12_25613707.log
new file mode 100644
index 0000000000000000000000000000000000000000..f56025323deea037f4cda0a0918777a8be8234d2
--- /dev/null
+++ b/UNet/Sim_logs/UNet_64_V12_25613707.log
@@ -0,0 +1,520 @@
+(OK) Loading cuda 10.2.89
+(OK) Loading python 3.7.11
+(!!) The SciPy Stack is available: http://www.scipy.org/stackspec.html
+ Built with GCC compilers.
+Collecting torch==1.10.1
+  Using cached torch-1.10.1-cp37-cp37m-manylinux1_x86_64.whl (881.9 MB)
+Collecting typing-extensions
+  Using cached typing_extensions-4.1.1-py3-none-any.whl (26 kB)
+Installing collected packages: typing-extensions, torch
+  WARNING: The scripts convert-caffe2-to-onnx, convert-onnx-to-caffe2 and torchrun are installed in '/home/yk138599/.local/bin' which is not on PATH.
+  Consider adding this directory to PATH or, if you prefer to suppress this warning, use --no-warn-script-location.
+Successfully installed torch-1.10.1 typing-extensions-4.1.1
+WARNING: You are using pip version 21.2.4; however, version 22.0.3 is available.
+You should consider upgrading via the '/usr/local_rwth/sw/python/3.7.11/x86_64/bin/python3.7 -m pip install --upgrade pip' command.
+number auf epochs: 500
+batchsize: 4
+learning rate: 3e-05
+kernel size is: 7
+ seed is: 1910338326
+Epoch [0], train_loss: 0.279216, val_loss: 0.272456, val_acc: 0.210326
+Epoch [1], train_loss: 0.252276, val_loss: 0.200921, val_acc: 3.433561
+Epoch [2], train_loss: 0.196350, val_loss: 0.170627, val_acc: 5.908857
+Epoch [3], train_loss: 0.169660, val_loss: 0.150642, val_acc: 8.629842
+Epoch [4], train_loss: 0.153131, val_loss: 0.137030, val_acc: 10.252650
+Epoch [5], train_loss: 0.139458, val_loss: 0.129110, val_acc: 11.926013
+Epoch [6], train_loss: 0.127830, val_loss: 0.116686, val_acc: 13.753400
+Epoch [7], train_loss: 0.118366, val_loss: 0.115906, val_acc: 12.808616
+Epoch [8], train_loss: 0.110485, val_loss: 0.107045, val_acc: 14.069930
+Epoch [9], train_loss: 0.103520, val_loss: 0.098818, val_acc: 15.464754
+Epoch [10], train_loss: 0.097392, val_loss: 0.094108, val_acc: 15.252579
+Epoch [11], train_loss: 0.091962, val_loss: 0.086852, val_acc: 17.935156
+Epoch [12], train_loss: 0.087081, val_loss: 0.083619, val_acc: 17.794321
+Epoch [13], train_loss: 0.082815, val_loss: 0.079706, val_acc: 18.267374
+Epoch [14], train_loss: 0.078697, val_loss: 0.076700, val_acc: 18.586863
+Epoch [15], train_loss: 0.075048, val_loss: 0.070452, val_acc: 20.032438
+Epoch [16], train_loss: 0.071900, val_loss: 0.069577, val_acc: 20.119120
+Epoch [17], train_loss: 0.069049, val_loss: 0.068643, val_acc: 18.821569
+Epoch [18], train_loss: 0.066113, val_loss: 0.066324, val_acc: 19.802124
+Epoch [19], train_loss: 0.063805, val_loss: 0.062064, val_acc: 21.384819
+Epoch [20], train_loss: 0.061511, val_loss: 0.059444, val_acc: 21.418034
+Epoch [21], train_loss: 0.059481, val_loss: 0.058907, val_acc: 21.342873
+Epoch [22], train_loss: 0.057425, val_loss: 0.055211, val_acc: 21.683472
+Epoch [23], train_loss: 0.055683, val_loss: 0.054235, val_acc: 21.732742
+Epoch [24], train_loss: 0.054187, val_loss: 0.054641, val_acc: 20.596256
+Epoch [25], train_loss: 0.052684, val_loss: 0.052031, val_acc: 21.879538
+Epoch [26], train_loss: 0.051185, val_loss: 0.050770, val_acc: 21.725441
+Epoch [27], train_loss: 0.050013, val_loss: 0.049732, val_acc: 21.852928
+Epoch [28], train_loss: 0.048807, val_loss: 0.048571, val_acc: 21.871237
+Epoch [29], train_loss: 0.047686, val_loss: 0.047473, val_acc: 21.917542
+Epoch [30], train_loss: 0.046558, val_loss: 0.046388, val_acc: 22.263245
+Epoch [31], train_loss: 0.045528, val_loss: 0.046715, val_acc: 21.557850
+Epoch [32], train_loss: 0.044648, val_loss: 0.045375, val_acc: 21.564627
+Epoch [33], train_loss: 0.043702, val_loss: 0.044305, val_acc: 22.063948
+Epoch [34], train_loss: 0.042968, val_loss: 0.043322, val_acc: 22.298937
+Epoch [35], train_loss: 0.042156, val_loss: 0.042790, val_acc: 22.097178
+Epoch [36], train_loss: 0.041292, val_loss: 0.042806, val_acc: 22.112711
+Epoch [37], train_loss: 0.040633, val_loss: 0.042774, val_acc: 21.515921
+Epoch [38], train_loss: 0.039871, val_loss: 0.041723, val_acc: 22.100189
+Epoch [39], train_loss: 0.039180, val_loss: 0.040909, val_acc: 22.415174
+Epoch [40], train_loss: 0.038600, val_loss: 0.040370, val_acc: 22.469610
+Epoch [41], train_loss: 0.037950, val_loss: 0.039785, val_acc: 22.282602
+Epoch [42], train_loss: 0.037314, val_loss: 0.039992, val_acc: 22.026562
+Epoch [43], train_loss: 0.036735, val_loss: 0.039110, val_acc: 22.353193
+Epoch [44], train_loss: 0.036036, val_loss: 0.038033, val_acc: 22.564043
+Epoch [45], train_loss: 0.035347, val_loss: 0.037175, val_acc: 22.468130
+Epoch [46], train_loss: 0.034786, val_loss: 0.036600, val_acc: 22.584965
+Epoch [47], train_loss: 0.034284, val_loss: 0.036597, val_acc: 22.294373
+Epoch [48], train_loss: 0.033712, val_loss: 0.036154, val_acc: 22.634331
+Epoch [49], train_loss: 0.033224, val_loss: 0.035442, val_acc: 22.820354
+Epoch [50], train_loss: 0.032744, val_loss: 0.035107, val_acc: 22.150755
+Epoch [51], train_loss: 0.032278, val_loss: 0.034881, val_acc: 22.822775
+Epoch [52], train_loss: 0.031835, val_loss: 0.034443, val_acc: 22.704865
+Epoch [53], train_loss: 0.031482, val_loss: 0.034039, val_acc: 22.816376
+Epoch [54], train_loss: 0.031123, val_loss: 0.033837, val_acc: 22.904795
+Epoch [55], train_loss: 0.030882, val_loss: 0.033932, val_acc: 22.224516
+Epoch [56], train_loss: 0.030566, val_loss: 0.033585, val_acc: 22.725069
+Epoch [57], train_loss: 0.030338, val_loss: 0.033372, val_acc: 22.963825
+Epoch [58], train_loss: 0.030108, val_loss: 0.033272, val_acc: 22.391689
+Epoch [59], train_loss: 0.029833, val_loss: 0.033218, val_acc: 22.843399
+Epoch [60], train_loss: 0.029714, val_loss: 0.033133, val_acc: 22.562941
+Epoch [61], train_loss: 0.029510, val_loss: 0.032857, val_acc: 22.644201
+Epoch [62], train_loss: 0.029379, val_loss: 0.033256, val_acc: 22.463881
+Epoch [63], train_loss: 0.029328, val_loss: 0.032969, val_acc: 22.475294
+Epoch [64], train_loss: 0.029169, val_loss: 0.032998, val_acc: 22.394735
+Epoch [65], train_loss: 0.028988, val_loss: 0.032759, val_acc: 22.559858
+Epoch [66], train_loss: 0.028980, val_loss: 0.032821, val_acc: 22.717176
+Epoch [67], train_loss: 0.028830, val_loss: 0.032669, val_acc: 22.769297
+Epoch [68], train_loss: 0.028681, val_loss: 0.032751, val_acc: 22.902859
+Epoch [69], train_loss: 0.028598, val_loss: 0.033001, val_acc: 21.688602
+Epoch [70], train_loss: 0.028517, val_loss: 0.032568, val_acc: 22.653433
+Epoch [71], train_loss: 0.028459, val_loss: 0.032770, val_acc: 22.566343
+Epoch [72], train_loss: 0.028423, val_loss: 0.032816, val_acc: 22.680098
+Epoch [73], train_loss: 0.028307, val_loss: 0.032407, val_acc: 22.596573
+Epoch [74], train_loss: 0.028248, val_loss: 0.032640, val_acc: 22.484482
+Epoch [75], train_loss: 0.028202, val_loss: 0.033229, val_acc: 21.603077
+Epoch [76], train_loss: 0.028125, val_loss: 0.032503, val_acc: 22.255381
+Epoch [77], train_loss: 0.028017, val_loss: 0.032303, val_acc: 22.841970
+Epoch [78], train_loss: 0.027952, val_loss: 0.032500, val_acc: 22.646088
+Epoch [79], train_loss: 0.027912, val_loss: 0.032731, val_acc: 22.073692
+Epoch [80], train_loss: 0.027908, val_loss: 0.032959, val_acc: 21.589310
+Epoch [81], train_loss: 0.027815, val_loss: 0.032358, val_acc: 22.686445
+Epoch [82], train_loss: 0.027681, val_loss: 0.032385, val_acc: 22.381123
+Epoch [83], train_loss: 0.027705, val_loss: 0.032370, val_acc: 22.620667
+Epoch [84], train_loss: 0.027621, val_loss: 0.032521, val_acc: 22.649765
+Epoch [85], train_loss: 0.027592, val_loss: 0.032645, val_acc: 22.403254
+Epoch [86], train_loss: 0.027554, val_loss: 0.032577, val_acc: 22.096455
+Epoch [87], train_loss: 0.027484, val_loss: 0.032645, val_acc: 22.584856
+Epoch [88], train_loss: 0.027431, val_loss: 0.032170, val_acc: 22.520321
+Epoch [89], train_loss: 0.027401, val_loss: 0.032284, val_acc: 22.591597
+Epoch [90], train_loss: 0.027347, val_loss: 0.032546, val_acc: 22.171906
+Epoch [91], train_loss: 0.027306, val_loss: 0.032196, val_acc: 22.542919
+Epoch [92], train_loss: 0.027343, val_loss: 0.032264, val_acc: 22.460058
+Epoch [93], train_loss: 0.027167, val_loss: 0.032668, val_acc: 21.944807
+Epoch [94], train_loss: 0.027156, val_loss: 0.032368, val_acc: 22.125475
+Epoch [95], train_loss: 0.027115, val_loss: 0.032301, val_acc: 22.516758
+Epoch [96], train_loss: 0.027009, val_loss: 0.032335, val_acc: 22.363045
+Epoch [97], train_loss: 0.026954, val_loss: 0.032472, val_acc: 22.141020
+Epoch [98], train_loss: 0.026962, val_loss: 0.032574, val_acc: 22.011221
+Epoch [99], train_loss: 0.026967, val_loss: 0.032336, val_acc: 22.508385
+Epoch [100], train_loss: 0.026832, val_loss: 0.032153, val_acc: 22.391973
+Epoch [101], train_loss: 0.026815, val_loss: 0.032513, val_acc: 22.176556
+Epoch [102], train_loss: 0.026790, val_loss: 0.032562, val_acc: 22.208826
+Epoch [103], train_loss: 0.026727, val_loss: 0.032421, val_acc: 22.237684
+Epoch [104], train_loss: 0.026724, val_loss: 0.032346, val_acc: 22.451405
+Epoch [105], train_loss: 0.026730, val_loss: 0.032506, val_acc: 22.052896
+Epoch [106], train_loss: 0.026669, val_loss: 0.032393, val_acc: 22.061819
+Epoch [107], train_loss: 0.026544, val_loss: 0.032554, val_acc: 21.948917
+Epoch [108], train_loss: 0.026553, val_loss: 0.032426, val_acc: 22.395702
+Epoch [109], train_loss: 0.026491, val_loss: 0.032295, val_acc: 22.215605
+Epoch [110], train_loss: 0.026499, val_loss: 0.032438, val_acc: 22.250067
+Epoch [111], train_loss: 0.026431, val_loss: 0.032826, val_acc: 21.595020
+Epoch [112], train_loss: 0.026438, val_loss: 0.032287, val_acc: 22.256987
+Epoch [113], train_loss: 0.026347, val_loss: 0.032274, val_acc: 22.245840
+Epoch [114], train_loss: 0.026392, val_loss: 0.032433, val_acc: 22.091520
+Epoch [115], train_loss: 0.026259, val_loss: 0.032336, val_acc: 22.182508
+Epoch [116], train_loss: 0.026186, val_loss: 0.032454, val_acc: 21.942469
+Epoch [117], train_loss: 0.026163, val_loss: 0.032620, val_acc: 21.975971
+Epoch [118], train_loss: 0.026158, val_loss: 0.032403, val_acc: 22.010416
+Epoch [119], train_loss: 0.026140, val_loss: 0.032509, val_acc: 21.821005
+Epoch [120], train_loss: 0.026094, val_loss: 0.032419, val_acc: 22.091908
+Epoch [121], train_loss: 0.026028, val_loss: 0.032322, val_acc: 22.265829
+Epoch [122], train_loss: 0.025991, val_loss: 0.032365, val_acc: 22.297066
+Epoch [123], train_loss: 0.025947, val_loss: 0.032328, val_acc: 22.310963
+Epoch [124], train_loss: 0.025927, val_loss: 0.032443, val_acc: 22.135237
+Epoch [125], train_loss: 0.025907, val_loss: 0.032419, val_acc: 22.041990
+Epoch [126], train_loss: 0.025926, val_loss: 0.032459, val_acc: 22.182623
+Epoch [127], train_loss: 0.025884, val_loss: 0.032487, val_acc: 21.986275
+Epoch [128], train_loss: 0.025845, val_loss: 0.032363, val_acc: 21.889317
+Epoch [129], train_loss: 0.025815, val_loss: 0.032660, val_acc: 21.695026
+Epoch [130], train_loss: 0.025736, val_loss: 0.032523, val_acc: 21.846308
+Epoch [131], train_loss: 0.025694, val_loss: 0.032631, val_acc: 21.744444
+Epoch [132], train_loss: 0.025675, val_loss: 0.032493, val_acc: 22.095070
+Epoch [133], train_loss: 0.025684, val_loss: 0.032575, val_acc: 21.920437
+Epoch [134], train_loss: 0.025604, val_loss: 0.032375, val_acc: 22.136061
+Epoch [135], train_loss: 0.025602, val_loss: 0.032617, val_acc: 22.220005
+Epoch [136], train_loss: 0.025565, val_loss: 0.032632, val_acc: 22.008747
+Epoch [137], train_loss: 0.025540, val_loss: 0.032678, val_acc: 21.814713
+Epoch [138], train_loss: 0.025526, val_loss: 0.032464, val_acc: 22.125389
+Epoch [139], train_loss: 0.025476, val_loss: 0.032467, val_acc: 22.148727
+Epoch [140], train_loss: 0.025437, val_loss: 0.032485, val_acc: 22.180922
+Epoch [141], train_loss: 0.025518, val_loss: 0.032488, val_acc: 21.983219
+Epoch [142], train_loss: 0.025436, val_loss: 0.032620, val_acc: 21.701351
+Epoch [143], train_loss: 0.025382, val_loss: 0.032613, val_acc: 21.946867
+Epoch [144], train_loss: 0.025381, val_loss: 0.032379, val_acc: 22.147783
+Epoch [145], train_loss: 0.025289, val_loss: 0.032498, val_acc: 21.887640
+Epoch [146], train_loss: 0.025359, val_loss: 0.032989, val_acc: 21.371414
+Epoch [147], train_loss: 0.025347, val_loss: 0.032458, val_acc: 22.157839
+Epoch [148], train_loss: 0.025307, val_loss: 0.032464, val_acc: 22.170790
+Epoch [149], train_loss: 0.025251, val_loss: 0.032494, val_acc: 22.048361
+Epoch [150], train_loss: 0.025217, val_loss: 0.032379, val_acc: 21.985939
+Epoch [151], train_loss: 0.025170, val_loss: 0.032810, val_acc: 21.680389
+Epoch [152], train_loss: 0.025119, val_loss: 0.032640, val_acc: 21.860662
+Epoch [153], train_loss: 0.025114, val_loss: 0.032519, val_acc: 21.844038
+Epoch [154], train_loss: 0.025073, val_loss: 0.032833, val_acc: 21.837755
+Epoch [155], train_loss: 0.025096, val_loss: 0.032570, val_acc: 22.203327
+Epoch [156], train_loss: 0.025086, val_loss: 0.032523, val_acc: 22.000555
+Epoch [157], train_loss: 0.025016, val_loss: 0.032534, val_acc: 21.960505
+Epoch [158], train_loss: 0.025020, val_loss: 0.032487, val_acc: 21.976133
+Epoch [159], train_loss: 0.024959, val_loss: 0.032714, val_acc: 21.872694
+Epoch [160], train_loss: 0.025003, val_loss: 0.032741, val_acc: 21.605772
+Epoch [161], train_loss: 0.024967, val_loss: 0.032740, val_acc: 21.491064
+Epoch [162], train_loss: 0.024922, val_loss: 0.032499, val_acc: 22.037142
+Epoch [163], train_loss: 0.024914, val_loss: 0.032516, val_acc: 22.049389
+Epoch [164], train_loss: 0.024856, val_loss: 0.032378, val_acc: 22.160009
+Epoch [165], train_loss: 0.024847, val_loss: 0.032619, val_acc: 22.016426
+Epoch [166], train_loss: 0.024777, val_loss: 0.032805, val_acc: 21.805492
+Epoch [167], train_loss: 0.024769, val_loss: 0.032526, val_acc: 22.090330
+Epoch [168], train_loss: 0.024759, val_loss: 0.032609, val_acc: 22.009153
+Epoch [169], train_loss: 0.024763, val_loss: 0.032608, val_acc: 21.960278
+Epoch [170], train_loss: 0.024699, val_loss: 0.032715, val_acc: 21.701275
+Epoch [171], train_loss: 0.024709, val_loss: 0.032735, val_acc: 22.175646
+Epoch [172], train_loss: 0.024692, val_loss: 0.033041, val_acc: 21.277317
+Epoch [173], train_loss: 0.024657, val_loss: 0.032561, val_acc: 22.004248
+Epoch [174], train_loss: 0.024582, val_loss: 0.032707, val_acc: 21.853758
+Epoch [175], train_loss: 0.024647, val_loss: 0.032489, val_acc: 22.118616
+Epoch [176], train_loss: 0.024625, val_loss: 0.032658, val_acc: 21.765310
+Epoch [177], train_loss: 0.024596, val_loss: 0.032641, val_acc: 21.846022
+Epoch [178], train_loss: 0.024574, val_loss: 0.032904, val_acc: 21.703238
+Epoch [179], train_loss: 0.024588, val_loss: 0.032702, val_acc: 21.991226
+Epoch [180], train_loss: 0.024533, val_loss: 0.032622, val_acc: 21.851828
+Epoch [181], train_loss: 0.024483, val_loss: 0.032683, val_acc: 21.721756
+Epoch [182], train_loss: 0.024500, val_loss: 0.032629, val_acc: 22.101620
+Epoch [183], train_loss: 0.024454, val_loss: 0.032649, val_acc: 21.742636
+Epoch [184], train_loss: 0.024457, val_loss: 0.032655, val_acc: 21.811773
+Epoch [185], train_loss: 0.024470, val_loss: 0.032784, val_acc: 21.844397
+Epoch [186], train_loss: 0.024438, val_loss: 0.033149, val_acc: 21.887537
+Epoch [187], train_loss: 0.024385, val_loss: 0.032710, val_acc: 21.736547
+Epoch [188], train_loss: 0.024344, val_loss: 0.032691, val_acc: 22.132444
+Epoch [189], train_loss: 0.024299, val_loss: 0.032811, val_acc: 21.721933
+Epoch [190], train_loss: 0.024334, val_loss: 0.032862, val_acc: 21.596130
+Epoch [191], train_loss: 0.024320, val_loss: 0.032908, val_acc: 21.533146
+Epoch [192], train_loss: 0.024261, val_loss: 0.032764, val_acc: 21.770247
+Epoch [193], train_loss: 0.024221, val_loss: 0.032916, val_acc: 21.519705
+Epoch [194], train_loss: 0.024196, val_loss: 0.032971, val_acc: 21.548523
+Epoch [195], train_loss: 0.024302, val_loss: 0.033072, val_acc: 21.445379
+Epoch [196], train_loss: 0.024140, val_loss: 0.032837, val_acc: 21.839413
+Epoch [197], train_loss: 0.024117, val_loss: 0.032617, val_acc: 21.970251
+Epoch [198], train_loss: 0.024107, val_loss: 0.032944, val_acc: 21.462498
+Epoch [199], train_loss: 0.024172, val_loss: 0.032578, val_acc: 22.019812
+Epoch [200], train_loss: 0.024047, val_loss: 0.032914, val_acc: 21.877476
+Epoch [201], train_loss: 0.024071, val_loss: 0.032850, val_acc: 22.006872
+Epoch [202], train_loss: 0.023991, val_loss: 0.032734, val_acc: 21.797266
+Epoch [203], train_loss: 0.024030, val_loss: 0.032994, val_acc: 21.566648
+Epoch [204], train_loss: 0.023985, val_loss: 0.032644, val_acc: 22.110514
+Epoch [205], train_loss: 0.023978, val_loss: 0.032532, val_acc: 22.091618
+Epoch [206], train_loss: 0.024006, val_loss: 0.032963, val_acc: 21.537451
+Epoch [207], train_loss: 0.023991, val_loss: 0.032914, val_acc: 21.672836
+Epoch [208], train_loss: 0.023941, val_loss: 0.033062, val_acc: 21.459225
+Epoch [209], train_loss: 0.023951, val_loss: 0.032995, val_acc: 21.777254
+Epoch [210], train_loss: 0.023904, val_loss: 0.033071, val_acc: 21.533451
+Epoch [211], train_loss: 0.023935, val_loss: 0.032803, val_acc: 21.777853
+Epoch [212], train_loss: 0.023868, val_loss: 0.032951, val_acc: 21.652571
+Epoch [213], train_loss: 0.023880, val_loss: 0.032937, val_acc: 21.589643
+Epoch [214], train_loss: 0.023860, val_loss: 0.033082, val_acc: 21.468903
+Epoch [215], train_loss: 0.023850, val_loss: 0.032825, val_acc: 21.839384
+Epoch [216], train_loss: 0.023797, val_loss: 0.032831, val_acc: 21.920876
+Epoch [217], train_loss: 0.023764, val_loss: 0.032921, val_acc: 21.720474
+Epoch [218], train_loss: 0.023727, val_loss: 0.032952, val_acc: 21.704786
+Epoch [219], train_loss: 0.023703, val_loss: 0.032806, val_acc: 21.954390
+Epoch [220], train_loss: 0.023708, val_loss: 0.032897, val_acc: 21.807829
+Epoch [221], train_loss: 0.023749, val_loss: 0.032905, val_acc: 21.717958
+Epoch [222], train_loss: 0.023685, val_loss: 0.032830, val_acc: 21.708628
+Epoch [223], train_loss: 0.023712, val_loss: 0.033140, val_acc: 21.483740
+Epoch [224], train_loss: 0.023719, val_loss: 0.033280, val_acc: 21.146219
+Epoch [225], train_loss: 0.023661, val_loss: 0.032823, val_acc: 21.977598
+Epoch [226], train_loss: 0.023607, val_loss: 0.032781, val_acc: 21.901289
+Epoch [227], train_loss: 0.023635, val_loss: 0.033033, val_acc: 21.741911
+Epoch [228], train_loss: 0.023651, val_loss: 0.032884, val_acc: 21.842442
+Epoch [229], train_loss: 0.023652, val_loss: 0.032966, val_acc: 21.789673
+Epoch [230], train_loss: 0.023613, val_loss: 0.032938, val_acc: 21.678793
+Epoch [231], train_loss: 0.023582, val_loss: 0.032731, val_acc: 22.065128
+Epoch [232], train_loss: 0.023531, val_loss: 0.032950, val_acc: 21.943279
+Epoch [233], train_loss: 0.023497, val_loss: 0.032968, val_acc: 21.604061
+Epoch [234], train_loss: 0.023520, val_loss: 0.033096, val_acc: 21.512112
+Epoch [235], train_loss: 0.023559, val_loss: 0.032819, val_acc: 21.988539
+Epoch [236], train_loss: 0.023469, val_loss: 0.032983, val_acc: 21.857649
+Epoch [237], train_loss: 0.023476, val_loss: 0.032838, val_acc: 21.824984
+Epoch [238], train_loss: 0.023388, val_loss: 0.033003, val_acc: 21.835157
+Epoch [239], train_loss: 0.023493, val_loss: 0.033214, val_acc: 21.766117
+Epoch [240], train_loss: 0.023450, val_loss: 0.033164, val_acc: 21.436117
+Epoch [241], train_loss: 0.023327, val_loss: 0.032963, val_acc: 21.853735
+Epoch [242], train_loss: 0.023462, val_loss: 0.032901, val_acc: 21.936028
+Epoch [243], train_loss: 0.023425, val_loss: 0.033094, val_acc: 21.636944
+Epoch [244], train_loss: 0.023405, val_loss: 0.033043, val_acc: 21.759815
+Epoch [245], train_loss: 0.023286, val_loss: 0.032929, val_acc: 21.906908
+Epoch [246], train_loss: 0.023375, val_loss: 0.033229, val_acc: 21.476046
+Epoch [247], train_loss: 0.023306, val_loss: 0.032953, val_acc: 21.856230
+Epoch [248], train_loss: 0.023311, val_loss: 0.033071, val_acc: 21.738567
+Epoch [249], train_loss: 0.023267, val_loss: 0.033300, val_acc: 21.291162
+Epoch [250], train_loss: 0.023224, val_loss: 0.033111, val_acc: 21.537472
+Epoch [251], train_loss: 0.023217, val_loss: 0.033222, val_acc: 21.529058
+Epoch [252], train_loss: 0.023343, val_loss: 0.033200, val_acc: 21.788263
+Epoch [253], train_loss: 0.023254, val_loss: 0.033019, val_acc: 21.827818
+Epoch [254], train_loss: 0.023180, val_loss: 0.033269, val_acc: 21.503046
+Epoch [255], train_loss: 0.023220, val_loss: 0.033348, val_acc: 21.170797
+Epoch [256], train_loss: 0.023178, val_loss: 0.033278, val_acc: 21.371750
+Epoch [257], train_loss: 0.023185, val_loss: 0.033084, val_acc: 21.734818
+Epoch [258], train_loss: 0.023195, val_loss: 0.033172, val_acc: 21.682463
+Epoch [259], train_loss: 0.023124, val_loss: 0.033189, val_acc: 21.805607
+Epoch [260], train_loss: 0.023119, val_loss: 0.033237, val_acc: 21.642002
+Epoch [261], train_loss: 0.023077, val_loss: 0.033097, val_acc: 21.676571
+Epoch [262], train_loss: 0.023105, val_loss: 0.033087, val_acc: 21.639957
+Epoch [263], train_loss: 0.023096, val_loss: 0.033050, val_acc: 21.649162
+Epoch [264], train_loss: 0.023058, val_loss: 0.033297, val_acc: 21.508095
+Epoch [265], train_loss: 0.023055, val_loss: 0.033009, val_acc: 21.816694
+Epoch [266], train_loss: 0.023047, val_loss: 0.033261, val_acc: 21.584383
+Epoch [267], train_loss: 0.023030, val_loss: 0.033058, val_acc: 21.790791
+Epoch [268], train_loss: 0.023020, val_loss: 0.033226, val_acc: 21.709301
+Epoch [269], train_loss: 0.023036, val_loss: 0.033188, val_acc: 21.479944
+Epoch [270], train_loss: 0.023019, val_loss: 0.033073, val_acc: 21.793188
+Epoch [271], train_loss: 0.022962, val_loss: 0.033176, val_acc: 21.627445
+Epoch [272], train_loss: 0.023006, val_loss: 0.033147, val_acc: 21.533674
+Epoch [273], train_loss: 0.023005, val_loss: 0.033087, val_acc: 21.627729
+Epoch [274], train_loss: 0.022886, val_loss: 0.033434, val_acc: 21.144140
+Epoch [275], train_loss: 0.022970, val_loss: 0.033154, val_acc: 21.706282
+Epoch [276], train_loss: 0.022917, val_loss: 0.033244, val_acc: 21.613035
+Epoch [277], train_loss: 0.022884, val_loss: 0.033184, val_acc: 21.637022
+Epoch [278], train_loss: 0.022901, val_loss: 0.033495, val_acc: 21.237921
+Epoch [279], train_loss: 0.022964, val_loss: 0.033149, val_acc: 21.554625
+Epoch [280], train_loss: 0.022828, val_loss: 0.033156, val_acc: 21.603342
+Epoch [281], train_loss: 0.022887, val_loss: 0.033048, val_acc: 21.802179
+Epoch [282], train_loss: 0.022803, val_loss: 0.033117, val_acc: 21.623405
+Epoch [283], train_loss: 0.022856, val_loss: 0.033276, val_acc: 21.416780
+Epoch [284], train_loss: 0.022853, val_loss: 0.033139, val_acc: 21.713314
+Epoch [285], train_loss: 0.022752, val_loss: 0.033367, val_acc: 21.309668
+Epoch [286], train_loss: 0.022768, val_loss: 0.033273, val_acc: 21.565905
+Epoch [287], train_loss: 0.022826, val_loss: 0.033142, val_acc: 21.570286
+Epoch [288], train_loss: 0.022751, val_loss: 0.033308, val_acc: 21.408367
+Epoch [289], train_loss: 0.022809, val_loss: 0.033059, val_acc: 21.717051
+Epoch [290], train_loss: 0.022797, val_loss: 0.033419, val_acc: 21.288448
+Epoch [291], train_loss: 0.022724, val_loss: 0.033399, val_acc: 21.259958
+Epoch [292], train_loss: 0.022754, val_loss: 0.033065, val_acc: 21.762419
+Epoch [293], train_loss: 0.022684, val_loss: 0.033096, val_acc: 21.946356
+Epoch [294], train_loss: 0.022690, val_loss: 0.033153, val_acc: 21.856405
+Epoch [295], train_loss: 0.022686, val_loss: 0.033642, val_acc: 21.383839
+Epoch [296], train_loss: 0.022690, val_loss: 0.033272, val_acc: 21.405439
+Epoch [297], train_loss: 0.022682, val_loss: 0.033201, val_acc: 21.510736
+Epoch [298], train_loss: 0.022603, val_loss: 0.033068, val_acc: 21.819193
+Epoch [299], train_loss: 0.022599, val_loss: 0.033250, val_acc: 21.371881
+Epoch [300], train_loss: 0.022629, val_loss: 0.033310, val_acc: 21.453459
+Epoch [301], train_loss: 0.022616, val_loss: 0.033352, val_acc: 21.599735
+Epoch [302], train_loss: 0.022563, val_loss: 0.033468, val_acc: 21.413122
+Epoch [303], train_loss: 0.022548, val_loss: 0.033458, val_acc: 21.369635
+Epoch [304], train_loss: 0.022552, val_loss: 0.033280, val_acc: 21.667742
+Epoch [305], train_loss: 0.022498, val_loss: 0.033044, val_acc: 21.843170
+Epoch [306], train_loss: 0.022568, val_loss: 0.033603, val_acc: 21.122578
+Epoch [307], train_loss: 0.022553, val_loss: 0.033267, val_acc: 21.578550
+Epoch [308], train_loss: 0.022516, val_loss: 0.033307, val_acc: 21.640202
+Epoch [309], train_loss: 0.022567, val_loss: 0.033492, val_acc: 21.457829
+Epoch [310], train_loss: 0.022552, val_loss: 0.033403, val_acc: 21.343300
+Epoch [311], train_loss: 0.022515, val_loss: 0.033303, val_acc: 21.566183
+Epoch [312], train_loss: 0.022499, val_loss: 0.033370, val_acc: 21.577055
+Epoch [313], train_loss: 0.022491, val_loss: 0.033288, val_acc: 21.638662
+Epoch [314], train_loss: 0.022465, val_loss: 0.033437, val_acc: 21.295414
+Epoch [315], train_loss: 0.022422, val_loss: 0.033168, val_acc: 21.543978
+Epoch [316], train_loss: 0.022446, val_loss: 0.033378, val_acc: 21.652159
+Epoch [317], train_loss: 0.022410, val_loss: 0.033577, val_acc: 21.115101
+Epoch [318], train_loss: 0.022431, val_loss: 0.033241, val_acc: 21.691511
+Epoch [319], train_loss: 0.022373, val_loss: 0.033655, val_acc: 21.112722
+Epoch [320], train_loss: 0.022416, val_loss: 0.033296, val_acc: 21.842503
+Epoch [321], train_loss: 0.022467, val_loss: 0.033486, val_acc: 21.447210
+Epoch [322], train_loss: 0.022377, val_loss: 0.033229, val_acc: 21.765614
+Epoch [323], train_loss: 0.022353, val_loss: 0.033335, val_acc: 21.628172
+Epoch [324], train_loss: 0.022324, val_loss: 0.033357, val_acc: 21.644857
+Epoch [325], train_loss: 0.022331, val_loss: 0.033494, val_acc: 21.405531
+Epoch [326], train_loss: 0.022308, val_loss: 0.033312, val_acc: 21.490234
+Epoch [327], train_loss: 0.022355, val_loss: 0.033351, val_acc: 21.638096
+Epoch [328], train_loss: 0.022367, val_loss: 0.033433, val_acc: 21.445660
+Epoch [329], train_loss: 0.022251, val_loss: 0.033456, val_acc: 21.334908
+Epoch [330], train_loss: 0.022273, val_loss: 0.033694, val_acc: 21.215614
+Epoch [331], train_loss: 0.022304, val_loss: 0.033551, val_acc: 21.327702
+Epoch [332], train_loss: 0.022295, val_loss: 0.033471, val_acc: 21.368116
+Epoch [333], train_loss: 0.022248, val_loss: 0.033350, val_acc: 21.579090
+Epoch [334], train_loss: 0.022257, val_loss: 0.033380, val_acc: 21.516310
+Epoch [335], train_loss: 0.022243, val_loss: 0.033239, val_acc: 21.614391
+Epoch [336], train_loss: 0.022223, val_loss: 0.033150, val_acc: 21.807331
+Epoch [337], train_loss: 0.022168, val_loss: 0.033533, val_acc: 21.385584
+Epoch [338], train_loss: 0.022242, val_loss: 0.033323, val_acc: 21.522163
+Epoch [339], train_loss: 0.022243, val_loss: 0.033592, val_acc: 21.591873
+Epoch [340], train_loss: 0.022183, val_loss: 0.033448, val_acc: 21.554684
+Epoch [341], train_loss: 0.022105, val_loss: 0.033626, val_acc: 21.288939
+Epoch [342], train_loss: 0.022195, val_loss: 0.033154, val_acc: 21.751322
+Epoch [343], train_loss: 0.022174, val_loss: 0.033556, val_acc: 21.390249
+Epoch [344], train_loss: 0.022159, val_loss: 0.033333, val_acc: 21.500612
+Epoch [345], train_loss: 0.022164, val_loss: 0.033456, val_acc: 21.527090
+Epoch [346], train_loss: 0.022146, val_loss: 0.033339, val_acc: 21.725903
+Epoch [347], train_loss: 0.022115, val_loss: 0.033411, val_acc: 21.426935
+Epoch [348], train_loss: 0.022127, val_loss: 0.033479, val_acc: 21.736153
+Epoch [349], train_loss: 0.022158, val_loss: 0.033407, val_acc: 21.547560
+Epoch [350], train_loss: 0.022153, val_loss: 0.033417, val_acc: 21.744808
+Epoch [351], train_loss: 0.022066, val_loss: 0.033271, val_acc: 21.633331
+Epoch [352], train_loss: 0.022097, val_loss: 0.033562, val_acc: 21.283804
+Epoch [353], train_loss: 0.022058, val_loss: 0.033749, val_acc: 21.052301
+Epoch [354], train_loss: 0.022051, val_loss: 0.033359, val_acc: 21.512589
+Epoch [355], train_loss: 0.022051, val_loss: 0.033342, val_acc: 21.631477
+Epoch [356], train_loss: 0.022033, val_loss: 0.033357, val_acc: 21.569574
+Epoch [357], train_loss: 0.021988, val_loss: 0.033372, val_acc: 21.656693
+Epoch [358], train_loss: 0.022070, val_loss: 0.033493, val_acc: 21.521513
+Epoch [359], train_loss: 0.021972, val_loss: 0.033539, val_acc: 21.497023
+Epoch [360], train_loss: 0.022004, val_loss: 0.033315, val_acc: 21.759375
+Epoch [361], train_loss: 0.022015, val_loss: 0.033520, val_acc: 21.728653
+Epoch [362], train_loss: 0.021920, val_loss: 0.033457, val_acc: 21.558462
+Epoch [363], train_loss: 0.022010, val_loss: 0.033230, val_acc: 21.671682
+Epoch [364], train_loss: 0.021956, val_loss: 0.033607, val_acc: 21.430349
+Epoch [365], train_loss: 0.021879, val_loss: 0.033724, val_acc: 21.265373
+Epoch [366], train_loss: 0.021950, val_loss: 0.033550, val_acc: 21.500996
+Epoch [367], train_loss: 0.021882, val_loss: 0.033606, val_acc: 21.339960
+Epoch [368], train_loss: 0.021864, val_loss: 0.033406, val_acc: 21.461857
+Epoch [369], train_loss: 0.021897, val_loss: 0.033497, val_acc: 21.355536
+Epoch [370], train_loss: 0.021855, val_loss: 0.033761, val_acc: 21.328190
+Epoch [371], train_loss: 0.021880, val_loss: 0.033356, val_acc: 21.744467
+Epoch [372], train_loss: 0.021900, val_loss: 0.033676, val_acc: 21.466225
+Epoch [373], train_loss: 0.021880, val_loss: 0.033647, val_acc: 21.429388
+Epoch [374], train_loss: 0.021907, val_loss: 0.033250, val_acc: 21.683378
+Epoch [375], train_loss: 0.021820, val_loss: 0.033522, val_acc: 21.584150
+Epoch [376], train_loss: 0.021811, val_loss: 0.033469, val_acc: 21.616478
+Epoch [377], train_loss: 0.021792, val_loss: 0.033571, val_acc: 21.429132
+Epoch [378], train_loss: 0.021819, val_loss: 0.033641, val_acc: 21.478268
+Epoch [379], train_loss: 0.021811, val_loss: 0.033527, val_acc: 21.513985
+Epoch [380], train_loss: 0.021773, val_loss: 0.033415, val_acc: 21.647060
+Epoch [381], train_loss: 0.021799, val_loss: 0.033594, val_acc: 21.264927
+Epoch [382], train_loss: 0.021762, val_loss: 0.033614, val_acc: 21.684111
+Epoch [383], train_loss: 0.021789, val_loss: 0.033604, val_acc: 21.408270
+Epoch [384], train_loss: 0.021740, val_loss: 0.033550, val_acc: 21.480589
+Epoch [385], train_loss: 0.021756, val_loss: 0.033293, val_acc: 21.746555
+Epoch [386], train_loss: 0.021653, val_loss: 0.033642, val_acc: 21.382948
+Epoch [387], train_loss: 0.021728, val_loss: 0.033578, val_acc: 21.452061
+Epoch [388], train_loss: 0.021734, val_loss: 0.033485, val_acc: 21.526892
+Epoch [389], train_loss: 0.021682, val_loss: 0.033431, val_acc: 21.692677
+Epoch [390], train_loss: 0.021758, val_loss: 0.033613, val_acc: 21.596437
+Epoch [391], train_loss: 0.021728, val_loss: 0.033486, val_acc: 21.720032
+Epoch [392], train_loss: 0.021669, val_loss: 0.033315, val_acc: 21.640972
+Epoch [393], train_loss: 0.021722, val_loss: 0.033442, val_acc: 21.492792
+Epoch [394], train_loss: 0.021650, val_loss: 0.033711, val_acc: 21.266087
+Epoch [395], train_loss: 0.021672, val_loss: 0.033536, val_acc: 21.526716
+Epoch [396], train_loss: 0.021665, val_loss: 0.033478, val_acc: 21.635277
+Epoch [397], train_loss: 0.021672, val_loss: 0.033610, val_acc: 21.509125
+Epoch [398], train_loss: 0.021661, val_loss: 0.033319, val_acc: 21.808136
+Epoch [399], train_loss: 0.021616, val_loss: 0.033443, val_acc: 21.483652
+Epoch [400], train_loss: 0.021627, val_loss: 0.033581, val_acc: 21.553230
+Epoch [401], train_loss: 0.021615, val_loss: 0.033803, val_acc: 21.197500
+Epoch [402], train_loss: 0.021592, val_loss: 0.033814, val_acc: 21.079565
+Epoch [403], train_loss: 0.021590, val_loss: 0.033734, val_acc: 21.330158
+Epoch [404], train_loss: 0.021638, val_loss: 0.033553, val_acc: 21.557631
+Epoch [405], train_loss: 0.021603, val_loss: 0.033494, val_acc: 21.489962
+Epoch [406], train_loss: 0.021588, val_loss: 0.033492, val_acc: 21.469776
+Epoch [407], train_loss: 0.021597, val_loss: 0.033644, val_acc: 21.219751
+Epoch [408], train_loss: 0.021592, val_loss: 0.033586, val_acc: 21.382584
+Epoch [409], train_loss: 0.021503, val_loss: 0.033504, val_acc: 21.625822
+Epoch [410], train_loss: 0.021514, val_loss: 0.033715, val_acc: 21.481680
+Epoch [411], train_loss: 0.021555, val_loss: 0.033739, val_acc: 21.483879
+Epoch [412], train_loss: 0.021565, val_loss: 0.033695, val_acc: 21.359907
+Epoch [413], train_loss: 0.021564, val_loss: 0.033645, val_acc: 21.503111
+Epoch [414], train_loss: 0.021536, val_loss: 0.033601, val_acc: 21.536352
+Epoch [415], train_loss: 0.021532, val_loss: 0.033618, val_acc: 21.698914
+Epoch [416], train_loss: 0.021500, val_loss: 0.033661, val_acc: 21.520132
+Epoch [417], train_loss: 0.021500, val_loss: 0.033579, val_acc: 21.516415
+Epoch [418], train_loss: 0.021506, val_loss: 0.033939, val_acc: 20.944401
+Epoch [419], train_loss: 0.021450, val_loss: 0.033505, val_acc: 21.584686
+Epoch [420], train_loss: 0.021462, val_loss: 0.033655, val_acc: 21.451221
+Epoch [421], train_loss: 0.021464, val_loss: 0.033836, val_acc: 21.275028
+Epoch [422], train_loss: 0.021468, val_loss: 0.033567, val_acc: 21.508257
+Epoch [423], train_loss: 0.021424, val_loss: 0.033452, val_acc: 21.597925
+Epoch [424], train_loss: 0.021443, val_loss: 0.033711, val_acc: 21.462715
+Epoch [425], train_loss: 0.021438, val_loss: 0.033592, val_acc: 21.649187
+Epoch [426], train_loss: 0.021366, val_loss: 0.033445, val_acc: 21.529461
+Epoch [427], train_loss: 0.021428, val_loss: 0.033512, val_acc: 21.650869
+Epoch [428], train_loss: 0.021420, val_loss: 0.033851, val_acc: 21.211470
+Epoch [429], train_loss: 0.021396, val_loss: 0.033594, val_acc: 21.607876
+Epoch [430], train_loss: 0.021431, val_loss: 0.033716, val_acc: 21.365900
+Epoch [431], train_loss: 0.021440, val_loss: 0.033602, val_acc: 21.621601
+Epoch [432], train_loss: 0.021429, val_loss: 0.033553, val_acc: 21.528461
+Epoch [433], train_loss: 0.021422, val_loss: 0.033546, val_acc: 21.675819
+Epoch [434], train_loss: 0.021332, val_loss: 0.033459, val_acc: 21.641857
+Epoch [435], train_loss: 0.021392, val_loss: 0.033772, val_acc: 21.442854
+Epoch [436], train_loss: 0.021335, val_loss: 0.033523, val_acc: 21.512363
+Epoch [437], train_loss: 0.021375, val_loss: 0.033846, val_acc: 21.283596
+Epoch [438], train_loss: 0.021330, val_loss: 0.033428, val_acc: 21.674513
+Epoch [439], train_loss: 0.021415, val_loss: 0.033671, val_acc: 21.436253
+Epoch [440], train_loss: 0.021294, val_loss: 0.033668, val_acc: 21.645481
+Epoch [441], train_loss: 0.021356, val_loss: 0.033649, val_acc: 21.302015
+Epoch [442], train_loss: 0.021370, val_loss: 0.034000, val_acc: 21.016520
+Epoch [443], train_loss: 0.021358, val_loss: 0.033575, val_acc: 21.664778
+Epoch [444], train_loss: 0.021341, val_loss: 0.033456, val_acc: 21.683456
+Epoch [445], train_loss: 0.021281, val_loss: 0.033409, val_acc: 21.747004
+Epoch [446], train_loss: 0.021265, val_loss: 0.033595, val_acc: 21.440237
+Epoch [447], train_loss: 0.021314, val_loss: 0.033750, val_acc: 21.437346
+Epoch [448], train_loss: 0.021318, val_loss: 0.033626, val_acc: 21.712721
+Epoch [449], train_loss: 0.021236, val_loss: 0.033866, val_acc: 21.317661
+Epoch [450], train_loss: 0.021261, val_loss: 0.033846, val_acc: 21.128815
+Epoch [451], train_loss: 0.021280, val_loss: 0.033640, val_acc: 21.423243
+Epoch [452], train_loss: 0.021315, val_loss: 0.033694, val_acc: 21.477646
+Epoch [453], train_loss: 0.021255, val_loss: 0.033758, val_acc: 21.395571
+Epoch [454], train_loss: 0.021272, val_loss: 0.033513, val_acc: 21.803038
+Epoch [455], train_loss: 0.021206, val_loss: 0.033547, val_acc: 21.546385
+Epoch [456], train_loss: 0.021233, val_loss: 0.033679, val_acc: 21.569098
+Epoch [457], train_loss: 0.021254, val_loss: 0.033625, val_acc: 21.523748
+Epoch [458], train_loss: 0.021219, val_loss: 0.033915, val_acc: 21.151838
+Epoch [459], train_loss: 0.021227, val_loss: 0.033568, val_acc: 21.622812
+Epoch [460], train_loss: 0.021273, val_loss: 0.033515, val_acc: 21.683596
+Epoch [461], train_loss: 0.021197, val_loss: 0.033814, val_acc: 21.331337
+Epoch [462], train_loss: 0.021214, val_loss: 0.033711, val_acc: 21.639751
+Epoch [463], train_loss: 0.021199, val_loss: 0.033595, val_acc: 21.581459
+Epoch [464], train_loss: 0.021205, val_loss: 0.033837, val_acc: 21.259447
+Epoch [465], train_loss: 0.021118, val_loss: 0.033810, val_acc: 21.236078
+Epoch [466], train_loss: 0.021282, val_loss: 0.033669, val_acc: 21.501059
+Epoch [467], train_loss: 0.021166, val_loss: 0.033907, val_acc: 21.267422
+Epoch [468], train_loss: 0.021166, val_loss: 0.033701, val_acc: 21.556013
+Epoch [469], train_loss: 0.021155, val_loss: 0.033680, val_acc: 21.406895
+Epoch [470], train_loss: 0.021175, val_loss: 0.034115, val_acc: 20.840073
+Epoch [471], train_loss: 0.021181, val_loss: 0.033569, val_acc: 21.551146
+Epoch [472], train_loss: 0.021182, val_loss: 0.033843, val_acc: 21.149593
+Epoch [473], train_loss: 0.021193, val_loss: 0.033692, val_acc: 21.461260
+Epoch [474], train_loss: 0.021149, val_loss: 0.033621, val_acc: 21.492258
+Epoch [475], train_loss: 0.021141, val_loss: 0.033555, val_acc: 21.791946
+Epoch [476], train_loss: 0.021092, val_loss: 0.033696, val_acc: 21.646463
+Epoch [477], train_loss: 0.021094, val_loss: 0.033610, val_acc: 21.647190
+Epoch [478], train_loss: 0.021077, val_loss: 0.033603, val_acc: 21.721857
+Epoch [479], train_loss: 0.021122, val_loss: 0.033782, val_acc: 21.369867
+Epoch [480], train_loss: 0.021161, val_loss: 0.033597, val_acc: 21.520435
+Epoch [481], train_loss: 0.021072, val_loss: 0.033802, val_acc: 21.484446
+Epoch [482], train_loss: 0.021113, val_loss: 0.033664, val_acc: 21.547850
+Epoch [483], train_loss: 0.021089, val_loss: 0.033801, val_acc: 21.501854
+Epoch [484], train_loss: 0.021084, val_loss: 0.033734, val_acc: 21.421919
+Epoch [485], train_loss: 0.021049, val_loss: 0.033570, val_acc: 21.646879
+Epoch [486], train_loss: 0.021116, val_loss: 0.033689, val_acc: 21.669378
+Epoch [487], train_loss: 0.021066, val_loss: 0.033827, val_acc: 21.574465
+Epoch [488], train_loss: 0.021029, val_loss: 0.034012, val_acc: 21.109198
+Epoch [489], train_loss: 0.021082, val_loss: 0.033643, val_acc: 21.581972
+Epoch [490], train_loss: 0.021031, val_loss: 0.033614, val_acc: 21.734110
+Epoch [491], train_loss: 0.021042, val_loss: 0.033725, val_acc: 21.458958
+Epoch [492], train_loss: 0.021019, val_loss: 0.033701, val_acc: 21.450068
+Epoch [493], train_loss: 0.021122, val_loss: 0.033977, val_acc: 21.191273
+Epoch [494], train_loss: 0.021018, val_loss: 0.033479, val_acc: 21.808832
+Epoch [495], train_loss: 0.021050, val_loss: 0.033667, val_acc: 21.455084
+Epoch [496], train_loss: 0.020994, val_loss: 0.033534, val_acc: 21.676916
+Epoch [497], train_loss: 0.021076, val_loss: 0.033944, val_acc: 21.197302
+Epoch [498], train_loss: 0.020961, val_loss: 0.033739, val_acc: 21.529356
+Epoch [499], train_loss: 0.020999, val_loss: 0.033579, val_acc: 21.536421
+python3 ./UNet_V12.py  53311.68s user 52238.68s system 99% cpu 29:20:00.40 total
diff --git a/UNet/Sim_logs/UNet_64_V12_25614663.log b/UNet/Sim_logs/UNet_64_V12_25614663.log
new file mode 100644
index 0000000000000000000000000000000000000000..29d23bf339e625e0ae5adb38d1fe8741ae15fac3
--- /dev/null
+++ b/UNet/Sim_logs/UNet_64_V12_25614663.log
@@ -0,0 +1,46 @@
+(OK) Loading cuda 10.2.89
+(OK) Loading python 3.7.11
+(!!) The SciPy Stack is available: http://www.scipy.org/stackspec.html
+ Built with GCC compilers.
+Collecting torch==1.10.1
+  Using cached torch-1.10.1-cp37-cp37m-manylinux1_x86_64.whl (881.9 MB)
+Collecting typing-extensions
+  Using cached typing_extensions-4.1.1-py3-none-any.whl (26 kB)
+Installing collected packages: typing-extensions, torch
+  WARNING: The scripts convert-caffe2-to-onnx, convert-onnx-to-caffe2 and torchrun are installed in '/home/yk138599/.local/bin' which is not on PATH.
+  Consider adding this directory to PATH or, if you prefer to suppress this warning, use --no-warn-script-location.
+Successfully installed torch-1.10.1 typing-extensions-4.1.1
+WARNING: You are using pip version 21.2.4; however, version 22.0.3 is available.
+You should consider upgrading via the '/usr/local_rwth/sw/python/3.7.11/x86_64/bin/python3.7 -m pip install --upgrade pip' command.
+number auf epochs: 500
+batchsize: 32
+learning rate: 3e-05
+kernel size is: 9
+ seed is: 2518441936
+Traceback (most recent call last):
+  File "./UNet_V12.py", line 250, in <module>
+    history = fit(num_epochs, lr, model, train_dl, valid_dl,f'{path_to_rep}/UNet/output', opt_func)
+  File "./UNet_V12.py", line 165, in fit
+    loss = model.training_step(batch)
+  File "./UNet_V12.py", line 108, in training_step
+    out = self(input)                  # Generate predictions
+  File "/home/yk138599/.local/lib/python3.7/site-packages/torch/nn/modules/module.py", line 1102, in _call_impl
+    return forward_call(*input, **kwargs)
+  File "./UNet_V12.py", line 147, in forward
+    out      = self.decoder(enc_ftrs[::-1][0], enc_ftrs[::-1][1:])
+  File "/home/yk138599/.local/lib/python3.7/site-packages/torch/nn/modules/module.py", line 1102, in _call_impl
+    return forward_call(*input, **kwargs)
+  File "./UNet_V12.py", line 93, in forward
+    x        = self.dec_blocks[i](x)
+  File "/home/yk138599/.local/lib/python3.7/site-packages/torch/nn/modules/module.py", line 1102, in _call_impl
+    return forward_call(*input, **kwargs)
+  File "./UNet_V12.py", line 29, in forward
+    x = self.batch_norm_1(self.relu(self.pointwise_1(self.depthwise_1(x))))
+  File "/home/yk138599/.local/lib/python3.7/site-packages/torch/nn/modules/module.py", line 1102, in _call_impl
+    return forward_call(*input, **kwargs)
+  File "/home/yk138599/.local/lib/python3.7/site-packages/torch/nn/modules/conv.py", line 590, in forward
+    return self._conv_forward(input, self.weight, self.bias)
+  File "/home/yk138599/.local/lib/python3.7/site-packages/torch/nn/modules/conv.py", line 586, in _conv_forward
+    input, weight, bias, self.stride, self.padding, self.dilation, self.groups
+RuntimeError: CUDA out of memory. Tried to allocate 512.00 MiB (GPU 0; 15.78 GiB total capacity; 14.15 GiB already allocated; 280.50 MiB free; 14.16 GiB reserved in total by PyTorch) If reserved memory is >> allocated memory try setting max_split_size_mb to avoid fragmentation.  See documentation for Memory Management and PYTORCH_CUDA_ALLOC_CONF
+python3 ./UNet_V12.py  4.92s user 5.82s system 16% cpu 1:06.40 total
diff --git a/UNet/Sim_logs/UNet_64_V13_25614318.log b/UNet/Sim_logs/UNet_64_V13_25614318.log
new file mode 100644
index 0000000000000000000000000000000000000000..e8371e6132b07b777ef4e56fa76ca95625257f57
--- /dev/null
+++ b/UNet/Sim_logs/UNet_64_V13_25614318.log
@@ -0,0 +1,38 @@
+(OK) Loading cuda 10.2.89
+(OK) Loading python 3.7.11
+(!!) The SciPy Stack is available: http://www.scipy.org/stackspec.html
+ Built with GCC compilers.
+Collecting torch==1.10.1
+  Using cached torch-1.10.1-cp37-cp37m-manylinux1_x86_64.whl (881.9 MB)
+Collecting typing-extensions
+  Using cached typing_extensions-4.1.1-py3-none-any.whl (26 kB)
+Installing collected packages: typing-extensions, torch
+  WARNING: The scripts convert-caffe2-to-onnx, convert-onnx-to-caffe2 and torchrun are installed in '/home/yk138599/.local/bin' which is not on PATH.
+  Consider adding this directory to PATH or, if you prefer to suppress this warning, use --no-warn-script-location.
+Successfully installed torch-1.10.1 typing-extensions-4.1.1
+WARNING: You are using pip version 21.2.4; however, version 22.0.3 is available.
+You should consider upgrading via the '/usr/local_rwth/sw/python/3.7.11/x86_64/bin/python3.7 -m pip install --upgrade pip' command.
+number auf epochs: 500
+batchsize: 16
+learning rate: 3e-05
+kernel size is: 9
+ seed is: 2628832979
+Epoch [0], train_loss: 0.271159, val_loss: 0.268970, val_acc: 0.014685
+Epoch [1], train_loss: 0.269396, val_loss: 0.270057, val_acc: 0.073417
+Epoch [2], train_loss: 0.268085, val_loss: 0.279366, val_acc: 0.120659
+Epoch [3], train_loss: 0.266099, val_loss: 0.254583, val_acc: 0.435022
+Epoch [4], train_loss: 0.263552, val_loss: 0.256586, val_acc: 0.376657
+Epoch [5], train_loss: 0.261619, val_loss: 0.242178, val_acc: 0.313965
+Epoch [6], train_loss: 0.260539, val_loss: 0.247519, val_acc: 0.305485
+Epoch [7], train_loss: 0.259419, val_loss: 0.248480, val_acc: 0.254837
+Epoch [8], train_loss: 0.258631, val_loss: 0.247978, val_acc: 0.210317
+Epoch [9], train_loss: 0.257922, val_loss: 0.255808, val_acc: 0.172337
+Epoch [10], train_loss: 0.257285, val_loss: 0.252549, val_acc: 0.182081
+Epoch [11], train_loss: 0.256655, val_loss: 0.258195, val_acc: 0.166881
+Epoch [12], train_loss: 0.256037, val_loss: 0.265417, val_acc: 0.211055
+Epoch [13], train_loss: 0.255511, val_loss: 0.254048, val_acc: 0.176106
+Epoch [14], train_loss: 0.254910, val_loss: 0.249992, val_acc: 0.237055
+Epoch [15], train_loss: 0.254372, val_loss: 0.251587, val_acc: 0.127559
+Epoch [16], train_loss: 0.253764, val_loss: 0.260919, val_acc: 0.167581
+Epoch [17], train_loss: 0.253268, val_loss: 0.259768, val_acc: 0.206201
+python3 ./UNet_V13.py  1570.35s user 1560.30s system 96% cpu 53:54.66 total
diff --git a/UNet/Sim_logs/UNet_V9_1_25611080.log b/UNet/Sim_logs/UNet_64_V13_25614634.log
similarity index 69%
rename from UNet/Sim_logs/UNet_V9_1_25611080.log
rename to UNet/Sim_logs/UNet_64_V13_25614634.log
index 0bc401d89ac2e7761fead0617ffac115edc92959..67b01f6da2c580256eb6c6b3ad669a9f695ed1c4 100644
--- a/UNet/Sim_logs/UNet_V9_1_25611080.log
+++ b/UNet/Sim_logs/UNet_64_V13_25614634.log
@@ -2,7 +2,6 @@
 (OK) Loading python 3.7.11
 (!!) The SciPy Stack is available: http://www.scipy.org/stackspec.html
  Built with GCC compilers.
-9.1 k=7 lr=1e-06
 Collecting torch==1.10.1
   Using cached torch-1.10.1-cp37-cp37m-manylinux1_x86_64.whl (881.9 MB)
 Collecting typing-extensions
@@ -13,5 +12,10 @@ Installing collected packages: typing-extensions, torch
 Successfully installed torch-1.10.1 typing-extensions-4.1.1
 WARNING: You are using pip version 21.2.4; however, version 22.0.3 is available.
 You should consider upgrading via the '/usr/local_rwth/sw/python/3.7.11/x86_64/bin/python3.7 -m pip install --upgrade pip' command.
-python3: can't open file './UNet_V9_1py': [Errno 2] No such file or directory
-python3 ./UNet_V9_1py  0.02s user 0.00s system 43% cpu 0.052 total
+Traceback (most recent call last):
+  File "./UNet_V14.py", line 10, in <module>
+    import torch
+  File "/home/yk138599/.local/lib/python3.7/site-packages/torch/__init__.py", line 197, in <module>
+    from torch._C import *  # noqa: F403
+ImportError: /home/yk138599/.local/lib/python3.7/site-packages/torch/lib/libtorch_cuda.so: cannot read file data
+python3 ./UNet_V14.py  0.14s user 0.06s system 47% cpu 0.420 total
diff --git a/UNet/Sim_logs/UNet_64_V14_25617675.log b/UNet/Sim_logs/UNet_64_V14_25617675.log
new file mode 100644
index 0000000000000000000000000000000000000000..ac6eb10c86cae42afacf8afec9d347cc803cf6d8
--- /dev/null
+++ b/UNet/Sim_logs/UNet_64_V14_25617675.log
@@ -0,0 +1,47 @@
+(OK) Loading cuda 10.2.89
+(OK) Loading python 3.7.11
+(!!) The SciPy Stack is available: http://www.scipy.org/stackspec.html
+ Built with GCC compilers.
+Collecting torch==1.10.1
+  Using cached torch-1.10.1-cp37-cp37m-manylinux1_x86_64.whl (881.9 MB)
+Collecting typing-extensions
+  Using cached typing_extensions-4.1.1-py3-none-any.whl (26 kB)
+Installing collected packages: typing-extensions, torch
+  WARNING: The scripts convert-caffe2-to-onnx, convert-onnx-to-caffe2 and torchrun are installed in '/home/yk138599/.local/bin' which is not on PATH.
+  Consider adding this directory to PATH or, if you prefer to suppress this warning, use --no-warn-script-location.
+Successfully installed torch-1.10.1 typing-extensions-4.1.1
+WARNING: You are using pip version 21.2.4; however, version 22.0.3 is available.
+You should consider upgrading via the '/usr/local_rwth/sw/python/3.7.11/x86_64/bin/python3.7 -m pip install --upgrade pip' command.
+number auf epochs: 500
+batchsize: 32
+learning rate: 3e-05
+kernel size is: 9
+ seed is: 1197567716
+Traceback (most recent call last):
+  File "./UNet_V14.py", line 249, in <module>
+Traceback (most recent call last):
+  File "/rwthfs/rz/SW/UTIL.common/Python/3.7.11/x86_64/lib/python3.7/multiprocessing/queues.py", line 242, in _feed
+    send_bytes(obj)
+  File "/rwthfs/rz/SW/UTIL.common/Python/3.7.11/x86_64/lib/python3.7/multiprocessing/connection.py", line 200, in send_bytes
+    self._send_bytes(m[offset:offset + size])
+  File "/rwthfs/rz/SW/UTIL.common/Python/3.7.11/x86_64/lib/python3.7/multiprocessing/connection.py", line 404, in _send_bytes
+    self._send(header + buf)
+  File "/rwthfs/rz/SW/UTIL.common/Python/3.7.11/x86_64/lib/python3.7/multiprocessing/connection.py", line 368, in _send
+    n = write(self._handle, buf)
+BrokenPipeError: [Errno 32] Broken pipe
+    history = fit(num_epochs, lr, model, train_dl, valid_dl,f'{path_to_rep}/UNet/output', opt_func)
+  File "./UNet_V14.py", line 163, in fit
+    for batch in train_loader:
+  File "./UNet_V14.py", line 201, in __iter__
+    yield to_device(b, self.device)
+  File "./UNet_V14.py", line 189, in to_device
+    return [to_device(x, device) for x in data]
+  File "./UNet_V14.py", line 189, in <listcomp>
+    return [to_device(x, device) for x in data]
+  File "./UNet_V14.py", line 190, in to_device
+    return data.to(device, non_blocking=True)
+  File "/home/yk138599/.local/lib/python3.7/site-packages/torch/utils/data/_utils/signal_handling.py", line 66, in handler
+    _error_if_any_worker_fails()
+RuntimeError: DataLoader worker (pid 53817) is killed by signal: Killed. 
+python3 ./UNet_V14.py  6.29s user 14.51s system 17% cpu 2:00.50 total
+slurmstepd: error: Detected 1 oom-kill event(s) in step 25617675.batch cgroup. Some of your processes may have been killed by the cgroup out-of-memory handler.
diff --git a/UNet/Sim_logs/UNet_64_V14_25621929.log b/UNet/Sim_logs/UNet_64_V14_25621929.log
new file mode 100644
index 0000000000000000000000000000000000000000..b0e5b7de331116b7ff4290d8c850441576782811
--- /dev/null
+++ b/UNet/Sim_logs/UNet_64_V14_25621929.log
@@ -0,0 +1,35 @@
+(OK) Loading cuda 10.2.89
+(OK) Loading python 3.7.11
+(!!) The SciPy Stack is available: http://www.scipy.org/stackspec.html
+ Built with GCC compilers.
+Collecting torch==1.10.1
+  Using cached torch-1.10.1-cp37-cp37m-manylinux1_x86_64.whl (881.9 MB)
+Collecting typing-extensions
+  Using cached typing_extensions-4.1.1-py3-none-any.whl (26 kB)
+Installing collected packages: typing-extensions, torch
+  WARNING: The scripts convert-caffe2-to-onnx, convert-onnx-to-caffe2 and torchrun are installed in '/home/yk138599/.local/bin' which is not on PATH.
+  Consider adding this directory to PATH or, if you prefer to suppress this warning, use --no-warn-script-location.
+Successfully installed torch-1.10.1 typing-extensions-4.1.1
+WARNING: You are using pip version 21.2.4; however, version 22.0.3 is available.
+You should consider upgrading via the '/usr/local_rwth/sw/python/3.7.11/x86_64/bin/python3.7 -m pip install --upgrade pip' command.
+number auf epochs: 500
+batchsize: 32
+learning rate: 3e-05
+kernel size is: 9
+ seed is: 1383180841
+Traceback (most recent call last):
+  File "./UNet_V14.py", line 249, in <module>
+    history = fit(num_epochs, lr, model, train_dl, valid_dl,f'{path_to_rep}/UNet/output', opt_func)
+  File "./UNet_V14.py", line 170, in fit
+    result = evaluate(model, val_loader)
+  File "/home/yk138599/.local/lib/python3.7/site-packages/torch/autograd/grad_mode.py", line 28, in decorate_context
+    return func(*args, **kwargs)
+  File "./UNet_V14.py", line 153, in evaluate
+    outputs = [model.validation_step(batch) for batch in val_loader]
+  File "./UNet_V14.py", line 153, in <listcomp>
+    outputs = [model.validation_step(batch) for batch in val_loader]
+  File "./UNet_V14.py", line 115, in validation_step
+    acc = accuracy(out.detach(), labels.detach())         # Calculate accuracy
+TypeError: accuracy() missing 1 required positional argument: 'normalization'
+terminate called without an active exception
+python3 ./UNet_V14.py  42.18s user 50.52s system 45% cpu 3:24.39 total
diff --git a/UNet/Sim_logs/UNet_64_V14_25622923.log b/UNet/Sim_logs/UNet_64_V14_25622923.log
new file mode 100644
index 0000000000000000000000000000000000000000..9246fc361e5d6c0e9fb47777952f5f57b889564a
--- /dev/null
+++ b/UNet/Sim_logs/UNet_64_V14_25622923.log
@@ -0,0 +1,530 @@
+(OK) Loading cuda 10.2.89
+(OK) Loading python 3.7.11
+(!!) The SciPy Stack is available: http://www.scipy.org/stackspec.html
+ Built with GCC compilers.
+Collecting torch==1.10.1
+  Using cached torch-1.10.1-cp37-cp37m-manylinux1_x86_64.whl (881.9 MB)
+Collecting typing-extensions
+  Using cached typing_extensions-4.1.1-py3-none-any.whl (26 kB)
+Installing collected packages: typing-extensions, torch
+  WARNING: The scripts convert-caffe2-to-onnx, convert-onnx-to-caffe2 and torchrun are installed in '/home/yk138599/.local/bin' which is not on PATH.
+  Consider adding this directory to PATH or, if you prefer to suppress this warning, use --no-warn-script-location.
+Successfully installed torch-1.10.1 typing-extensions-4.1.1
+WARNING: You are using pip version 21.2.4; however, version 22.0.3 is available.
+You should consider upgrading via the '/usr/local_rwth/sw/python/3.7.11/x86_64/bin/python3.7 -m pip install --upgrade pip' command.
+number auf epochs: 500
+batchsize: 32
+learning rate: 3e-05
+kernel size is: 9
+ seed is: 1942621789
+Epoch [0], train_loss: 0.164275, val_loss: 0.154094, val_acc: 3.106299
+Epoch [1], train_loss: 0.161620, val_loss: 0.146837, val_acc: 3.740176
+Epoch [2], train_loss: 0.159047, val_loss: 0.149153, val_acc: 3.595475
+Epoch [3], train_loss: 0.157656, val_loss: 0.153217, val_acc: 3.222910
+Epoch [4], train_loss: 0.157267, val_loss: 0.154433, val_acc: 3.089663
+Epoch [5], train_loss: 0.157067, val_loss: 0.155600, val_acc: 3.058465
+Epoch [6], train_loss: 0.156904, val_loss: 0.155745, val_acc: 3.038082
+Epoch [7], train_loss: 0.156689, val_loss: 0.156574, val_acc: 3.074161
+Epoch [8], train_loss: 0.156496, val_loss: 0.155376, val_acc: 3.068932
+Epoch [9], train_loss: 0.156331, val_loss: 0.154627, val_acc: 3.113573
+Epoch [10], train_loss: 0.156131, val_loss: 0.155342, val_acc: 3.077760
+Epoch [11], train_loss: 0.155966, val_loss: 0.155863, val_acc: 3.088178
+Epoch [12], train_loss: 0.155740, val_loss: 0.157912, val_acc: 3.121452
+Epoch [13], train_loss: 0.155606, val_loss: 0.153306, val_acc: 3.162503
+Epoch [14], train_loss: 0.155351, val_loss: 0.155345, val_acc: 3.142939
+Epoch [15], train_loss: 0.155227, val_loss: 0.153072, val_acc: 3.182159
+Epoch [16], train_loss: 0.155046, val_loss: 0.157110, val_acc: 3.144909
+Epoch [17], train_loss: 0.154873, val_loss: 0.156572, val_acc: 3.203168
+Epoch [18], train_loss: 0.154679, val_loss: 0.153232, val_acc: 3.239622
+Epoch [19], train_loss: 0.154439, val_loss: 0.153702, val_acc: 3.235147
+Epoch [20], train_loss: 0.154278, val_loss: 0.154391, val_acc: 3.246606
+Epoch [21], train_loss: 0.154140, val_loss: 0.153587, val_acc: 3.272015
+Epoch [22], train_loss: 0.153920, val_loss: 0.153145, val_acc: 3.301957
+Epoch [23], train_loss: 0.153737, val_loss: 0.152254, val_acc: 3.311252
+Epoch [24], train_loss: 0.153558, val_loss: 0.151712, val_acc: 3.326952
+Epoch [25], train_loss: 0.153363, val_loss: 0.151382, val_acc: 3.339718
+Epoch [26], train_loss: 0.153277, val_loss: 0.154774, val_acc: 3.342932
+Epoch [27], train_loss: 0.153042, val_loss: 0.160454, val_acc: 3.327057
+Epoch [28], train_loss: 0.152869, val_loss: 0.155585, val_acc: 3.378287
+Epoch [29], train_loss: 0.152670, val_loss: 0.153769, val_acc: 3.394281
+Epoch [30], train_loss: 0.152499, val_loss: 0.151279, val_acc: 3.438950
+Epoch [31], train_loss: 0.152326, val_loss: 0.153772, val_acc: 3.434758
+Epoch [32], train_loss: 0.152178, val_loss: 0.154387, val_acc: 3.450200
+Epoch [33], train_loss: 0.151953, val_loss: 0.153493, val_acc: 3.475944
+Epoch [34], train_loss: 0.151728, val_loss: 0.151216, val_acc: 3.501959
+Epoch [35], train_loss: 0.151569, val_loss: 0.150503, val_acc: 3.522294
+Epoch [36], train_loss: 0.151402, val_loss: 0.150466, val_acc: 3.525056
+Epoch [37], train_loss: 0.151201, val_loss: 0.152100, val_acc: 3.543562
+Epoch [38], train_loss: 0.151020, val_loss: 0.152956, val_acc: 3.553348
+Epoch [39], train_loss: 0.150831, val_loss: 0.151746, val_acc: 3.585845
+Epoch [40], train_loss: 0.150689, val_loss: 0.152664, val_acc: 3.597040
+Epoch [41], train_loss: 0.150471, val_loss: 0.149467, val_acc: 3.623189
+Epoch [42], train_loss: 0.150293, val_loss: 0.151150, val_acc: 3.629575
+Epoch [43], train_loss: 0.150076, val_loss: 0.149250, val_acc: 3.659418
+Epoch [44], train_loss: 0.149926, val_loss: 0.149757, val_acc: 3.656725
+Epoch [45], train_loss: 0.149734, val_loss: 0.150163, val_acc: 3.683978
+Epoch [46], train_loss: 0.149549, val_loss: 0.148804, val_acc: 3.706119
+Epoch [47], train_loss: 0.149407, val_loss: 0.147687, val_acc: 3.726373
+Epoch [48], train_loss: 0.149173, val_loss: 0.147767, val_acc: 3.744282
+Epoch [49], train_loss: 0.149031, val_loss: 0.147328, val_acc: 3.762639
+Epoch [50], train_loss: 0.148812, val_loss: 0.147153, val_acc: 3.789956
+Epoch [51], train_loss: 0.148630, val_loss: 0.147469, val_acc: 3.781422
+Epoch [52], train_loss: 0.148437, val_loss: 0.149101, val_acc: 3.788633
+Epoch [53], train_loss: 0.148250, val_loss: 0.148013, val_acc: 3.827875
+Epoch [54], train_loss: 0.148086, val_loss: 0.148039, val_acc: 3.839360
+Epoch [55], train_loss: 0.147877, val_loss: 0.148036, val_acc: 3.847586
+Epoch [56], train_loss: 0.147743, val_loss: 0.148182, val_acc: 3.853851
+Epoch [57], train_loss: 0.147532, val_loss: 0.148210, val_acc: 3.880925
+Epoch [58], train_loss: 0.147365, val_loss: 0.146296, val_acc: 3.914625
+Epoch [59], train_loss: 0.147188, val_loss: 0.146656, val_acc: 3.920943
+Epoch [60], train_loss: 0.147011, val_loss: 0.145973, val_acc: 3.926489
+Epoch [61], train_loss: 0.146816, val_loss: 0.145767, val_acc: 3.959781
+Epoch [62], train_loss: 0.146638, val_loss: 0.146882, val_acc: 3.964249
+Epoch [63], train_loss: 0.146441, val_loss: 0.148918, val_acc: 3.977205
+Epoch [64], train_loss: 0.146271, val_loss: 0.147513, val_acc: 3.998291
+Epoch [65], train_loss: 0.146053, val_loss: 0.144721, val_acc: 4.028870
+Epoch [66], train_loss: 0.145894, val_loss: 0.144817, val_acc: 4.045803
+Epoch [67], train_loss: 0.145736, val_loss: 0.145347, val_acc: 4.054386
+Epoch [68], train_loss: 0.145505, val_loss: 0.144762, val_acc: 4.065557
+Epoch [69], train_loss: 0.145362, val_loss: 0.147393, val_acc: 4.078851
+Epoch [70], train_loss: 0.145183, val_loss: 0.144131, val_acc: 4.102670
+Epoch [71], train_loss: 0.144970, val_loss: 0.144166, val_acc: 4.117026
+Epoch [72], train_loss: 0.144763, val_loss: 0.143892, val_acc: 4.135971
+Epoch [73], train_loss: 0.144611, val_loss: 0.144013, val_acc: 4.145560
+Epoch [74], train_loss: 0.144401, val_loss: 0.146970, val_acc: 4.145509
+Epoch [75], train_loss: 0.144230, val_loss: 0.143785, val_acc: 4.179195
+Epoch [76], train_loss: 0.144088, val_loss: 0.144246, val_acc: 4.195899
+Epoch [77], train_loss: 0.143865, val_loss: 0.144036, val_acc: 4.200036
+Epoch [78], train_loss: 0.143684, val_loss: 0.141648, val_acc: 4.231328
+Epoch [79], train_loss: 0.143480, val_loss: 0.141406, val_acc: 4.246238
+Epoch [80], train_loss: 0.143313, val_loss: 0.151031, val_acc: 4.195585
+Epoch [81], train_loss: 0.143151, val_loss: 0.145827, val_acc: 4.254258
+Epoch [82], train_loss: 0.142962, val_loss: 0.141944, val_acc: 4.300659
+Epoch [83], train_loss: 0.142754, val_loss: 0.142856, val_acc: 4.304292
+Epoch [84], train_loss: 0.142538, val_loss: 0.153055, val_acc: 4.245128
+Epoch [85], train_loss: 0.142404, val_loss: 0.143004, val_acc: 4.329192
+Epoch [86], train_loss: 0.142178, val_loss: 0.142162, val_acc: 4.347109
+Epoch [87], train_loss: 0.142016, val_loss: 0.142882, val_acc: 4.360372
+Epoch [88], train_loss: 0.141797, val_loss: 0.141485, val_acc: 4.376443
+Epoch [89], train_loss: 0.141652, val_loss: 0.140734, val_acc: 4.396636
+Epoch [90], train_loss: 0.141480, val_loss: 0.140736, val_acc: 4.416362
+Epoch [91], train_loss: 0.141279, val_loss: 0.139516, val_acc: 4.431009
+Epoch [92], train_loss: 0.141085, val_loss: 0.140904, val_acc: 4.439287
+Epoch [93], train_loss: 0.140894, val_loss: 0.140681, val_acc: 4.451593
+Epoch [94], train_loss: 0.140706, val_loss: 0.140694, val_acc: 4.463684
+Epoch [95], train_loss: 0.140534, val_loss: 0.139117, val_acc: 4.486958
+Epoch [96], train_loss: 0.140339, val_loss: 0.138380, val_acc: 4.511100
+Epoch [97], train_loss: 0.140182, val_loss: 0.139750, val_acc: 4.517749
+Epoch [98], train_loss: 0.139989, val_loss: 0.148104, val_acc: 4.479321
+Epoch [99], train_loss: 0.139814, val_loss: 0.143547, val_acc: 4.513866
+Epoch [100], train_loss: 0.139666, val_loss: 0.160294, val_acc: 4.402160
+Epoch [101], train_loss: 0.139461, val_loss: 0.142264, val_acc: 4.543054
+Epoch [102], train_loss: 0.139254, val_loss: 0.139147, val_acc: 4.577896
+Epoch [103], train_loss: 0.139078, val_loss: 0.140537, val_acc: 4.581303
+Epoch [104], train_loss: 0.138899, val_loss: 0.139877, val_acc: 4.591590
+Epoch [105], train_loss: 0.138747, val_loss: 0.140546, val_acc: 4.604139
+Epoch [106], train_loss: 0.138549, val_loss: 0.137516, val_acc: 4.638970
+Epoch [107], train_loss: 0.138296, val_loss: 0.136925, val_acc: 4.647872
+Epoch [108], train_loss: 0.138180, val_loss: 0.136565, val_acc: 4.661136
+Epoch [109], train_loss: 0.138019, val_loss: 0.136935, val_acc: 4.669614
+Epoch [110], train_loss: 0.137774, val_loss: 0.136587, val_acc: 4.689005
+Epoch [111], train_loss: 0.137574, val_loss: 0.137129, val_acc: 4.688179
+Epoch [112], train_loss: 0.137440, val_loss: 0.137388, val_acc: 4.715113
+Epoch [113], train_loss: 0.137237, val_loss: 0.136958, val_acc: 4.717060
+Epoch [114], train_loss: 0.137053, val_loss: 0.136670, val_acc: 4.725399
+Epoch [115], train_loss: 0.136870, val_loss: 0.135161, val_acc: 4.745119
+Epoch [116], train_loss: 0.136649, val_loss: 0.134122, val_acc: 4.772412
+Epoch [117], train_loss: 0.136457, val_loss: 0.133902, val_acc: 4.782037
+Epoch [118], train_loss: 0.136291, val_loss: 0.135846, val_acc: 4.764773
+Epoch [119], train_loss: 0.136107, val_loss: 0.137481, val_acc: 4.771959
+Epoch [120], train_loss: 0.135933, val_loss: 0.135372, val_acc: 4.794849
+Epoch [121], train_loss: 0.135757, val_loss: 0.137575, val_acc: 4.799680
+Epoch [122], train_loss: 0.135577, val_loss: 0.133857, val_acc: 4.831560
+Epoch [123], train_loss: 0.135377, val_loss: 0.135510, val_acc: 4.824293
+Epoch [124], train_loss: 0.135218, val_loss: 0.135049, val_acc: 4.840257
+Epoch [125], train_loss: 0.135060, val_loss: 0.135519, val_acc: 4.846591
+Epoch [126], train_loss: 0.134807, val_loss: 0.133817, val_acc: 4.866966
+Epoch [127], train_loss: 0.134675, val_loss: 0.133339, val_acc: 4.874251
+Epoch [128], train_loss: 0.134437, val_loss: 0.133873, val_acc: 4.871865
+Epoch [129], train_loss: 0.134279, val_loss: 0.132277, val_acc: 4.901555
+Epoch [130], train_loss: 0.134040, val_loss: 0.132075, val_acc: 4.913158
+Epoch [131], train_loss: 0.133897, val_loss: 0.132076, val_acc: 4.916838
+Epoch [132], train_loss: 0.133711, val_loss: 0.133343, val_acc: 4.922461
+Epoch [133], train_loss: 0.133538, val_loss: 0.134370, val_acc: 4.913289
+Epoch [134], train_loss: 0.133347, val_loss: 0.132898, val_acc: 4.938629
+Epoch [135], train_loss: 0.133134, val_loss: 0.130629, val_acc: 4.964163
+Epoch [136], train_loss: 0.132958, val_loss: 0.132339, val_acc: 4.954789
+Epoch [137], train_loss: 0.132791, val_loss: 0.130903, val_acc: 4.977744
+Epoch [138], train_loss: 0.132613, val_loss: 0.131110, val_acc: 4.988430
+Epoch [139], train_loss: 0.132424, val_loss: 0.131832, val_acc: 4.990123
+Epoch [140], train_loss: 0.132227, val_loss: 0.130648, val_acc: 5.000812
+Epoch [141], train_loss: 0.132135, val_loss: 0.129453, val_acc: 5.019682
+Epoch [142], train_loss: 0.131923, val_loss: 0.130786, val_acc: 5.014193
+Epoch [143], train_loss: 0.131706, val_loss: 0.130341, val_acc: 5.023720
+Epoch [144], train_loss: 0.131548, val_loss: 0.134395, val_acc: 5.002271
+Epoch [145], train_loss: 0.131363, val_loss: 0.130809, val_acc: 5.027940
+Epoch [146], train_loss: 0.131146, val_loss: 0.129567, val_acc: 5.047523
+Epoch [147], train_loss: 0.130992, val_loss: 0.129385, val_acc: 5.048512
+Epoch [148], train_loss: 0.130821, val_loss: 0.129226, val_acc: 5.060954
+Epoch [149], train_loss: 0.130587, val_loss: 0.134119, val_acc: 5.037586
+Epoch [150], train_loss: 0.130389, val_loss: 0.129209, val_acc: 5.072182
+Epoch [151], train_loss: 0.130250, val_loss: 0.129179, val_acc: 5.078640
+Epoch [152], train_loss: 0.130057, val_loss: 0.128589, val_acc: 5.086357
+Epoch [153], train_loss: 0.129877, val_loss: 0.128912, val_acc: 5.089007
+Epoch [154], train_loss: 0.129684, val_loss: 0.127897, val_acc: 5.100817
+Epoch [155], train_loss: 0.129493, val_loss: 0.127958, val_acc: 5.107635
+Epoch [156], train_loss: 0.129299, val_loss: 0.127673, val_acc: 5.115718
+Epoch [157], train_loss: 0.129093, val_loss: 0.128566, val_acc: 5.112843
+Epoch [158], train_loss: 0.128965, val_loss: 0.131181, val_acc: 5.098126
+Epoch [159], train_loss: 0.128767, val_loss: 0.143262, val_acc: 5.031446
+Epoch [160], train_loss: 0.128552, val_loss: 0.133458, val_acc: 5.096402
+Epoch [161], train_loss: 0.128399, val_loss: 0.128471, val_acc: 5.128979
+Epoch [162], train_loss: 0.128226, val_loss: 0.130653, val_acc: 5.119245
+Epoch [163], train_loss: 0.128047, val_loss: 0.129423, val_acc: 5.135730
+Epoch [164], train_loss: 0.127837, val_loss: 0.126925, val_acc: 5.148107
+Epoch [165], train_loss: 0.127615, val_loss: 0.127460, val_acc: 5.148556
+Epoch [166], train_loss: 0.127478, val_loss: 0.124825, val_acc: 5.166609
+Epoch [167], train_loss: 0.127288, val_loss: 0.124714, val_acc: 5.172360
+Epoch [168], train_loss: 0.127107, val_loss: 0.124869, val_acc: 5.173521
+Epoch [169], train_loss: 0.126899, val_loss: 0.125587, val_acc: 5.174158
+Epoch [170], train_loss: 0.126753, val_loss: 0.128853, val_acc: 5.158038
+Epoch [171], train_loss: 0.126497, val_loss: 0.127386, val_acc: 5.163083
+Epoch [172], train_loss: 0.126346, val_loss: 0.127880, val_acc: 5.165955
+Epoch [173], train_loss: 0.126171, val_loss: 0.125695, val_acc: 5.180370
+Epoch [174], train_loss: 0.126010, val_loss: 0.123187, val_acc: 5.198707
+Epoch [175], train_loss: 0.125815, val_loss: 0.125114, val_acc: 5.189867
+Epoch [176], train_loss: 0.125612, val_loss: 0.125570, val_acc: 5.170240
+Epoch [177], train_loss: 0.125447, val_loss: 0.125299, val_acc: 5.145611
+Epoch [178], train_loss: 0.125260, val_loss: 0.126978, val_acc: 5.114961
+Epoch [179], train_loss: 0.125044, val_loss: 0.125280, val_acc: 5.149275
+Epoch [180], train_loss: 0.124885, val_loss: 0.125177, val_acc: 5.101291
+Epoch [181], train_loss: 0.124608, val_loss: 0.123618, val_acc: 4.904806
+Epoch [182], train_loss: 0.124327, val_loss: 0.123851, val_acc: 4.589157
+Epoch [183], train_loss: 0.123577, val_loss: 0.129057, val_acc: 2.610797
+Epoch [184], train_loss: 0.122699, val_loss: 0.123109, val_acc: 2.980874
+Epoch [185], train_loss: 0.122231, val_loss: 0.121048, val_acc: 3.002264
+Epoch [186], train_loss: 0.121929, val_loss: 0.123868, val_acc: 3.097968
+Epoch [187], train_loss: 0.121627, val_loss: 0.123302, val_acc: 3.060883
+Epoch [188], train_loss: 0.121335, val_loss: 0.119526, val_acc: 3.065238
+Epoch [189], train_loss: 0.121069, val_loss: 0.118398, val_acc: 2.938415
+Epoch [190], train_loss: 0.120828, val_loss: 0.120153, val_acc: 2.932837
+Epoch [191], train_loss: 0.120578, val_loss: 0.120037, val_acc: 2.890176
+Epoch [192], train_loss: 0.120370, val_loss: 0.119004, val_acc: 2.963700
+Epoch [193], train_loss: 0.120161, val_loss: 0.120110, val_acc: 2.858114
+Epoch [194], train_loss: 0.119912, val_loss: 0.118224, val_acc: 2.915896
+Epoch [195], train_loss: 0.119686, val_loss: 0.118054, val_acc: 2.847379
+Epoch [196], train_loss: 0.119366, val_loss: 0.116499, val_acc: 2.846215
+Epoch [197], train_loss: 0.119234, val_loss: 0.116280, val_acc: 2.874922
+Epoch [198], train_loss: 0.118941, val_loss: 0.120695, val_acc: 2.733601
+Epoch [199], train_loss: 0.118712, val_loss: 0.118723, val_acc: 2.881884
+Epoch [200], train_loss: 0.118526, val_loss: 0.119272, val_acc: 2.848202
+Epoch [201], train_loss: 0.118318, val_loss: 0.120511, val_acc: 2.895041
+Epoch [202], train_loss: 0.118050, val_loss: 0.117970, val_acc: 2.806500
+Epoch [203], train_loss: 0.117866, val_loss: 0.116142, val_acc: 2.754291
+Epoch [204], train_loss: 0.117589, val_loss: 0.117143, val_acc: 2.767565
+Epoch [205], train_loss: 0.117356, val_loss: 0.115391, val_acc: 2.778460
+Epoch [206], train_loss: 0.117142, val_loss: 0.115713, val_acc: 2.972733
+Epoch [207], train_loss: 0.116890, val_loss: 0.114999, val_acc: 2.981343
+Epoch [208], train_loss: 0.116688, val_loss: 0.116315, val_acc: 2.870224
+Epoch [209], train_loss: 0.116458, val_loss: 0.115467, val_acc: 2.869621
+Epoch [210], train_loss: 0.116249, val_loss: 0.114437, val_acc: 2.923340
+Epoch [211], train_loss: 0.116007, val_loss: 0.115264, val_acc: 2.979499
+Epoch [212], train_loss: 0.115760, val_loss: 0.115981, val_acc: 2.898903
+Epoch [213], train_loss: 0.115552, val_loss: 0.117181, val_acc: 2.927045
+Epoch [214], train_loss: 0.115326, val_loss: 0.114060, val_acc: 2.912263
+Epoch [215], train_loss: 0.115126, val_loss: 0.114667, val_acc: 2.934695
+Epoch [216], train_loss: 0.114900, val_loss: 0.114015, val_acc: 2.947665
+Epoch [217], train_loss: 0.114674, val_loss: 0.113625, val_acc: 2.956177
+Epoch [218], train_loss: 0.114453, val_loss: 0.116047, val_acc: 2.818378
+Epoch [219], train_loss: 0.114212, val_loss: 0.114036, val_acc: 2.859174
+Epoch [220], train_loss: 0.113998, val_loss: 0.113045, val_acc: 2.992398
+Epoch [221], train_loss: 0.113781, val_loss: 0.112870, val_acc: 2.882979
+Epoch [222], train_loss: 0.113600, val_loss: 0.112124, val_acc: 3.087470
+Epoch [223], train_loss: 0.113320, val_loss: 0.111100, val_acc: 3.062519
+Epoch [224], train_loss: 0.113136, val_loss: 0.114712, val_acc: 2.891523
+Epoch [225], train_loss: 0.112933, val_loss: 0.115324, val_acc: 2.752265
+Epoch [226], train_loss: 0.112677, val_loss: 0.113279, val_acc: 3.036456
+Epoch [227], train_loss: 0.112446, val_loss: 0.113464, val_acc: 2.926842
+Epoch [228], train_loss: 0.112220, val_loss: 0.111693, val_acc: 2.914069
+Epoch [229], train_loss: 0.112045, val_loss: 0.114549, val_acc: 2.948331
+Epoch [230], train_loss: 0.111826, val_loss: 0.115222, val_acc: 2.907038
+Epoch [231], train_loss: 0.111620, val_loss: 0.113647, val_acc: 3.050123
+Epoch [232], train_loss: 0.111383, val_loss: 0.112089, val_acc: 2.820757
+Epoch [233], train_loss: 0.111142, val_loss: 0.111997, val_acc: 2.961417
+Epoch [234], train_loss: 0.110974, val_loss: 0.111878, val_acc: 3.021868
+Epoch [235], train_loss: 0.110705, val_loss: 0.109093, val_acc: 3.065913
+Epoch [236], train_loss: 0.110564, val_loss: 0.110651, val_acc: 2.886591
+Epoch [237], train_loss: 0.110273, val_loss: 0.109715, val_acc: 2.963789
+Epoch [238], train_loss: 0.110075, val_loss: 0.108677, val_acc: 3.030513
+Epoch [239], train_loss: 0.109863, val_loss: 0.107338, val_acc: 3.128039
+Epoch [240], train_loss: 0.109656, val_loss: 0.110220, val_acc: 2.891601
+Epoch [241], train_loss: 0.109413, val_loss: 0.109188, val_acc: 3.012608
+Epoch [242], train_loss: 0.109197, val_loss: 0.108993, val_acc: 2.930562
+Epoch [243], train_loss: 0.108994, val_loss: 0.107433, val_acc: 3.046186
+Epoch [244], train_loss: 0.108756, val_loss: 0.107876, val_acc: 3.100905
+Epoch [245], train_loss: 0.108557, val_loss: 0.106010, val_acc: 3.074952
+Epoch [246], train_loss: 0.108367, val_loss: 0.106307, val_acc: 3.073375
+Epoch [247], train_loss: 0.108102, val_loss: 0.106311, val_acc: 3.031282
+Epoch [248], train_loss: 0.107865, val_loss: 0.105407, val_acc: 3.160540
+Epoch [249], train_loss: 0.107688, val_loss: 0.105706, val_acc: 3.125927
+Epoch [250], train_loss: 0.107461, val_loss: 0.105822, val_acc: 3.096532
+Epoch [251], train_loss: 0.107218, val_loss: 0.106026, val_acc: 3.017698
+Epoch [252], train_loss: 0.107038, val_loss: 0.105783, val_acc: 3.022573
+Epoch [253], train_loss: 0.106763, val_loss: 0.104777, val_acc: 3.126608
+Epoch [254], train_loss: 0.106608, val_loss: 0.105006, val_acc: 3.075949
+Epoch [255], train_loss: 0.106377, val_loss: 0.106050, val_acc: 3.036595
+Epoch [256], train_loss: 0.106141, val_loss: 0.104598, val_acc: 3.094727
+Epoch [257], train_loss: 0.105970, val_loss: 0.104594, val_acc: 3.151241
+Epoch [258], train_loss: 0.105723, val_loss: 0.107779, val_acc: 3.136473
+Epoch [259], train_loss: 0.105500, val_loss: 0.104121, val_acc: 3.317698
+Epoch [260], train_loss: 0.105299, val_loss: 0.105572, val_acc: 3.034870
+Epoch [261], train_loss: 0.105089, val_loss: 0.105484, val_acc: 3.146155
+Epoch [262], train_loss: 0.104946, val_loss: 0.107883, val_acc: 3.229842
+Epoch [263], train_loss: 0.104702, val_loss: 0.111062, val_acc: 3.096228
+Epoch [264], train_loss: 0.104478, val_loss: 0.107287, val_acc: 3.018247
+Epoch [265], train_loss: 0.104243, val_loss: 0.104715, val_acc: 2.961586
+Epoch [266], train_loss: 0.104043, val_loss: 0.103652, val_acc: 3.226850
+Epoch [267], train_loss: 0.103786, val_loss: 0.104055, val_acc: 3.176761
+Epoch [268], train_loss: 0.103555, val_loss: 0.102686, val_acc: 3.222022
+Epoch [269], train_loss: 0.103321, val_loss: 0.102759, val_acc: 3.237506
+Epoch [270], train_loss: 0.103163, val_loss: 0.102694, val_acc: 3.253319
+Epoch [271], train_loss: 0.102899, val_loss: 0.102933, val_acc: 3.138817
+Epoch [272], train_loss: 0.102706, val_loss: 0.102387, val_acc: 3.168958
+Epoch [273], train_loss: 0.102477, val_loss: 0.101970, val_acc: 3.229593
+Epoch [274], train_loss: 0.102285, val_loss: 0.102413, val_acc: 3.204744
+Epoch [275], train_loss: 0.102058, val_loss: 0.103163, val_acc: 2.920055
+Epoch [276], train_loss: 0.101819, val_loss: 0.103170, val_acc: 3.005846
+Epoch [277], train_loss: 0.101607, val_loss: 0.099911, val_acc: 3.254048
+Epoch [278], train_loss: 0.101386, val_loss: 0.099738, val_acc: 3.355536
+Epoch [279], train_loss: 0.101171, val_loss: 0.099886, val_acc: 3.434690
+Epoch [280], train_loss: 0.100958, val_loss: 0.100155, val_acc: 3.357364
+Epoch [281], train_loss: 0.100741, val_loss: 0.101007, val_acc: 3.114925
+Epoch [282], train_loss: 0.100529, val_loss: 0.100120, val_acc: 3.266027
+Epoch [283], train_loss: 0.100322, val_loss: 0.100879, val_acc: 3.073557
+Epoch [284], train_loss: 0.100087, val_loss: 0.098737, val_acc: 3.343679
+Epoch [285], train_loss: 0.099874, val_loss: 0.102175, val_acc: 3.171875
+Epoch [286], train_loss: 0.099698, val_loss: 0.098479, val_acc: 3.186453
+Epoch [287], train_loss: 0.099452, val_loss: 0.098974, val_acc: 3.125187
+Epoch [288], train_loss: 0.099228, val_loss: 0.098486, val_acc: 3.143797
+Epoch [289], train_loss: 0.099024, val_loss: 0.098519, val_acc: 3.303273
+Epoch [290], train_loss: 0.098803, val_loss: 0.097930, val_acc: 3.300733
+Epoch [291], train_loss: 0.098614, val_loss: 0.098199, val_acc: 3.198596
+Epoch [292], train_loss: 0.098387, val_loss: 0.096999, val_acc: 3.366136
+Epoch [293], train_loss: 0.098185, val_loss: 0.103066, val_acc: 3.500359
+Epoch [294], train_loss: 0.097931, val_loss: 0.099079, val_acc: 3.149076
+Epoch [295], train_loss: 0.097736, val_loss: 0.096658, val_acc: 3.251934
+Epoch [296], train_loss: 0.097529, val_loss: 0.097489, val_acc: 3.194124
+Epoch [297], train_loss: 0.097290, val_loss: 0.096748, val_acc: 3.550591
+Epoch [298], train_loss: 0.097119, val_loss: 0.094908, val_acc: 3.378948
+Epoch [299], train_loss: 0.096836, val_loss: 0.094465, val_acc: 3.316211
+Epoch [300], train_loss: 0.096634, val_loss: 0.097280, val_acc: 3.134475
+Traceback (most recent call last):
+  File "/rwthfs/rz/SW/UTIL.common/Python/3.7.11/x86_64/lib/python3.7/multiprocessing/queues.py", line 242, in _feed
+    send_bytes(obj)
+  File "/rwthfs/rz/SW/UTIL.common/Python/3.7.11/x86_64/lib/python3.7/multiprocessing/connection.py", line 200, in send_bytes
+    self._send_bytes(m[offset:offset + size])
+  File "/rwthfs/rz/SW/UTIL.common/Python/3.7.11/x86_64/lib/python3.7/multiprocessing/connection.py", line 404, in _send_bytes
+    self._send(header + buf)
+  File "/rwthfs/rz/SW/UTIL.common/Python/3.7.11/x86_64/lib/python3.7/multiprocessing/connection.py", line 368, in _send
+    n = write(self._handle, buf)
+BrokenPipeError: [Errno 32] Broken pipe
+Epoch [301], train_loss: 0.096411, val_loss: 0.097074, val_acc: 3.456997
+Epoch [302], train_loss: 0.096215, val_loss: 0.095550, val_acc: 3.401972
+Epoch [303], train_loss: 0.096020, val_loss: 0.093489, val_acc: 3.450133
+Epoch [304], train_loss: 0.095785, val_loss: 0.099396, val_acc: 3.555710
+Epoch [305], train_loss: 0.095615, val_loss: 0.099964, val_acc: 3.342927
+Epoch [306], train_loss: 0.095351, val_loss: 0.095430, val_acc: 3.152560
+Epoch [307], train_loss: 0.095122, val_loss: 0.093982, val_acc: 3.427960
+Epoch [308], train_loss: 0.094900, val_loss: 0.093227, val_acc: 3.672066
+Epoch [309], train_loss: 0.094697, val_loss: 0.092141, val_acc: 3.682017
+Epoch [310], train_loss: 0.094469, val_loss: 0.091456, val_acc: 3.530071
+Epoch [311], train_loss: 0.094237, val_loss: 0.092444, val_acc: 3.441194
+Epoch [312], train_loss: 0.094032, val_loss: 0.092414, val_acc: 3.568276
+Epoch [313], train_loss: 0.093877, val_loss: 0.093288, val_acc: 3.482829
+Epoch [314], train_loss: 0.093674, val_loss: 0.094052, val_acc: 3.363915
+Epoch [315], train_loss: 0.093436, val_loss: 0.092899, val_acc: 3.352724
+Epoch [316], train_loss: 0.093196, val_loss: 0.093557, val_acc: 3.458592
+Epoch [317], train_loss: 0.093024, val_loss: 0.094031, val_acc: 3.638203
+Epoch [318], train_loss: 0.092793, val_loss: 0.090470, val_acc: 3.687723
+Epoch [319], train_loss: 0.092556, val_loss: 0.090609, val_acc: 3.421433
+Epoch [320], train_loss: 0.092354, val_loss: 0.089833, val_acc: 3.554562
+Epoch [321], train_loss: 0.092101, val_loss: 0.091140, val_acc: 3.573798
+Epoch [322], train_loss: 0.091958, val_loss: 0.087380, val_acc: 3.823299
+Epoch [323], train_loss: 0.091704, val_loss: 0.088386, val_acc: 3.628786
+Epoch [324], train_loss: 0.091513, val_loss: 0.089668, val_acc: 3.523546
+Epoch [325], train_loss: 0.091271, val_loss: 0.089938, val_acc: 3.822260
+Epoch [326], train_loss: 0.091093, val_loss: 0.091444, val_acc: 3.600101
+Epoch [327], train_loss: 0.090830, val_loss: 0.090551, val_acc: 3.572951
+Epoch [328], train_loss: 0.090649, val_loss: 0.089586, val_acc: 3.651095
+Epoch [329], train_loss: 0.090407, val_loss: 0.089882, val_acc: 3.474342
+Epoch [330], train_loss: 0.090204, val_loss: 0.088106, val_acc: 3.465448
+Epoch [331], train_loss: 0.089968, val_loss: 0.091018, val_acc: 3.599392
+Epoch [332], train_loss: 0.089778, val_loss: 0.090760, val_acc: 3.599772
+Epoch [333], train_loss: 0.089558, val_loss: 0.089337, val_acc: 3.440086
+Epoch [334], train_loss: 0.089371, val_loss: 0.089853, val_acc: 3.746447
+Epoch [335], train_loss: 0.089126, val_loss: 0.087222, val_acc: 3.445903
+Epoch [336], train_loss: 0.088897, val_loss: 0.087434, val_acc: 3.655097
+Epoch [337], train_loss: 0.088682, val_loss: 0.092653, val_acc: 3.865633
+Epoch [338], train_loss: 0.088468, val_loss: 0.083040, val_acc: 4.028133
+Epoch [339], train_loss: 0.088283, val_loss: 0.099323, val_acc: 3.741745
+Epoch [340], train_loss: 0.088066, val_loss: 0.110286, val_acc: 3.464825
+Epoch [341], train_loss: 0.087844, val_loss: 0.090012, val_acc: 3.540604
+Epoch [342], train_loss: 0.087650, val_loss: 0.087382, val_acc: 3.643894
+Epoch [343], train_loss: 0.087423, val_loss: 0.084367, val_acc: 3.745334
+Epoch [344], train_loss: 0.087205, val_loss: 0.082805, val_acc: 3.751684
+Epoch [345], train_loss: 0.086967, val_loss: 0.083944, val_acc: 3.550677
+Epoch [346], train_loss: 0.086754, val_loss: 0.083976, val_acc: 3.476385
+Epoch [347], train_loss: 0.086555, val_loss: 0.086227, val_acc: 3.720221
+Epoch [348], train_loss: 0.086315, val_loss: 0.088126, val_acc: 3.965500
+Epoch [349], train_loss: 0.086104, val_loss: 0.085754, val_acc: 3.819758
+Epoch [350], train_loss: 0.085890, val_loss: 0.090616, val_acc: 3.599756
+Epoch [351], train_loss: 0.085705, val_loss: 0.091299, val_acc: 4.051337
+Epoch [352], train_loss: 0.085504, val_loss: 0.095680, val_acc: 3.848045
+Epoch [353], train_loss: 0.085299, val_loss: 0.086708, val_acc: 4.174683
+Epoch [354], train_loss: 0.085049, val_loss: 0.088798, val_acc: 4.210249
+Epoch [355], train_loss: 0.084814, val_loss: 0.085646, val_acc: 3.907702
+Epoch [356], train_loss: 0.084626, val_loss: 0.086310, val_acc: 3.956470
+Epoch [357], train_loss: 0.084414, val_loss: 0.086437, val_acc: 4.286590
+Epoch [358], train_loss: 0.084204, val_loss: 0.082976, val_acc: 4.168536
+Epoch [359], train_loss: 0.083964, val_loss: 0.085823, val_acc: 3.972070
+Epoch [360], train_loss: 0.083795, val_loss: 0.096365, val_acc: 3.922252
+Epoch [361], train_loss: 0.083537, val_loss: 0.090217, val_acc: 4.628379
+Epoch [362], train_loss: 0.083344, val_loss: 0.081952, val_acc: 4.460131
+Epoch [363], train_loss: 0.083137, val_loss: 0.086748, val_acc: 4.458138
+Epoch [364], train_loss: 0.082909, val_loss: 0.081038, val_acc: 4.591435
+Epoch [365], train_loss: 0.082688, val_loss: 0.081669, val_acc: 4.555032
+Epoch [366], train_loss: 0.082467, val_loss: 0.079704, val_acc: 4.583354
+Epoch [367], train_loss: 0.082299, val_loss: 0.081524, val_acc: 4.390409
+Epoch [368], train_loss: 0.082041, val_loss: 0.079189, val_acc: 4.723099
+Epoch [369], train_loss: 0.081807, val_loss: 0.080281, val_acc: 4.473562
+Epoch [370], train_loss: 0.081586, val_loss: 0.081198, val_acc: 4.404632
+Epoch [371], train_loss: 0.081427, val_loss: 0.078377, val_acc: 4.767471
+Epoch [372], train_loss: 0.081185, val_loss: 0.078997, val_acc: 4.868511
+Epoch [373], train_loss: 0.080961, val_loss: 0.081544, val_acc: 4.830508
+Epoch [374], train_loss: 0.080732, val_loss: 0.081138, val_acc: 5.091708
+Epoch [375], train_loss: 0.080574, val_loss: 0.079327, val_acc: 4.956901
+Epoch [376], train_loss: 0.080356, val_loss: 0.079315, val_acc: 4.932571
+Epoch [377], train_loss: 0.080106, val_loss: 0.079266, val_acc: 5.225095
+Epoch [378], train_loss: 0.079883, val_loss: 0.076586, val_acc: 5.028248
+Epoch [379], train_loss: 0.079672, val_loss: 0.075628, val_acc: 5.324450
+Epoch [380], train_loss: 0.079450, val_loss: 0.077473, val_acc: 5.004211
+Epoch [381], train_loss: 0.079240, val_loss: 0.077773, val_acc: 5.161286
+Epoch [382], train_loss: 0.079032, val_loss: 0.078321, val_acc: 5.332475
+Epoch [383], train_loss: 0.078838, val_loss: 0.078501, val_acc: 5.364811
+Epoch [384], train_loss: 0.078620, val_loss: 0.079751, val_acc: 5.428712
+Epoch [385], train_loss: 0.078410, val_loss: 0.076464, val_acc: 5.552095
+Epoch [386], train_loss: 0.078188, val_loss: 0.077532, val_acc: 5.530581
+Epoch [387], train_loss: 0.077985, val_loss: 0.076668, val_acc: 5.740917
+Epoch [388], train_loss: 0.077743, val_loss: 0.079121, val_acc: 5.386025
+Epoch [389], train_loss: 0.077534, val_loss: 0.077761, val_acc: 5.681257
+Epoch [390], train_loss: 0.077314, val_loss: 0.077714, val_acc: 6.075178
+Epoch [391], train_loss: 0.077096, val_loss: 0.076843, val_acc: 5.871815
+Epoch [392], train_loss: 0.076909, val_loss: 0.077732, val_acc: 5.614595
+Epoch [393], train_loss: 0.076672, val_loss: 0.075751, val_acc: 6.264469
+Epoch [394], train_loss: 0.076486, val_loss: 0.077375, val_acc: 5.899315
+Epoch [395], train_loss: 0.076240, val_loss: 0.075875, val_acc: 6.094369
+Epoch [396], train_loss: 0.076094, val_loss: 0.074508, val_acc: 6.719326
+Epoch [397], train_loss: 0.075819, val_loss: 0.075124, val_acc: 6.495990
+Epoch [398], train_loss: 0.075654, val_loss: 0.073193, val_acc: 6.597473
+Epoch [399], train_loss: 0.075397, val_loss: 0.073536, val_acc: 6.576932
+Epoch [400], train_loss: 0.075189, val_loss: 0.072178, val_acc: 6.860695
+Epoch [401], train_loss: 0.074995, val_loss: 0.072048, val_acc: 6.911035
+Epoch [402], train_loss: 0.074778, val_loss: 0.072800, val_acc: 6.858966
+Epoch [403], train_loss: 0.074535, val_loss: 0.074589, val_acc: 6.616336
+Epoch [404], train_loss: 0.074334, val_loss: 0.072213, val_acc: 6.868907
+Epoch [405], train_loss: 0.074149, val_loss: 0.073902, val_acc: 7.039032
+Epoch [406], train_loss: 0.073928, val_loss: 0.071790, val_acc: 7.100025
+Epoch [407], train_loss: 0.073705, val_loss: 0.072628, val_acc: 7.184677
+Epoch [408], train_loss: 0.073533, val_loss: 0.071383, val_acc: 7.174581
+Epoch [409], train_loss: 0.073313, val_loss: 0.070414, val_acc: 7.874671
+Epoch [410], train_loss: 0.073138, val_loss: 0.076815, val_acc: 6.659985
+Epoch [411], train_loss: 0.072874, val_loss: 0.071595, val_acc: 7.707265
+Epoch [412], train_loss: 0.072657, val_loss: 0.069525, val_acc: 7.755891
+Epoch [413], train_loss: 0.072442, val_loss: 0.071737, val_acc: 7.740369
+Epoch [414], train_loss: 0.072262, val_loss: 0.070814, val_acc: 7.671385
+Epoch [415], train_loss: 0.072026, val_loss: 0.070427, val_acc: 7.633419
+Epoch [416], train_loss: 0.071822, val_loss: 0.069367, val_acc: 8.069200
+Epoch [417], train_loss: 0.071622, val_loss: 0.070212, val_acc: 7.975227
+Epoch [418], train_loss: 0.071391, val_loss: 0.069615, val_acc: 8.162470
+Epoch [419], train_loss: 0.071218, val_loss: 0.068316, val_acc: 8.205042
+Epoch [420], train_loss: 0.070995, val_loss: 0.067894, val_acc: 8.378463
+Epoch [421], train_loss: 0.070777, val_loss: 0.068525, val_acc: 8.718242
+Epoch [422], train_loss: 0.070567, val_loss: 0.067834, val_acc: 8.651355
+Epoch [423], train_loss: 0.070395, val_loss: 0.069003, val_acc: 8.194207
+Epoch [424], train_loss: 0.070160, val_loss: 0.071303, val_acc: 8.492015
+Epoch [425], train_loss: 0.069979, val_loss: 0.069976, val_acc: 8.689736
+Epoch [426], train_loss: 0.069741, val_loss: 0.070676, val_acc: 8.544791
+Epoch [427], train_loss: 0.069562, val_loss: 0.066658, val_acc: 9.089087
+Epoch [428], train_loss: 0.069324, val_loss: 0.066589, val_acc: 9.009697
+Epoch [429], train_loss: 0.069129, val_loss: 0.066502, val_acc: 9.217211
+Epoch [430], train_loss: 0.068902, val_loss: 0.067510, val_acc: 9.282691
+Epoch [431], train_loss: 0.068737, val_loss: 0.067274, val_acc: 8.792559
+Epoch [432], train_loss: 0.068523, val_loss: 0.070240, val_acc: 9.342624
+Epoch [433], train_loss: 0.068360, val_loss: 0.066604, val_acc: 9.108301
+Epoch [434], train_loss: 0.068097, val_loss: 0.067621, val_acc: 9.665083
+Epoch [435], train_loss: 0.067886, val_loss: 0.064913, val_acc: 9.890109
+Epoch [436], train_loss: 0.067672, val_loss: 0.064530, val_acc: 9.981361
+Epoch [437], train_loss: 0.067474, val_loss: 0.070958, val_acc: 9.464131
+Epoch [438], train_loss: 0.067301, val_loss: 0.067020, val_acc: 10.093780
+Epoch [439], train_loss: 0.067111, val_loss: 0.068024, val_acc: 9.709292
+Epoch [440], train_loss: 0.066903, val_loss: 0.064445, val_acc: 9.961127
+Epoch [441], train_loss: 0.066714, val_loss: 0.065912, val_acc: 9.948335
+Epoch [442], train_loss: 0.066517, val_loss: 0.066486, val_acc: 9.931500
+Epoch [443], train_loss: 0.066293, val_loss: 0.065021, val_acc: 10.549462
+Epoch [444], train_loss: 0.066078, val_loss: 0.066368, val_acc: 10.179710
+Epoch [445], train_loss: 0.065929, val_loss: 0.065051, val_acc: 10.111173
+Epoch [446], train_loss: 0.065685, val_loss: 0.069110, val_acc: 10.247801
+Epoch [447], train_loss: 0.065534, val_loss: 0.061783, val_acc: 10.992946
+Epoch [448], train_loss: 0.065326, val_loss: 0.063340, val_acc: 10.454101
+Epoch [449], train_loss: 0.065126, val_loss: 0.063185, val_acc: 10.802825
+Epoch [450], train_loss: 0.064925, val_loss: 0.062916, val_acc: 11.025386
+Epoch [451], train_loss: 0.064729, val_loss: 0.060978, val_acc: 11.228795
+Epoch [452], train_loss: 0.064576, val_loss: 0.062576, val_acc: 11.024516
+Epoch [453], train_loss: 0.064308, val_loss: 0.063956, val_acc: 10.864369
+Epoch [454], train_loss: 0.064147, val_loss: 0.061694, val_acc: 11.351214
+Epoch [455], train_loss: 0.063972, val_loss: 0.062913, val_acc: 10.952724
+Epoch [456], train_loss: 0.063799, val_loss: 0.062701, val_acc: 11.238605
+Epoch [457], train_loss: 0.063562, val_loss: 0.060120, val_acc: 11.439193
+Epoch [458], train_loss: 0.063345, val_loss: 0.061088, val_acc: 11.554216
+Epoch [459], train_loss: 0.063148, val_loss: 0.065431, val_acc: 11.660631
+Epoch [460], train_loss: 0.062994, val_loss: 0.063342, val_acc: 11.512183
+Epoch [461], train_loss: 0.062815, val_loss: 0.068094, val_acc: 11.769664
+Epoch [462], train_loss: 0.062637, val_loss: 0.059325, val_acc: 11.777516
+Epoch [463], train_loss: 0.062453, val_loss: 0.061133, val_acc: 11.631029
+Epoch [464], train_loss: 0.062209, val_loss: 0.064542, val_acc: 11.606249
+Epoch [465], train_loss: 0.062023, val_loss: 0.058360, val_acc: 12.409564
+Epoch [466], train_loss: 0.061835, val_loss: 0.065129, val_acc: 11.676725
+Epoch [467], train_loss: 0.061664, val_loss: 0.063682, val_acc: 11.754915
+Epoch [468], train_loss: 0.061475, val_loss: 0.061115, val_acc: 12.465158
+Epoch [469], train_loss: 0.061273, val_loss: 0.063360, val_acc: 12.036865
+Epoch [470], train_loss: 0.061075, val_loss: 0.063839, val_acc: 12.023086
+Epoch [471], train_loss: 0.060886, val_loss: 0.057444, val_acc: 12.497092
+Epoch [472], train_loss: 0.060730, val_loss: 0.056031, val_acc: 12.912421
+Epoch [473], train_loss: 0.060582, val_loss: 0.063092, val_acc: 12.101847
+Epoch [474], train_loss: 0.060351, val_loss: 0.057771, val_acc: 12.572411
+Epoch [475], train_loss: 0.060213, val_loss: 0.055045, val_acc: 13.246772
+Epoch [476], train_loss: 0.060041, val_loss: 0.055970, val_acc: 13.122810
+Epoch [477], train_loss: 0.059893, val_loss: 0.058526, val_acc: 12.819507
+Epoch [478], train_loss: 0.059691, val_loss: 0.058953, val_acc: 12.799950
+Epoch [479], train_loss: 0.059582, val_loss: 0.059166, val_acc: 12.882187
+Epoch [480], train_loss: 0.059254, val_loss: 0.057647, val_acc: 12.761700
+Epoch [481], train_loss: 0.059105, val_loss: 0.058877, val_acc: 12.954269
+Epoch [482], train_loss: 0.058905, val_loss: 0.058316, val_acc: 12.992416
+Epoch [483], train_loss: 0.058770, val_loss: 0.058225, val_acc: 13.144249
+Epoch [484], train_loss: 0.058557, val_loss: 0.055993, val_acc: 13.222450
+Epoch [485], train_loss: 0.058344, val_loss: 0.056004, val_acc: 13.284205
+Epoch [486], train_loss: 0.058191, val_loss: 0.057555, val_acc: 13.366986
+Epoch [487], train_loss: 0.058012, val_loss: 0.054864, val_acc: 13.377782
+Epoch [488], train_loss: 0.057848, val_loss: 0.054276, val_acc: 13.534507
+Epoch [489], train_loss: 0.057670, val_loss: 0.055937, val_acc: 13.604939
+Epoch [490], train_loss: 0.057460, val_loss: 0.053996, val_acc: 13.609464
+Epoch [491], train_loss: 0.057309, val_loss: 0.053848, val_acc: 13.653211
+Epoch [492], train_loss: 0.057205, val_loss: 0.062097, val_acc: 13.619132
+Epoch [493], train_loss: 0.057039, val_loss: 0.061629, val_acc: 13.502625
+Epoch [494], train_loss: 0.056879, val_loss: 0.052865, val_acc: 14.091949
+Epoch [495], train_loss: 0.056675, val_loss: 0.053252, val_acc: 13.748832
+Epoch [496], train_loss: 0.056434, val_loss: 0.053388, val_acc: 13.756686
+Epoch [497], train_loss: 0.056335, val_loss: 0.052871, val_acc: 14.004088
+Epoch [498], train_loss: 0.056132, val_loss: 0.055166, val_acc: 14.042987
+Epoch [499], train_loss: 0.055972, val_loss: 0.052768, val_acc: 13.979456
+python3 ./UNet_V14.py  18953.93s user 19255.71s system 99% cpu 10:38:41.13 total
diff --git a/UNet/Sim_logs/UNet_64_V15_25617886.log b/UNet/Sim_logs/UNet_64_V15_25617886.log
new file mode 100644
index 0000000000000000000000000000000000000000..7aab4b81390368c377a7e8cc6c3f45e4dcb70302
--- /dev/null
+++ b/UNet/Sim_logs/UNet_64_V15_25617886.log
@@ -0,0 +1,1020 @@
+(OK) Loading cuda 10.2.89
+(OK) Loading python 3.7.11
+(!!) The SciPy Stack is available: http://www.scipy.org/stackspec.html
+ Built with GCC compilers.
+Collecting torch==1.10.1
+  Using cached torch-1.10.1-cp37-cp37m-manylinux1_x86_64.whl (881.9 MB)
+Collecting typing-extensions
+  Using cached typing_extensions-4.1.1-py3-none-any.whl (26 kB)
+Installing collected packages: typing-extensions, torch
+  WARNING: The scripts convert-caffe2-to-onnx, convert-onnx-to-caffe2 and torchrun are installed in '/home/yk138599/.local/bin' which is not on PATH.
+  Consider adding this directory to PATH or, if you prefer to suppress this warning, use --no-warn-script-location.
+Successfully installed torch-1.10.1 typing-extensions-4.1.1
+WARNING: You are using pip version 21.2.4; however, version 22.0.3 is available.
+You should consider upgrading via the '/usr/local_rwth/sw/python/3.7.11/x86_64/bin/python3.7 -m pip install --upgrade pip' command.
+number auf epochs: 1000
+batchsize: 32
+learning rate: 3e-05
+kernel size is: 7
+ seed is: 2377594295
+Epoch [0], train_loss: 0.185025, val_loss: 0.146054, val_acc: 5.087505
+Epoch [1], train_loss: 0.175324, val_loss: 0.128748, val_acc: 7.883666
+Epoch [2], train_loss: 0.165671, val_loss: 0.123503, val_acc: 9.453648
+Epoch [3], train_loss: 0.158355, val_loss: 0.121826, val_acc: 9.850308
+Epoch [4], train_loss: 0.153474, val_loss: 0.116063, val_acc: 10.747286
+Epoch [5], train_loss: 0.148356, val_loss: 0.114177, val_acc: 10.970490
+Epoch [6], train_loss: 0.144751, val_loss: 0.113088, val_acc: 11.120526
+Epoch [7], train_loss: 0.140854, val_loss: 0.115185, val_acc: 10.942829
+Epoch [8], train_loss: 0.137701, val_loss: 0.111403, val_acc: 11.464479
+Epoch [9], train_loss: 0.135292, val_loss: 0.109478, val_acc: 11.776126
+Epoch [10], train_loss: 0.133219, val_loss: 0.108466, val_acc: 11.943646
+Epoch [11], train_loss: 0.130521, val_loss: 0.106457, val_acc: 12.237556
+Epoch [12], train_loss: 0.128135, val_loss: 0.105055, val_acc: 12.451003
+Epoch [13], train_loss: 0.125895, val_loss: 0.102186, val_acc: 12.816888
+Epoch [14], train_loss: 0.124328, val_loss: 0.099850, val_acc: 13.178556
+Epoch [15], train_loss: 0.122582, val_loss: 0.100127, val_acc: 13.230832
+Epoch [16], train_loss: 0.120285, val_loss: 0.098070, val_acc: 13.493578
+Epoch [17], train_loss: 0.118230, val_loss: 0.097976, val_acc: 13.659941
+Epoch [18], train_loss: 0.116936, val_loss: 0.096450, val_acc: 13.750072
+Epoch [19], train_loss: 0.114840, val_loss: 0.096794, val_acc: 13.674782
+Epoch [20], train_loss: 0.113157, val_loss: 0.090802, val_acc: 14.531814
+Epoch [21], train_loss: 0.111719, val_loss: 0.090718, val_acc: 14.593040
+Epoch [22], train_loss: 0.109945, val_loss: 0.089093, val_acc: 14.821428
+Epoch [23], train_loss: 0.108373, val_loss: 0.088411, val_acc: 15.021918
+Epoch [24], train_loss: 0.106624, val_loss: 0.087548, val_acc: 15.150649
+Epoch [25], train_loss: 0.105605, val_loss: 0.086385, val_acc: 15.278821
+Epoch [26], train_loss: 0.104275, val_loss: 0.084645, val_acc: 15.532139
+Epoch [27], train_loss: 0.102778, val_loss: 0.083957, val_acc: 15.664930
+Epoch [28], train_loss: 0.101575, val_loss: 0.083094, val_acc: 16.001734
+Epoch [29], train_loss: 0.100574, val_loss: 0.082391, val_acc: 16.033895
+Epoch [30], train_loss: 0.099082, val_loss: 0.078679, val_acc: 16.816832
+Epoch [31], train_loss: 0.097823, val_loss: 0.080915, val_acc: 16.293455
+Epoch [32], train_loss: 0.096833, val_loss: 0.078737, val_acc: 16.799589
+Epoch [33], train_loss: 0.096522, val_loss: 0.077766, val_acc: 17.129803
+Epoch [34], train_loss: 0.095042, val_loss: 0.077295, val_acc: 16.907890
+Epoch [35], train_loss: 0.094485, val_loss: 0.077695, val_acc: 16.776434
+Epoch [36], train_loss: 0.092778, val_loss: 0.077419, val_acc: 16.923708
+Epoch [37], train_loss: 0.091928, val_loss: 0.075315, val_acc: 17.702030
+Epoch [38], train_loss: 0.090821, val_loss: 0.074344, val_acc: 17.920004
+Epoch [39], train_loss: 0.090005, val_loss: 0.074436, val_acc: 17.888311
+Epoch [40], train_loss: 0.088898, val_loss: 0.074333, val_acc: 17.896139
+Epoch [41], train_loss: 0.088033, val_loss: 0.073280, val_acc: 18.238987
+Epoch [42], train_loss: 0.087212, val_loss: 0.074950, val_acc: 17.782919
+Epoch [43], train_loss: 0.086461, val_loss: 0.073094, val_acc: 18.365620
+Epoch [44], train_loss: 0.085407, val_loss: 0.071995, val_acc: 18.708593
+Epoch [45], train_loss: 0.085689, val_loss: 0.071896, val_acc: 18.690218
+Epoch [46], train_loss: 0.084440, val_loss: 0.071272, val_acc: 18.893396
+Epoch [47], train_loss: 0.083412, val_loss: 0.069674, val_acc: 19.612228
+Epoch [48], train_loss: 0.082332, val_loss: 0.069537, val_acc: 19.880692
+Epoch [49], train_loss: 0.082045, val_loss: 0.069787, val_acc: 19.411072
+Epoch [50], train_loss: 0.081381, val_loss: 0.068676, val_acc: 20.010115
+Epoch [51], train_loss: 0.080443, val_loss: 0.068560, val_acc: 20.015327
+Epoch [52], train_loss: 0.080087, val_loss: 0.068317, val_acc: 20.228973
+Epoch [53], train_loss: 0.078964, val_loss: 0.067876, val_acc: 20.583868
+Epoch [54], train_loss: 0.078661, val_loss: 0.067418, val_acc: 20.536242
+Epoch [55], train_loss: 0.077735, val_loss: 0.066822, val_acc: 20.862350
+Epoch [56], train_loss: 0.077489, val_loss: 0.065989, val_acc: 21.234602
+Epoch [57], train_loss: 0.076530, val_loss: 0.066138, val_acc: 21.103962
+Epoch [58], train_loss: 0.075929, val_loss: 0.066639, val_acc: 20.740797
+Epoch [59], train_loss: 0.075323, val_loss: 0.065090, val_acc: 21.904671
+Epoch [60], train_loss: 0.075152, val_loss: 0.065586, val_acc: 21.567869
+Epoch [61], train_loss: 0.074236, val_loss: 0.064541, val_acc: 22.183670
+Epoch [62], train_loss: 0.074112, val_loss: 0.064958, val_acc: 22.048651
+Epoch [63], train_loss: 0.073441, val_loss: 0.064047, val_acc: 22.427576
+Epoch [64], train_loss: 0.073007, val_loss: 0.064362, val_acc: 22.134470
+Epoch [65], train_loss: 0.072321, val_loss: 0.063550, val_acc: 22.806780
+Epoch [66], train_loss: 0.072083, val_loss: 0.063552, val_acc: 22.623606
+Epoch [67], train_loss: 0.071751, val_loss: 0.063149, val_acc: 22.903643
+Epoch [68], train_loss: 0.071361, val_loss: 0.062525, val_acc: 23.557106
+Epoch [69], train_loss: 0.070701, val_loss: 0.062423, val_acc: 23.564289
+Epoch [70], train_loss: 0.070627, val_loss: 0.062492, val_acc: 23.563435
+Epoch [71], train_loss: 0.070096, val_loss: 0.062113, val_acc: 23.802628
+Epoch [72], train_loss: 0.069641, val_loss: 0.062134, val_acc: 23.890949
+Epoch [73], train_loss: 0.069403, val_loss: 0.061813, val_acc: 24.040857
+Epoch [74], train_loss: 0.069009, val_loss: 0.062163, val_acc: 23.726639
+Epoch [75], train_loss: 0.068765, val_loss: 0.061904, val_acc: 23.821224
+Epoch [76], train_loss: 0.068080, val_loss: 0.061365, val_acc: 24.325291
+Epoch [77], train_loss: 0.067615, val_loss: 0.061126, val_acc: 24.498367
+Epoch [78], train_loss: 0.067267, val_loss: 0.061114, val_acc: 24.511717
+Epoch [79], train_loss: 0.067495, val_loss: 0.061164, val_acc: 24.495920
+Epoch [80], train_loss: 0.066997, val_loss: 0.061290, val_acc: 24.423712
+Epoch [81], train_loss: 0.066801, val_loss: 0.060522, val_acc: 24.955708
+Epoch [82], train_loss: 0.066533, val_loss: 0.060664, val_acc: 24.846397
+Epoch [83], train_loss: 0.066410, val_loss: 0.060762, val_acc: 24.729939
+Epoch [84], train_loss: 0.065869, val_loss: 0.060594, val_acc: 24.851379
+Epoch [85], train_loss: 0.065734, val_loss: 0.060168, val_acc: 25.168589
+Epoch [86], train_loss: 0.065686, val_loss: 0.060315, val_acc: 25.116753
+Epoch [87], train_loss: 0.065421, val_loss: 0.060249, val_acc: 25.144426
+Epoch [88], train_loss: 0.065163, val_loss: 0.060124, val_acc: 25.265419
+Epoch [89], train_loss: 0.065139, val_loss: 0.060133, val_acc: 25.365564
+Epoch [90], train_loss: 0.064956, val_loss: 0.059928, val_acc: 25.385494
+Epoch [91], train_loss: 0.064782, val_loss: 0.060109, val_acc: 25.345453
+Epoch [92], train_loss: 0.064622, val_loss: 0.060047, val_acc: 25.333971
+Epoch [93], train_loss: 0.064600, val_loss: 0.059891, val_acc: 25.412045
+Epoch [94], train_loss: 0.064163, val_loss: 0.059806, val_acc: 25.443689
+Epoch [95], train_loss: 0.064415, val_loss: 0.059820, val_acc: 25.508184
+Epoch [96], train_loss: 0.064046, val_loss: 0.059661, val_acc: 25.543444
+Epoch [97], train_loss: 0.064062, val_loss: 0.059374, val_acc: 25.652472
+Epoch [98], train_loss: 0.064088, val_loss: 0.059591, val_acc: 25.556311
+Epoch [99], train_loss: 0.063804, val_loss: 0.059376, val_acc: 25.656101
+Epoch [100], train_loss: 0.063937, val_loss: 0.059682, val_acc: 25.566740
+Epoch [101], train_loss: 0.063736, val_loss: 0.059556, val_acc: 25.617922
+Epoch [102], train_loss: 0.063971, val_loss: 0.059688, val_acc: 25.589180
+Epoch [103], train_loss: 0.063558, val_loss: 0.059179, val_acc: 25.715981
+Epoch [104], train_loss: 0.063550, val_loss: 0.059386, val_acc: 25.655157
+Epoch [105], train_loss: 0.063383, val_loss: 0.059396, val_acc: 25.660517
+Epoch [106], train_loss: 0.063575, val_loss: 0.059415, val_acc: 25.663532
+Epoch [107], train_loss: 0.063239, val_loss: 0.059104, val_acc: 25.735085
+Epoch [108], train_loss: 0.063352, val_loss: 0.059487, val_acc: 25.659761
+Epoch [109], train_loss: 0.063109, val_loss: 0.059426, val_acc: 25.656248
+Epoch [110], train_loss: 0.063118, val_loss: 0.059307, val_acc: 25.698708
+Epoch [111], train_loss: 0.063071, val_loss: 0.059368, val_acc: 25.657812
+Epoch [112], train_loss: 0.063135, val_loss: 0.059169, val_acc: 25.727335
+Epoch [113], train_loss: 0.063162, val_loss: 0.059208, val_acc: 25.711096
+Epoch [114], train_loss: 0.062970, val_loss: 0.059388, val_acc: 25.688463
+Epoch [115], train_loss: 0.063175, val_loss: 0.059432, val_acc: 25.668585
+Epoch [116], train_loss: 0.062823, val_loss: 0.059105, val_acc: 25.723335
+Epoch [117], train_loss: 0.062967, val_loss: 0.059435, val_acc: 25.686655
+Epoch [118], train_loss: 0.062861, val_loss: 0.059283, val_acc: 25.736774
+Epoch [119], train_loss: 0.062442, val_loss: 0.059232, val_acc: 25.724863
+Epoch [120], train_loss: 0.062608, val_loss: 0.059298, val_acc: 25.706743
+Epoch [121], train_loss: 0.062764, val_loss: 0.059451, val_acc: 25.669334
+Epoch [122], train_loss: 0.062549, val_loss: 0.059167, val_acc: 25.728016
+Epoch [123], train_loss: 0.062746, val_loss: 0.059161, val_acc: 25.731133
+Epoch [124], train_loss: 0.062558, val_loss: 0.058959, val_acc: 25.752504
+Epoch [125], train_loss: 0.062497, val_loss: 0.059141, val_acc: 25.740091
+Epoch [126], train_loss: 0.062471, val_loss: 0.058977, val_acc: 25.755882
+Epoch [127], train_loss: 0.062423, val_loss: 0.059067, val_acc: 25.747770
+Epoch [128], train_loss: 0.062551, val_loss: 0.059093, val_acc: 25.762812
+Epoch [129], train_loss: 0.062359, val_loss: 0.059083, val_acc: 25.748869
+Epoch [130], train_loss: 0.062380, val_loss: 0.059061, val_acc: 25.753534
+Epoch [131], train_loss: 0.062373, val_loss: 0.058983, val_acc: 25.770838
+Epoch [132], train_loss: 0.062192, val_loss: 0.059050, val_acc: 25.760313
+Epoch [133], train_loss: 0.062612, val_loss: 0.059219, val_acc: 25.715940
+Epoch [134], train_loss: 0.062435, val_loss: 0.059169, val_acc: 25.772301
+Epoch [135], train_loss: 0.062228, val_loss: 0.058857, val_acc: 25.803036
+Epoch [136], train_loss: 0.062165, val_loss: 0.059076, val_acc: 25.782833
+Epoch [137], train_loss: 0.062197, val_loss: 0.058928, val_acc: 25.774494
+Epoch [138], train_loss: 0.062086, val_loss: 0.058900, val_acc: 25.786766
+Epoch [139], train_loss: 0.062251, val_loss: 0.058889, val_acc: 25.777941
+Epoch [140], train_loss: 0.062193, val_loss: 0.059103, val_acc: 25.778721
+Epoch [141], train_loss: 0.061985, val_loss: 0.059067, val_acc: 25.770029
+Epoch [142], train_loss: 0.062216, val_loss: 0.058906, val_acc: 25.785955
+Epoch [143], train_loss: 0.062358, val_loss: 0.058861, val_acc: 25.756828
+Epoch [144], train_loss: 0.062178, val_loss: 0.058800, val_acc: 25.820543
+Epoch [145], train_loss: 0.061998, val_loss: 0.058785, val_acc: 25.786610
+Epoch [146], train_loss: 0.061977, val_loss: 0.059100, val_acc: 25.760921
+Epoch [147], train_loss: 0.061960, val_loss: 0.058871, val_acc: 25.778345
+Epoch [148], train_loss: 0.061954, val_loss: 0.058954, val_acc: 25.764112
+Epoch [149], train_loss: 0.061922, val_loss: 0.058742, val_acc: 25.801712
+Epoch [150], train_loss: 0.061853, val_loss: 0.058890, val_acc: 25.739847
+Epoch [151], train_loss: 0.061928, val_loss: 0.058983, val_acc: 25.785490
+Epoch [152], train_loss: 0.062193, val_loss: 0.058669, val_acc: 25.822201
+Epoch [153], train_loss: 0.061929, val_loss: 0.058721, val_acc: 25.802275
+Epoch [154], train_loss: 0.061899, val_loss: 0.058678, val_acc: 25.829506
+Epoch [155], train_loss: 0.061830, val_loss: 0.059015, val_acc: 25.755095
+Epoch [156], train_loss: 0.061779, val_loss: 0.058815, val_acc: 25.821924
+Epoch [157], train_loss: 0.061814, val_loss: 0.058793, val_acc: 25.775068
+Epoch [158], train_loss: 0.061720, val_loss: 0.058711, val_acc: 25.834154
+Epoch [159], train_loss: 0.061920, val_loss: 0.058748, val_acc: 25.814631
+Epoch [160], train_loss: 0.061712, val_loss: 0.058699, val_acc: 25.825783
+Epoch [161], train_loss: 0.061703, val_loss: 0.058651, val_acc: 25.832035
+Epoch [162], train_loss: 0.061652, val_loss: 0.058559, val_acc: 25.828764
+Epoch [163], train_loss: 0.061645, val_loss: 0.058753, val_acc: 25.768730
+Epoch [164], train_loss: 0.061593, val_loss: 0.058719, val_acc: 25.839159
+Epoch [165], train_loss: 0.061689, val_loss: 0.058724, val_acc: 25.854477
+Epoch [166], train_loss: 0.061618, val_loss: 0.058692, val_acc: 25.812046
+Epoch [167], train_loss: 0.061536, val_loss: 0.058653, val_acc: 25.794802
+Epoch [168], train_loss: 0.061646, val_loss: 0.058673, val_acc: 25.816431
+Epoch [169], train_loss: 0.061463, val_loss: 0.058654, val_acc: 25.832365
+Epoch [170], train_loss: 0.061584, val_loss: 0.058938, val_acc: 25.797857
+Epoch [171], train_loss: 0.061445, val_loss: 0.058553, val_acc: 25.824272
+Epoch [172], train_loss: 0.061579, val_loss: 0.058668, val_acc: 25.825354
+Epoch [173], train_loss: 0.061692, val_loss: 0.058841, val_acc: 25.818796
+Epoch [174], train_loss: 0.061422, val_loss: 0.058580, val_acc: 25.827036
+Epoch [175], train_loss: 0.061585, val_loss: 0.058708, val_acc: 25.866522
+Epoch [176], train_loss: 0.061606, val_loss: 0.058735, val_acc: 25.820366
+Epoch [177], train_loss: 0.061414, val_loss: 0.058624, val_acc: 25.862749
+Epoch [178], train_loss: 0.061523, val_loss: 0.058507, val_acc: 25.822329
+Epoch [179], train_loss: 0.061387, val_loss: 0.058672, val_acc: 25.812370
+Epoch [180], train_loss: 0.061325, val_loss: 0.058617, val_acc: 25.866245
+Epoch [181], train_loss: 0.061322, val_loss: 0.058716, val_acc: 25.806496
+Epoch [182], train_loss: 0.061370, val_loss: 0.058504, val_acc: 25.830276
+Epoch [183], train_loss: 0.061289, val_loss: 0.058517, val_acc: 25.859037
+Epoch [184], train_loss: 0.061529, val_loss: 0.058632, val_acc: 25.801956
+Epoch [185], train_loss: 0.061490, val_loss: 0.058420, val_acc: 25.852638
+Epoch [186], train_loss: 0.061370, val_loss: 0.058590, val_acc: 25.886347
+Epoch [187], train_loss: 0.061404, val_loss: 0.058785, val_acc: 25.805660
+Epoch [188], train_loss: 0.061396, val_loss: 0.058588, val_acc: 25.847437
+Epoch [189], train_loss: 0.061291, val_loss: 0.058519, val_acc: 25.807613
+Epoch [190], train_loss: 0.061299, val_loss: 0.058431, val_acc: 25.898951
+Epoch [191], train_loss: 0.061186, val_loss: 0.058387, val_acc: 25.882118
+Epoch [192], train_loss: 0.061521, val_loss: 0.058591, val_acc: 25.846046
+Epoch [193], train_loss: 0.061216, val_loss: 0.058650, val_acc: 25.842962
+Epoch [194], train_loss: 0.061177, val_loss: 0.058286, val_acc: 25.883747
+Epoch [195], train_loss: 0.061273, val_loss: 0.058414, val_acc: 25.880867
+Epoch [196], train_loss: 0.061206, val_loss: 0.058506, val_acc: 25.849607
+Epoch [197], train_loss: 0.061143, val_loss: 0.058508, val_acc: 25.869553
+Epoch [198], train_loss: 0.061115, val_loss: 0.058441, val_acc: 25.851765
+Epoch [199], train_loss: 0.061005, val_loss: 0.058515, val_acc: 25.855268
+Epoch [200], train_loss: 0.061097, val_loss: 0.058441, val_acc: 25.842106
+Epoch [201], train_loss: 0.060956, val_loss: 0.058468, val_acc: 25.816877
+Epoch [202], train_loss: 0.061264, val_loss: 0.058441, val_acc: 25.920353
+Epoch [203], train_loss: 0.061300, val_loss: 0.058425, val_acc: 25.865393
+Epoch [204], train_loss: 0.061184, val_loss: 0.058638, val_acc: 25.822159
+Epoch [205], train_loss: 0.061065, val_loss: 0.058653, val_acc: 25.831640
+Epoch [206], train_loss: 0.061211, val_loss: 0.058614, val_acc: 25.845142
+Epoch [207], train_loss: 0.061088, val_loss: 0.058557, val_acc: 25.814610
+Epoch [208], train_loss: 0.061055, val_loss: 0.058716, val_acc: 25.824081
+Epoch [209], train_loss: 0.061087, val_loss: 0.058380, val_acc: 25.837404
+Epoch [210], train_loss: 0.061137, val_loss: 0.058491, val_acc: 25.818192
+Epoch [211], train_loss: 0.061022, val_loss: 0.058545, val_acc: 25.818926
+Epoch [212], train_loss: 0.061044, val_loss: 0.058453, val_acc: 25.839043
+Epoch [213], train_loss: 0.061020, val_loss: 0.058541, val_acc: 25.877329
+Epoch [214], train_loss: 0.061114, val_loss: 0.058297, val_acc: 25.926125
+Epoch [215], train_loss: 0.061025, val_loss: 0.058488, val_acc: 25.800312
+Epoch [216], train_loss: 0.061027, val_loss: 0.058422, val_acc: 25.888590
+Epoch [217], train_loss: 0.061008, val_loss: 0.058324, val_acc: 25.901814
+Epoch [218], train_loss: 0.061126, val_loss: 0.058554, val_acc: 25.849527
+Epoch [219], train_loss: 0.061400, val_loss: 0.058445, val_acc: 25.904331
+Epoch [220], train_loss: 0.061099, val_loss: 0.058492, val_acc: 25.864771
+Epoch [221], train_loss: 0.061060, val_loss: 0.058542, val_acc: 25.836948
+Epoch [222], train_loss: 0.060778, val_loss: 0.058401, val_acc: 25.883955
+Epoch [223], train_loss: 0.060888, val_loss: 0.058267, val_acc: 25.937214
+Epoch [224], train_loss: 0.060960, val_loss: 0.058335, val_acc: 25.899643
+Epoch [225], train_loss: 0.060901, val_loss: 0.058530, val_acc: 25.846170
+Epoch [226], train_loss: 0.061058, val_loss: 0.058491, val_acc: 25.834562
+Epoch [227], train_loss: 0.060987, val_loss: 0.058380, val_acc: 25.920572
+Epoch [228], train_loss: 0.060867, val_loss: 0.058406, val_acc: 25.866278
+Epoch [229], train_loss: 0.060848, val_loss: 0.058329, val_acc: 25.905724
+Epoch [230], train_loss: 0.060924, val_loss: 0.058328, val_acc: 25.894382
+Epoch [231], train_loss: 0.061005, val_loss: 0.058240, val_acc: 25.920033
+Epoch [232], train_loss: 0.060826, val_loss: 0.058381, val_acc: 25.864803
+Epoch [233], train_loss: 0.060978, val_loss: 0.058402, val_acc: 25.902176
+Epoch [234], train_loss: 0.060732, val_loss: 0.058297, val_acc: 25.879892
+Epoch [235], train_loss: 0.060895, val_loss: 0.058291, val_acc: 25.931242
+Epoch [236], train_loss: 0.060803, val_loss: 0.058420, val_acc: 25.846132
+Epoch [237], train_loss: 0.060817, val_loss: 0.058752, val_acc: 25.770185
+Epoch [238], train_loss: 0.060856, val_loss: 0.058397, val_acc: 25.838671
+Epoch [239], train_loss: 0.060735, val_loss: 0.058258, val_acc: 25.891422
+Epoch [240], train_loss: 0.060727, val_loss: 0.058331, val_acc: 25.916843
+Epoch [241], train_loss: 0.060837, val_loss: 0.058212, val_acc: 25.908775
+Epoch [242], train_loss: 0.060669, val_loss: 0.058205, val_acc: 25.888697
+Epoch [243], train_loss: 0.060709, val_loss: 0.058397, val_acc: 25.851721
+Epoch [244], train_loss: 0.060812, val_loss: 0.058202, val_acc: 25.861103
+Epoch [245], train_loss: 0.060824, val_loss: 0.058179, val_acc: 25.928759
+Epoch [246], train_loss: 0.060851, val_loss: 0.058243, val_acc: 25.953964
+Epoch [247], train_loss: 0.060700, val_loss: 0.058318, val_acc: 25.877296
+Epoch [248], train_loss: 0.060754, val_loss: 0.058284, val_acc: 25.900017
+Epoch [249], train_loss: 0.060817, val_loss: 0.058360, val_acc: 25.922045
+Epoch [250], train_loss: 0.060788, val_loss: 0.058350, val_acc: 25.856102
+Epoch [251], train_loss: 0.060704, val_loss: 0.058387, val_acc: 25.878616
+Epoch [252], train_loss: 0.060710, val_loss: 0.058363, val_acc: 25.855301
+Epoch [253], train_loss: 0.060568, val_loss: 0.058173, val_acc: 25.859737
+Epoch [254], train_loss: 0.060636, val_loss: 0.058204, val_acc: 25.929083
+Epoch [255], train_loss: 0.060719, val_loss: 0.058299, val_acc: 25.897081
+Epoch [256], train_loss: 0.060456, val_loss: 0.058243, val_acc: 25.877424
+Epoch [257], train_loss: 0.060715, val_loss: 0.058240, val_acc: 25.877232
+Epoch [258], train_loss: 0.060721, val_loss: 0.058488, val_acc: 25.854460
+Epoch [259], train_loss: 0.060633, val_loss: 0.058384, val_acc: 25.909975
+Epoch [260], train_loss: 0.060696, val_loss: 0.058512, val_acc: 25.867216
+Epoch [261], train_loss: 0.060704, val_loss: 0.058425, val_acc: 25.874306
+Epoch [262], train_loss: 0.060552, val_loss: 0.058264, val_acc: 25.920549
+Epoch [263], train_loss: 0.060621, val_loss: 0.058329, val_acc: 25.906044
+Epoch [264], train_loss: 0.060519, val_loss: 0.058323, val_acc: 25.867039
+Epoch [265], train_loss: 0.060651, val_loss: 0.058172, val_acc: 25.961824
+Epoch [266], train_loss: 0.060661, val_loss: 0.058172, val_acc: 25.920734
+Epoch [267], train_loss: 0.060669, val_loss: 0.058371, val_acc: 25.890182
+Epoch [268], train_loss: 0.060499, val_loss: 0.058154, val_acc: 25.969028
+Epoch [269], train_loss: 0.060697, val_loss: 0.058218, val_acc: 25.849728
+Epoch [270], train_loss: 0.060547, val_loss: 0.058218, val_acc: 25.899147
+Epoch [271], train_loss: 0.060510, val_loss: 0.058262, val_acc: 25.905466
+Epoch [272], train_loss: 0.060471, val_loss: 0.058452, val_acc: 25.860554
+Epoch [273], train_loss: 0.060483, val_loss: 0.058278, val_acc: 25.839405
+Epoch [274], train_loss: 0.060467, val_loss: 0.058185, val_acc: 25.949244
+Epoch [275], train_loss: 0.060658, val_loss: 0.058289, val_acc: 25.873615
+Epoch [276], train_loss: 0.060434, val_loss: 0.058160, val_acc: 25.897165
+Epoch [277], train_loss: 0.060706, val_loss: 0.058386, val_acc: 25.862679
+Epoch [278], train_loss: 0.060547, val_loss: 0.058205, val_acc: 25.909824
+Epoch [279], train_loss: 0.060692, val_loss: 0.058189, val_acc: 25.907221
+Epoch [280], train_loss: 0.060324, val_loss: 0.058448, val_acc: 25.838758
+Epoch [281], train_loss: 0.060580, val_loss: 0.058195, val_acc: 25.926996
+Epoch [282], train_loss: 0.060465, val_loss: 0.058176, val_acc: 25.877989
+Epoch [283], train_loss: 0.060423, val_loss: 0.058154, val_acc: 25.925514
+Epoch [284], train_loss: 0.060553, val_loss: 0.058305, val_acc: 25.890009
+Epoch [285], train_loss: 0.060586, val_loss: 0.058414, val_acc: 25.871645
+Epoch [286], train_loss: 0.060492, val_loss: 0.058351, val_acc: 25.826393
+Epoch [287], train_loss: 0.060743, val_loss: 0.058346, val_acc: 25.860126
+Epoch [288], train_loss: 0.060477, val_loss: 0.058225, val_acc: 25.875446
+Epoch [289], train_loss: 0.060488, val_loss: 0.058186, val_acc: 25.873911
+Epoch [290], train_loss: 0.060542, val_loss: 0.058402, val_acc: 25.855593
+Epoch [291], train_loss: 0.060404, val_loss: 0.058245, val_acc: 25.840666
+Epoch [292], train_loss: 0.060450, val_loss: 0.058193, val_acc: 25.877718
+Epoch [293], train_loss: 0.060486, val_loss: 0.058270, val_acc: 25.886541
+Epoch [294], train_loss: 0.060336, val_loss: 0.058175, val_acc: 25.864626
+Epoch [295], train_loss: 0.060383, val_loss: 0.058357, val_acc: 25.842840
+Epoch [296], train_loss: 0.060412, val_loss: 0.058306, val_acc: 25.869503
+Epoch [297], train_loss: 0.060385, val_loss: 0.058205, val_acc: 25.905115
+Epoch [298], train_loss: 0.060382, val_loss: 0.058164, val_acc: 25.954046
+Epoch [299], train_loss: 0.060367, val_loss: 0.058233, val_acc: 25.880907
+Epoch [300], train_loss: 0.060381, val_loss: 0.058046, val_acc: 25.923733
+Epoch [301], train_loss: 0.060515, val_loss: 0.058200, val_acc: 25.920008
+Epoch [302], train_loss: 0.060401, val_loss: 0.058171, val_acc: 25.931494
+Epoch [303], train_loss: 0.060342, val_loss: 0.058113, val_acc: 25.906927
+Epoch [304], train_loss: 0.060435, val_loss: 0.058139, val_acc: 25.941015
+Epoch [305], train_loss: 0.060316, val_loss: 0.058234, val_acc: 25.897850
+Epoch [306], train_loss: 0.060371, val_loss: 0.058127, val_acc: 25.926571
+Epoch [307], train_loss: 0.060308, val_loss: 0.058138, val_acc: 25.936777
+Epoch [308], train_loss: 0.060345, val_loss: 0.058197, val_acc: 25.891575
+Epoch [309], train_loss: 0.060458, val_loss: 0.058364, val_acc: 25.863329
+Epoch [310], train_loss: 0.060377, val_loss: 0.058269, val_acc: 25.920073
+Epoch [311], train_loss: 0.060428, val_loss: 0.058113, val_acc: 25.944248
+Epoch [312], train_loss: 0.060316, val_loss: 0.058258, val_acc: 25.836994
+Epoch [313], train_loss: 0.060373, val_loss: 0.058272, val_acc: 25.952141
+Epoch [314], train_loss: 0.060265, val_loss: 0.058297, val_acc: 25.875338
+Epoch [315], train_loss: 0.060349, val_loss: 0.058199, val_acc: 25.898996
+Epoch [316], train_loss: 0.060268, val_loss: 0.058333, val_acc: 25.874311
+Epoch [317], train_loss: 0.060331, val_loss: 0.058074, val_acc: 25.916584
+Epoch [318], train_loss: 0.060350, val_loss: 0.058126, val_acc: 25.910307
+Epoch [319], train_loss: 0.060199, val_loss: 0.058242, val_acc: 25.925180
+Epoch [320], train_loss: 0.060347, val_loss: 0.058146, val_acc: 25.929079
+Epoch [321], train_loss: 0.060297, val_loss: 0.058052, val_acc: 25.944798
+Epoch [322], train_loss: 0.060359, val_loss: 0.058233, val_acc: 25.868156
+Epoch [323], train_loss: 0.060324, val_loss: 0.058314, val_acc: 25.850338
+Epoch [324], train_loss: 0.060258, val_loss: 0.058248, val_acc: 25.905716
+Epoch [325], train_loss: 0.060247, val_loss: 0.058204, val_acc: 25.885904
+Epoch [326], train_loss: 0.060368, val_loss: 0.058088, val_acc: 25.869997
+Epoch [327], train_loss: 0.060290, val_loss: 0.058233, val_acc: 25.908268
+Epoch [328], train_loss: 0.060283, val_loss: 0.058142, val_acc: 25.875565
+Epoch [329], train_loss: 0.060331, val_loss: 0.058168, val_acc: 25.905811
+Epoch [330], train_loss: 0.060194, val_loss: 0.058244, val_acc: 25.935225
+Epoch [331], train_loss: 0.060236, val_loss: 0.058156, val_acc: 25.849043
+Epoch [332], train_loss: 0.060186, val_loss: 0.058122, val_acc: 25.951548
+Epoch [333], train_loss: 0.060309, val_loss: 0.058226, val_acc: 25.881708
+Epoch [334], train_loss: 0.060174, val_loss: 0.058166, val_acc: 25.909685
+Epoch [335], train_loss: 0.060262, val_loss: 0.058307, val_acc: 25.882778
+Epoch [336], train_loss: 0.060253, val_loss: 0.058249, val_acc: 25.911222
+Epoch [337], train_loss: 0.060274, val_loss: 0.058054, val_acc: 25.972954
+Epoch [338], train_loss: 0.060322, val_loss: 0.058204, val_acc: 25.908970
+Epoch [339], train_loss: 0.060265, val_loss: 0.058097, val_acc: 25.949873
+Epoch [340], train_loss: 0.060209, val_loss: 0.058189, val_acc: 25.895361
+Epoch [341], train_loss: 0.060175, val_loss: 0.058175, val_acc: 25.903194
+Epoch [342], train_loss: 0.060112, val_loss: 0.058134, val_acc: 25.977524
+Epoch [343], train_loss: 0.060132, val_loss: 0.058112, val_acc: 25.943659
+Epoch [344], train_loss: 0.060112, val_loss: 0.058230, val_acc: 25.893654
+Epoch [345], train_loss: 0.060178, val_loss: 0.058107, val_acc: 25.928770
+Epoch [346], train_loss: 0.060210, val_loss: 0.057957, val_acc: 25.920628
+Epoch [347], train_loss: 0.060197, val_loss: 0.058240, val_acc: 25.892899
+Epoch [348], train_loss: 0.060289, val_loss: 0.058081, val_acc: 25.963346
+Epoch [349], train_loss: 0.060220, val_loss: 0.058053, val_acc: 25.960270
+Epoch [350], train_loss: 0.060157, val_loss: 0.058278, val_acc: 25.908575
+Epoch [351], train_loss: 0.060021, val_loss: 0.058087, val_acc: 25.973883
+Epoch [352], train_loss: 0.060194, val_loss: 0.058267, val_acc: 25.905401
+Epoch [353], train_loss: 0.060083, val_loss: 0.058162, val_acc: 25.932194
+Epoch [354], train_loss: 0.060214, val_loss: 0.058211, val_acc: 25.972927
+Epoch [355], train_loss: 0.060206, val_loss: 0.058169, val_acc: 25.943216
+Epoch [356], train_loss: 0.060145, val_loss: 0.058115, val_acc: 25.943691
+Epoch [357], train_loss: 0.060156, val_loss: 0.058352, val_acc: 25.879213
+Epoch [358], train_loss: 0.060207, val_loss: 0.058157, val_acc: 25.934341
+Epoch [359], train_loss: 0.060315, val_loss: 0.058244, val_acc: 25.893244
+Epoch [360], train_loss: 0.060133, val_loss: 0.058188, val_acc: 25.905476
+Epoch [361], train_loss: 0.060211, val_loss: 0.058146, val_acc: 25.946224
+Epoch [362], train_loss: 0.060269, val_loss: 0.058148, val_acc: 25.916197
+Epoch [363], train_loss: 0.060048, val_loss: 0.058153, val_acc: 25.941914
+Epoch [364], train_loss: 0.060217, val_loss: 0.058111, val_acc: 25.968643
+Epoch [365], train_loss: 0.060132, val_loss: 0.058162, val_acc: 25.931334
+Epoch [366], train_loss: 0.060205, val_loss: 0.058101, val_acc: 25.967287
+Epoch [367], train_loss: 0.060105, val_loss: 0.058139, val_acc: 25.933992
+Epoch [368], train_loss: 0.060210, val_loss: 0.058171, val_acc: 25.876911
+Epoch [369], train_loss: 0.060090, val_loss: 0.058168, val_acc: 25.917988
+Epoch [370], train_loss: 0.060057, val_loss: 0.058108, val_acc: 25.903482
+Epoch [371], train_loss: 0.060074, val_loss: 0.058133, val_acc: 25.936796
+Epoch [372], train_loss: 0.060041, val_loss: 0.058063, val_acc: 25.958492
+Epoch [373], train_loss: 0.060131, val_loss: 0.058187, val_acc: 25.916042
+Epoch [374], train_loss: 0.060040, val_loss: 0.057950, val_acc: 25.987978
+Epoch [375], train_loss: 0.060180, val_loss: 0.058097, val_acc: 25.958277
+Epoch [376], train_loss: 0.060242, val_loss: 0.058097, val_acc: 25.956314
+Epoch [377], train_loss: 0.060216, val_loss: 0.058133, val_acc: 25.913862
+Epoch [378], train_loss: 0.060153, val_loss: 0.057942, val_acc: 25.982477
+Epoch [379], train_loss: 0.060081, val_loss: 0.058161, val_acc: 25.929825
+Epoch [380], train_loss: 0.060101, val_loss: 0.058152, val_acc: 25.867653
+Epoch [381], train_loss: 0.060127, val_loss: 0.058180, val_acc: 25.938051
+Epoch [382], train_loss: 0.060071, val_loss: 0.058056, val_acc: 25.942778
+Epoch [383], train_loss: 0.060143, val_loss: 0.058214, val_acc: 25.925026
+Epoch [384], train_loss: 0.060112, val_loss: 0.058362, val_acc: 25.933607
+Epoch [385], train_loss: 0.060030, val_loss: 0.058100, val_acc: 25.941755
+Epoch [386], train_loss: 0.060041, val_loss: 0.058005, val_acc: 25.944679
+Epoch [387], train_loss: 0.059991, val_loss: 0.057970, val_acc: 25.970356
+Epoch [388], train_loss: 0.060122, val_loss: 0.057946, val_acc: 26.019823
+Epoch [389], train_loss: 0.060035, val_loss: 0.058076, val_acc: 25.902674
+Epoch [390], train_loss: 0.060144, val_loss: 0.057962, val_acc: 26.010729
+Epoch [391], train_loss: 0.060210, val_loss: 0.058193, val_acc: 25.877043
+Epoch [392], train_loss: 0.060010, val_loss: 0.058122, val_acc: 25.976521
+Epoch [393], train_loss: 0.060082, val_loss: 0.058168, val_acc: 25.910345
+Epoch [394], train_loss: 0.059935, val_loss: 0.058241, val_acc: 25.939054
+Epoch [395], train_loss: 0.059990, val_loss: 0.058108, val_acc: 25.906111
+Epoch [396], train_loss: 0.060066, val_loss: 0.058118, val_acc: 25.887335
+Epoch [397], train_loss: 0.059947, val_loss: 0.058317, val_acc: 25.883705
+Epoch [398], train_loss: 0.060025, val_loss: 0.058185, val_acc: 25.937740
+Epoch [399], train_loss: 0.059996, val_loss: 0.057937, val_acc: 25.986935
+Epoch [400], train_loss: 0.060131, val_loss: 0.057909, val_acc: 26.021635
+Epoch [401], train_loss: 0.060007, val_loss: 0.058159, val_acc: 25.925871
+Epoch [402], train_loss: 0.060071, val_loss: 0.057960, val_acc: 26.009508
+Epoch [403], train_loss: 0.060112, val_loss: 0.058112, val_acc: 25.957903
+Epoch [404], train_loss: 0.059903, val_loss: 0.058159, val_acc: 25.915257
+Epoch [405], train_loss: 0.060112, val_loss: 0.057964, val_acc: 25.976870
+Epoch [406], train_loss: 0.060063, val_loss: 0.058215, val_acc: 25.926292
+Epoch [407], train_loss: 0.059884, val_loss: 0.058061, val_acc: 25.990828
+Epoch [408], train_loss: 0.059973, val_loss: 0.057979, val_acc: 26.039272
+Epoch [409], train_loss: 0.059907, val_loss: 0.058157, val_acc: 25.958357
+Epoch [410], train_loss: 0.060034, val_loss: 0.058225, val_acc: 25.927040
+Epoch [411], train_loss: 0.060004, val_loss: 0.058168, val_acc: 25.899591
+Epoch [412], train_loss: 0.060156, val_loss: 0.058129, val_acc: 25.932482
+Epoch [413], train_loss: 0.059929, val_loss: 0.058049, val_acc: 25.961611
+Epoch [414], train_loss: 0.060015, val_loss: 0.058143, val_acc: 25.911711
+Epoch [415], train_loss: 0.060013, val_loss: 0.058206, val_acc: 25.935034
+Epoch [416], train_loss: 0.059899, val_loss: 0.058098, val_acc: 25.955774
+Epoch [417], train_loss: 0.059954, val_loss: 0.058123, val_acc: 25.959047
+Epoch [418], train_loss: 0.059960, val_loss: 0.058154, val_acc: 25.949053
+Epoch [419], train_loss: 0.060042, val_loss: 0.058020, val_acc: 25.953371
+Epoch [420], train_loss: 0.060000, val_loss: 0.058057, val_acc: 25.970051
+Epoch [421], train_loss: 0.059911, val_loss: 0.058153, val_acc: 25.931351
+Epoch [422], train_loss: 0.059885, val_loss: 0.058009, val_acc: 25.997168
+Epoch [423], train_loss: 0.059846, val_loss: 0.057997, val_acc: 25.972408
+Epoch [424], train_loss: 0.059864, val_loss: 0.058123, val_acc: 25.965736
+Epoch [425], train_loss: 0.059894, val_loss: 0.058227, val_acc: 25.930443
+Epoch [426], train_loss: 0.059839, val_loss: 0.057991, val_acc: 25.972864
+Epoch [427], train_loss: 0.060033, val_loss: 0.058266, val_acc: 25.918861
+Epoch [428], train_loss: 0.059977, val_loss: 0.058013, val_acc: 26.008915
+Epoch [429], train_loss: 0.060006, val_loss: 0.058208, val_acc: 25.933764
+Epoch [430], train_loss: 0.059881, val_loss: 0.058190, val_acc: 25.927336
+Epoch [431], train_loss: 0.059910, val_loss: 0.057982, val_acc: 26.007673
+Epoch [432], train_loss: 0.059875, val_loss: 0.058177, val_acc: 25.973862
+Epoch [433], train_loss: 0.059791, val_loss: 0.058256, val_acc: 25.920185
+Epoch [434], train_loss: 0.059977, val_loss: 0.058061, val_acc: 25.995794
+Epoch [435], train_loss: 0.059844, val_loss: 0.057981, val_acc: 25.989773
+Epoch [436], train_loss: 0.059974, val_loss: 0.058214, val_acc: 25.946314
+Epoch [437], train_loss: 0.059950, val_loss: 0.058188, val_acc: 25.975012
+Epoch [438], train_loss: 0.059951, val_loss: 0.058184, val_acc: 25.970778
+Epoch [439], train_loss: 0.059919, val_loss: 0.058129, val_acc: 25.957840
+Epoch [440], train_loss: 0.059891, val_loss: 0.058006, val_acc: 25.914568
+Epoch [441], train_loss: 0.059983, val_loss: 0.058108, val_acc: 25.941221
+Epoch [442], train_loss: 0.059911, val_loss: 0.058117, val_acc: 25.869869
+Epoch [443], train_loss: 0.059910, val_loss: 0.057984, val_acc: 25.998249
+Epoch [444], train_loss: 0.060029, val_loss: 0.058142, val_acc: 25.974924
+Epoch [445], train_loss: 0.059837, val_loss: 0.058128, val_acc: 25.924515
+Epoch [446], train_loss: 0.059883, val_loss: 0.058272, val_acc: 25.936625
+Epoch [447], train_loss: 0.059774, val_loss: 0.057925, val_acc: 25.986137
+Epoch [448], train_loss: 0.059873, val_loss: 0.058084, val_acc: 25.938740
+Epoch [449], train_loss: 0.059901, val_loss: 0.058054, val_acc: 25.976643
+Epoch [450], train_loss: 0.059874, val_loss: 0.057981, val_acc: 25.974348
+Epoch [451], train_loss: 0.059859, val_loss: 0.058087, val_acc: 25.939938
+Epoch [452], train_loss: 0.059950, val_loss: 0.057933, val_acc: 26.007137
+Epoch [453], train_loss: 0.059902, val_loss: 0.058083, val_acc: 25.933821
+Epoch [454], train_loss: 0.059859, val_loss: 0.057957, val_acc: 25.967098
+Epoch [455], train_loss: 0.059884, val_loss: 0.058037, val_acc: 25.981483
+Epoch [456], train_loss: 0.059935, val_loss: 0.058170, val_acc: 25.964993
+Epoch [457], train_loss: 0.059905, val_loss: 0.057924, val_acc: 26.001562
+Epoch [458], train_loss: 0.059914, val_loss: 0.058014, val_acc: 25.968838
+Epoch [459], train_loss: 0.059858, val_loss: 0.058063, val_acc: 25.963037
+Epoch [460], train_loss: 0.059923, val_loss: 0.058083, val_acc: 25.967203
+Epoch [461], train_loss: 0.059851, val_loss: 0.058020, val_acc: 25.975498
+Epoch [462], train_loss: 0.059952, val_loss: 0.058056, val_acc: 25.980471
+Epoch [463], train_loss: 0.060002, val_loss: 0.057842, val_acc: 26.071817
+Epoch [464], train_loss: 0.059991, val_loss: 0.057922, val_acc: 26.032833
+Epoch [465], train_loss: 0.059749, val_loss: 0.058076, val_acc: 25.961687
+Epoch [466], train_loss: 0.059841, val_loss: 0.058064, val_acc: 25.954117
+Epoch [467], train_loss: 0.059861, val_loss: 0.058103, val_acc: 25.917839
+Epoch [468], train_loss: 0.059833, val_loss: 0.058125, val_acc: 25.925312
+Epoch [469], train_loss: 0.059796, val_loss: 0.058023, val_acc: 25.940989
+Epoch [470], train_loss: 0.059755, val_loss: 0.057992, val_acc: 25.988991
+Epoch [471], train_loss: 0.059860, val_loss: 0.058052, val_acc: 25.969732
+Epoch [472], train_loss: 0.059835, val_loss: 0.057989, val_acc: 25.964121
+Epoch [473], train_loss: 0.059893, val_loss: 0.058178, val_acc: 25.927900
+Epoch [474], train_loss: 0.059872, val_loss: 0.058171, val_acc: 25.927570
+Epoch [475], train_loss: 0.059647, val_loss: 0.057927, val_acc: 25.997189
+Epoch [476], train_loss: 0.059781, val_loss: 0.058110, val_acc: 25.948336
+Epoch [477], train_loss: 0.059833, val_loss: 0.058137, val_acc: 25.949900
+Epoch [478], train_loss: 0.059906, val_loss: 0.058196, val_acc: 25.926039
+Epoch [479], train_loss: 0.059690, val_loss: 0.057874, val_acc: 26.009199
+Epoch [480], train_loss: 0.059843, val_loss: 0.058050, val_acc: 25.954634
+Epoch [481], train_loss: 0.060049, val_loss: 0.058274, val_acc: 25.884235
+Epoch [482], train_loss: 0.059861, val_loss: 0.058103, val_acc: 25.966772
+Epoch [483], train_loss: 0.059767, val_loss: 0.057981, val_acc: 26.001377
+Epoch [484], train_loss: 0.059799, val_loss: 0.058069, val_acc: 25.937248
+Epoch [485], train_loss: 0.059742, val_loss: 0.058101, val_acc: 25.934471
+Epoch [486], train_loss: 0.059909, val_loss: 0.058105, val_acc: 25.976131
+Epoch [487], train_loss: 0.059825, val_loss: 0.058120, val_acc: 25.978569
+Epoch [488], train_loss: 0.059875, val_loss: 0.058073, val_acc: 25.962690
+Epoch [489], train_loss: 0.059891, val_loss: 0.058138, val_acc: 25.953409
+Epoch [490], train_loss: 0.059702, val_loss: 0.057988, val_acc: 25.993372
+Epoch [491], train_loss: 0.059786, val_loss: 0.057750, val_acc: 26.069557
+Epoch [492], train_loss: 0.059972, val_loss: 0.058131, val_acc: 25.953314
+Epoch [493], train_loss: 0.059831, val_loss: 0.058026, val_acc: 25.941370
+Epoch [494], train_loss: 0.059833, val_loss: 0.058071, val_acc: 25.934679
+Epoch [495], train_loss: 0.059787, val_loss: 0.057971, val_acc: 25.971458
+Epoch [496], train_loss: 0.059730, val_loss: 0.057899, val_acc: 26.054489
+Epoch [497], train_loss: 0.059871, val_loss: 0.058200, val_acc: 25.974079
+Epoch [498], train_loss: 0.059698, val_loss: 0.057952, val_acc: 26.004602
+Epoch [499], train_loss: 0.059744, val_loss: 0.058284, val_acc: 25.908760
+Epoch [500], train_loss: 0.059771, val_loss: 0.058027, val_acc: 25.951756
+Epoch [501], train_loss: 0.059721, val_loss: 0.058216, val_acc: 25.930252
+Epoch [502], train_loss: 0.059816, val_loss: 0.058337, val_acc: 25.919054
+Epoch [503], train_loss: 0.059803, val_loss: 0.058064, val_acc: 25.959946
+Epoch [504], train_loss: 0.059747, val_loss: 0.058304, val_acc: 25.949739
+Epoch [505], train_loss: 0.059762, val_loss: 0.057925, val_acc: 25.994648
+Epoch [506], train_loss: 0.059817, val_loss: 0.058064, val_acc: 25.993980
+Epoch [507], train_loss: 0.059771, val_loss: 0.058073, val_acc: 25.938238
+Epoch [508], train_loss: 0.059765, val_loss: 0.057997, val_acc: 25.999508
+Epoch [509], train_loss: 0.059786, val_loss: 0.058048, val_acc: 25.948431
+Epoch [510], train_loss: 0.059779, val_loss: 0.058028, val_acc: 25.942778
+Epoch [511], train_loss: 0.059799, val_loss: 0.058086, val_acc: 25.980536
+Epoch [512], train_loss: 0.059709, val_loss: 0.058038, val_acc: 25.952246
+Epoch [513], train_loss: 0.059845, val_loss: 0.058099, val_acc: 25.989019
+Epoch [514], train_loss: 0.059726, val_loss: 0.057811, val_acc: 26.070593
+Epoch [515], train_loss: 0.059789, val_loss: 0.057952, val_acc: 26.014719
+Epoch [516], train_loss: 0.059687, val_loss: 0.057953, val_acc: 26.013748
+Epoch [517], train_loss: 0.059641, val_loss: 0.058078, val_acc: 25.937000
+Epoch [518], train_loss: 0.059776, val_loss: 0.058101, val_acc: 25.941393
+Epoch [519], train_loss: 0.059752, val_loss: 0.057959, val_acc: 25.949202
+Epoch [520], train_loss: 0.059785, val_loss: 0.058045, val_acc: 25.949022
+Epoch [521], train_loss: 0.059716, val_loss: 0.058000, val_acc: 25.979734
+Epoch [522], train_loss: 0.059675, val_loss: 0.057828, val_acc: 26.014063
+Epoch [523], train_loss: 0.059730, val_loss: 0.057990, val_acc: 25.975925
+Epoch [524], train_loss: 0.059664, val_loss: 0.058023, val_acc: 25.960249
+Epoch [525], train_loss: 0.059728, val_loss: 0.058041, val_acc: 25.965281
+Epoch [526], train_loss: 0.059882, val_loss: 0.057983, val_acc: 25.991165
+Epoch [527], train_loss: 0.059735, val_loss: 0.057954, val_acc: 25.942783
+Epoch [528], train_loss: 0.059821, val_loss: 0.057969, val_acc: 25.968845
+Epoch [529], train_loss: 0.059715, val_loss: 0.057883, val_acc: 26.035345
+Epoch [530], train_loss: 0.059701, val_loss: 0.058043, val_acc: 25.904154
+Epoch [531], train_loss: 0.059614, val_loss: 0.058005, val_acc: 25.986912
+Epoch [532], train_loss: 0.059718, val_loss: 0.058100, val_acc: 25.965874
+Epoch [533], train_loss: 0.059648, val_loss: 0.058195, val_acc: 25.915716
+Epoch [534], train_loss: 0.059677, val_loss: 0.058022, val_acc: 25.982538
+Epoch [535], train_loss: 0.059774, val_loss: 0.058228, val_acc: 25.923231
+Epoch [536], train_loss: 0.059654, val_loss: 0.057980, val_acc: 25.958357
+Epoch [537], train_loss: 0.059731, val_loss: 0.057831, val_acc: 26.053873
+Epoch [538], train_loss: 0.059770, val_loss: 0.057941, val_acc: 25.987202
+Epoch [539], train_loss: 0.059640, val_loss: 0.058204, val_acc: 25.931923
+Epoch [540], train_loss: 0.059656, val_loss: 0.057933, val_acc: 26.037683
+Epoch [541], train_loss: 0.059786, val_loss: 0.057987, val_acc: 25.960411
+Epoch [542], train_loss: 0.059692, val_loss: 0.058127, val_acc: 25.976509
+Epoch [543], train_loss: 0.059612, val_loss: 0.058090, val_acc: 25.977089
+Epoch [544], train_loss: 0.059835, val_loss: 0.058021, val_acc: 26.001291
+Epoch [545], train_loss: 0.059863, val_loss: 0.057825, val_acc: 26.030481
+Epoch [546], train_loss: 0.059710, val_loss: 0.057999, val_acc: 25.975183
+Epoch [547], train_loss: 0.059613, val_loss: 0.058133, val_acc: 25.965147
+Epoch [548], train_loss: 0.059780, val_loss: 0.058026, val_acc: 26.036005
+Epoch [549], train_loss: 0.059600, val_loss: 0.058145, val_acc: 25.965681
+Epoch [550], train_loss: 0.059703, val_loss: 0.058181, val_acc: 25.942122
+Epoch [551], train_loss: 0.059723, val_loss: 0.058164, val_acc: 25.972160
+Epoch [552], train_loss: 0.059779, val_loss: 0.057956, val_acc: 26.020945
+Epoch [553], train_loss: 0.059831, val_loss: 0.057948, val_acc: 26.019497
+Epoch [554], train_loss: 0.059638, val_loss: 0.058115, val_acc: 25.955601
+Epoch [555], train_loss: 0.059707, val_loss: 0.057864, val_acc: 25.995829
+Epoch [556], train_loss: 0.059559, val_loss: 0.057901, val_acc: 26.003029
+Epoch [557], train_loss: 0.059625, val_loss: 0.058008, val_acc: 25.988531
+Epoch [558], train_loss: 0.059796, val_loss: 0.058127, val_acc: 25.960363
+Epoch [559], train_loss: 0.059648, val_loss: 0.057983, val_acc: 25.970814
+Epoch [560], train_loss: 0.059588, val_loss: 0.058061, val_acc: 25.951229
+Epoch [561], train_loss: 0.059769, val_loss: 0.057920, val_acc: 25.997587
+Epoch [562], train_loss: 0.059607, val_loss: 0.057898, val_acc: 26.013063
+Epoch [563], train_loss: 0.059708, val_loss: 0.058002, val_acc: 26.010176
+Epoch [564], train_loss: 0.059812, val_loss: 0.058318, val_acc: 25.925861
+Epoch [565], train_loss: 0.059950, val_loss: 0.057849, val_acc: 26.057327
+Epoch [566], train_loss: 0.059779, val_loss: 0.058165, val_acc: 25.950920
+Epoch [567], train_loss: 0.059664, val_loss: 0.057937, val_acc: 25.990297
+Epoch [568], train_loss: 0.059645, val_loss: 0.058005, val_acc: 26.008120
+Epoch [569], train_loss: 0.059693, val_loss: 0.058067, val_acc: 25.977583
+Epoch [570], train_loss: 0.059631, val_loss: 0.058118, val_acc: 25.948662
+Epoch [571], train_loss: 0.059723, val_loss: 0.058188, val_acc: 25.935745
+Epoch [572], train_loss: 0.059549, val_loss: 0.057920, val_acc: 26.025908
+Epoch [573], train_loss: 0.059695, val_loss: 0.058037, val_acc: 25.998301
+Epoch [574], train_loss: 0.059643, val_loss: 0.058037, val_acc: 25.953337
+Epoch [575], train_loss: 0.059583, val_loss: 0.057863, val_acc: 25.963699
+Epoch [576], train_loss: 0.059776, val_loss: 0.057996, val_acc: 25.976812
+Epoch [577], train_loss: 0.059673, val_loss: 0.057857, val_acc: 25.970045
+Epoch [578], train_loss: 0.059558, val_loss: 0.058083, val_acc: 25.969379
+Epoch [579], train_loss: 0.059601, val_loss: 0.058022, val_acc: 25.976404
+Epoch [580], train_loss: 0.059540, val_loss: 0.057927, val_acc: 25.971949
+Epoch [581], train_loss: 0.059667, val_loss: 0.058068, val_acc: 25.968391
+Epoch [582], train_loss: 0.059644, val_loss: 0.058141, val_acc: 25.943680
+Epoch [583], train_loss: 0.059706, val_loss: 0.057947, val_acc: 25.965906
+Epoch [584], train_loss: 0.059649, val_loss: 0.057816, val_acc: 26.072136
+Epoch [585], train_loss: 0.059660, val_loss: 0.058030, val_acc: 25.976408
+Epoch [586], train_loss: 0.059688, val_loss: 0.057958, val_acc: 25.995995
+Epoch [587], train_loss: 0.059637, val_loss: 0.057951, val_acc: 25.975407
+Epoch [588], train_loss: 0.059695, val_loss: 0.058171, val_acc: 25.982071
+Epoch [589], train_loss: 0.059689, val_loss: 0.057946, val_acc: 25.991413
+Epoch [590], train_loss: 0.059662, val_loss: 0.057930, val_acc: 26.001869
+Epoch [591], train_loss: 0.059598, val_loss: 0.057978, val_acc: 25.945267
+Epoch [592], train_loss: 0.059617, val_loss: 0.058219, val_acc: 25.931019
+Epoch [593], train_loss: 0.059608, val_loss: 0.057983, val_acc: 25.976154
+Epoch [594], train_loss: 0.059551, val_loss: 0.057985, val_acc: 25.998610
+Epoch [595], train_loss: 0.059510, val_loss: 0.057851, val_acc: 26.016840
+Epoch [596], train_loss: 0.059680, val_loss: 0.057903, val_acc: 25.991407
+Epoch [597], train_loss: 0.059648, val_loss: 0.057763, val_acc: 26.062788
+Epoch [598], train_loss: 0.059628, val_loss: 0.057883, val_acc: 26.049292
+Epoch [599], train_loss: 0.059573, val_loss: 0.057897, val_acc: 26.028984
+Epoch [600], train_loss: 0.059607, val_loss: 0.058073, val_acc: 25.962427
+Epoch [601], train_loss: 0.059527, val_loss: 0.058108, val_acc: 25.973707
+Epoch [602], train_loss: 0.059531, val_loss: 0.058044, val_acc: 25.960798
+Epoch [603], train_loss: 0.059537, val_loss: 0.058021, val_acc: 25.982517
+Epoch [604], train_loss: 0.059728, val_loss: 0.057865, val_acc: 26.050163
+Epoch [605], train_loss: 0.059685, val_loss: 0.058144, val_acc: 25.924437
+Epoch [606], train_loss: 0.059745, val_loss: 0.058004, val_acc: 26.003433
+Epoch [607], train_loss: 0.059556, val_loss: 0.058218, val_acc: 25.918032
+Epoch [608], train_loss: 0.059532, val_loss: 0.058039, val_acc: 25.978312
+Epoch [609], train_loss: 0.059573, val_loss: 0.057910, val_acc: 26.016542
+Epoch [610], train_loss: 0.059631, val_loss: 0.058074, val_acc: 25.952366
+Epoch [611], train_loss: 0.059489, val_loss: 0.057819, val_acc: 26.029076
+Epoch [612], train_loss: 0.059633, val_loss: 0.057951, val_acc: 25.997898
+Epoch [613], train_loss: 0.059622, val_loss: 0.058170, val_acc: 25.958780
+Epoch [614], train_loss: 0.059579, val_loss: 0.057954, val_acc: 26.020411
+Epoch [615], train_loss: 0.059596, val_loss: 0.058138, val_acc: 25.961655
+Epoch [616], train_loss: 0.059554, val_loss: 0.057986, val_acc: 25.999649
+Epoch [617], train_loss: 0.059680, val_loss: 0.058029, val_acc: 25.975641
+Epoch [618], train_loss: 0.059601, val_loss: 0.058353, val_acc: 25.899195
+Epoch [619], train_loss: 0.059628, val_loss: 0.058106, val_acc: 25.953342
+Epoch [620], train_loss: 0.059654, val_loss: 0.058236, val_acc: 25.943623
+Epoch [621], train_loss: 0.059610, val_loss: 0.057868, val_acc: 26.002840
+Epoch [622], train_loss: 0.059700, val_loss: 0.057982, val_acc: 25.971249
+Epoch [623], train_loss: 0.059585, val_loss: 0.058101, val_acc: 25.990566
+Epoch [624], train_loss: 0.059644, val_loss: 0.058011, val_acc: 26.022026
+Epoch [625], train_loss: 0.059731, val_loss: 0.058050, val_acc: 25.972757
+Epoch [626], train_loss: 0.059489, val_loss: 0.058077, val_acc: 25.978254
+Epoch [627], train_loss: 0.059646, val_loss: 0.058046, val_acc: 25.978569
+Epoch [628], train_loss: 0.059612, val_loss: 0.058170, val_acc: 25.972837
+Epoch [629], train_loss: 0.059606, val_loss: 0.058051, val_acc: 26.001255
+Epoch [630], train_loss: 0.059560, val_loss: 0.058070, val_acc: 25.995569
+Epoch [631], train_loss: 0.059485, val_loss: 0.058018, val_acc: 26.005447
+Epoch [632], train_loss: 0.059531, val_loss: 0.058042, val_acc: 25.949028
+Epoch [633], train_loss: 0.059527, val_loss: 0.058175, val_acc: 25.937843
+Epoch [634], train_loss: 0.059515, val_loss: 0.057902, val_acc: 26.029547
+Epoch [635], train_loss: 0.059624, val_loss: 0.057892, val_acc: 26.012316
+Epoch [636], train_loss: 0.059519, val_loss: 0.057988, val_acc: 26.017757
+Epoch [637], train_loss: 0.059560, val_loss: 0.058200, val_acc: 25.945074
+Epoch [638], train_loss: 0.059606, val_loss: 0.057846, val_acc: 26.049631
+Epoch [639], train_loss: 0.059612, val_loss: 0.057947, val_acc: 26.009642
+Epoch [640], train_loss: 0.059556, val_loss: 0.058074, val_acc: 25.973356
+Epoch [641], train_loss: 0.059500, val_loss: 0.057981, val_acc: 25.991474
+Epoch [642], train_loss: 0.059489, val_loss: 0.057952, val_acc: 25.965508
+Epoch [643], train_loss: 0.059656, val_loss: 0.057941, val_acc: 25.975582
+Epoch [644], train_loss: 0.059522, val_loss: 0.058031, val_acc: 25.995045
+Epoch [645], train_loss: 0.059598, val_loss: 0.058076, val_acc: 25.951283
+Epoch [646], train_loss: 0.059545, val_loss: 0.058025, val_acc: 25.961992
+Epoch [647], train_loss: 0.059570, val_loss: 0.058087, val_acc: 25.993526
+Epoch [648], train_loss: 0.059578, val_loss: 0.057981, val_acc: 25.994495
+Epoch [649], train_loss: 0.059552, val_loss: 0.057873, val_acc: 26.019405
+Epoch [650], train_loss: 0.059474, val_loss: 0.058043, val_acc: 25.965036
+Epoch [651], train_loss: 0.059702, val_loss: 0.058155, val_acc: 25.936041
+Epoch [652], train_loss: 0.059448, val_loss: 0.058165, val_acc: 25.970621
+Epoch [653], train_loss: 0.059633, val_loss: 0.057904, val_acc: 26.013657
+Epoch [654], train_loss: 0.059620, val_loss: 0.058180, val_acc: 25.957493
+Epoch [655], train_loss: 0.059491, val_loss: 0.057940, val_acc: 26.019234
+Epoch [656], train_loss: 0.059489, val_loss: 0.058014, val_acc: 26.009029
+Epoch [657], train_loss: 0.059487, val_loss: 0.058071, val_acc: 25.964544
+Epoch [658], train_loss: 0.059540, val_loss: 0.057995, val_acc: 26.022871
+Epoch [659], train_loss: 0.059469, val_loss: 0.057873, val_acc: 25.997267
+Epoch [660], train_loss: 0.059584, val_loss: 0.057900, val_acc: 26.010725
+Epoch [661], train_loss: 0.059511, val_loss: 0.057979, val_acc: 26.005556
+Epoch [662], train_loss: 0.059510, val_loss: 0.057991, val_acc: 25.993996
+Epoch [663], train_loss: 0.059683, val_loss: 0.058040, val_acc: 25.969915
+Epoch [664], train_loss: 0.059520, val_loss: 0.058067, val_acc: 25.967505
+Epoch [665], train_loss: 0.059585, val_loss: 0.058013, val_acc: 25.983973
+Epoch [666], train_loss: 0.059527, val_loss: 0.057830, val_acc: 26.049608
+Epoch [667], train_loss: 0.059528, val_loss: 0.058116, val_acc: 25.962406
+Epoch [668], train_loss: 0.059578, val_loss: 0.058178, val_acc: 25.959120
+Epoch [669], train_loss: 0.059590, val_loss: 0.058121, val_acc: 25.957544
+Epoch [670], train_loss: 0.059434, val_loss: 0.057947, val_acc: 25.984470
+Epoch [671], train_loss: 0.059484, val_loss: 0.057953, val_acc: 26.002064
+Epoch [672], train_loss: 0.059529, val_loss: 0.057940, val_acc: 26.027824
+Epoch [673], train_loss: 0.059441, val_loss: 0.058282, val_acc: 25.942759
+Epoch [674], train_loss: 0.059505, val_loss: 0.058115, val_acc: 25.934965
+Epoch [675], train_loss: 0.059585, val_loss: 0.058225, val_acc: 25.967033
+Epoch [676], train_loss: 0.059550, val_loss: 0.057952, val_acc: 25.975113
+Epoch [677], train_loss: 0.059506, val_loss: 0.057933, val_acc: 25.998569
+Epoch [678], train_loss: 0.059539, val_loss: 0.057851, val_acc: 26.053377
+Epoch [679], train_loss: 0.059567, val_loss: 0.057973, val_acc: 25.994606
+Epoch [680], train_loss: 0.059388, val_loss: 0.057951, val_acc: 25.984541
+Epoch [681], train_loss: 0.059495, val_loss: 0.057933, val_acc: 26.014717
+Epoch [682], train_loss: 0.059559, val_loss: 0.057996, val_acc: 25.988583
+Epoch [683], train_loss: 0.059493, val_loss: 0.058037, val_acc: 25.977297
+Epoch [684], train_loss: 0.059524, val_loss: 0.057793, val_acc: 26.046284
+Epoch [685], train_loss: 0.059455, val_loss: 0.057875, val_acc: 26.015165
+Epoch [686], train_loss: 0.059462, val_loss: 0.058257, val_acc: 25.960041
+Epoch [687], train_loss: 0.059372, val_loss: 0.058009, val_acc: 25.987970
+Epoch [688], train_loss: 0.059503, val_loss: 0.058122, val_acc: 25.981880
+Epoch [689], train_loss: 0.059519, val_loss: 0.058043, val_acc: 25.982344
+Epoch [690], train_loss: 0.059576, val_loss: 0.057960, val_acc: 26.027617
+Epoch [691], train_loss: 0.059497, val_loss: 0.058157, val_acc: 26.008146
+Epoch [692], train_loss: 0.059538, val_loss: 0.058135, val_acc: 25.964718
+Epoch [693], train_loss: 0.059367, val_loss: 0.058030, val_acc: 25.979250
+Epoch [694], train_loss: 0.059505, val_loss: 0.057874, val_acc: 26.056965
+Epoch [695], train_loss: 0.059417, val_loss: 0.057982, val_acc: 25.952694
+Epoch [696], train_loss: 0.059521, val_loss: 0.057722, val_acc: 26.059404
+Epoch [697], train_loss: 0.059456, val_loss: 0.058033, val_acc: 25.978573
+Epoch [698], train_loss: 0.059433, val_loss: 0.057833, val_acc: 26.042421
+Epoch [699], train_loss: 0.059458, val_loss: 0.058011, val_acc: 25.973423
+Epoch [700], train_loss: 0.059540, val_loss: 0.057941, val_acc: 25.992741
+Epoch [701], train_loss: 0.059400, val_loss: 0.057884, val_acc: 26.003433
+Epoch [702], train_loss: 0.059416, val_loss: 0.058010, val_acc: 25.982866
+Epoch [703], train_loss: 0.059461, val_loss: 0.058012, val_acc: 25.949656
+Epoch [704], train_loss: 0.059501, val_loss: 0.057984, val_acc: 26.030964
+Epoch [705], train_loss: 0.059446, val_loss: 0.057816, val_acc: 26.013208
+Epoch [706], train_loss: 0.059404, val_loss: 0.057915, val_acc: 26.014841
+Epoch [707], train_loss: 0.059394, val_loss: 0.058077, val_acc: 25.961887
+Epoch [708], train_loss: 0.059447, val_loss: 0.057906, val_acc: 26.029518
+Epoch [709], train_loss: 0.059583, val_loss: 0.058029, val_acc: 25.986307
+Epoch [710], train_loss: 0.059537, val_loss: 0.058140, val_acc: 25.920843
+Epoch [711], train_loss: 0.059443, val_loss: 0.057914, val_acc: 25.984766
+Epoch [712], train_loss: 0.059471, val_loss: 0.057947, val_acc: 26.022511
+Epoch [713], train_loss: 0.059528, val_loss: 0.058021, val_acc: 25.954966
+Epoch [714], train_loss: 0.059417, val_loss: 0.057970, val_acc: 26.022612
+Epoch [715], train_loss: 0.059522, val_loss: 0.057778, val_acc: 26.068937
+Epoch [716], train_loss: 0.059539, val_loss: 0.058078, val_acc: 25.998110
+Epoch [717], train_loss: 0.059456, val_loss: 0.058073, val_acc: 25.958067
+Epoch [718], train_loss: 0.059513, val_loss: 0.057822, val_acc: 26.025448
+Epoch [719], train_loss: 0.059384, val_loss: 0.057804, val_acc: 26.033445
+Epoch [720], train_loss: 0.059379, val_loss: 0.057928, val_acc: 26.019506
+Epoch [721], train_loss: 0.059518, val_loss: 0.057986, val_acc: 25.999266
+Epoch [722], train_loss: 0.059432, val_loss: 0.058151, val_acc: 25.956507
+Epoch [723], train_loss: 0.059456, val_loss: 0.058005, val_acc: 26.006626
+Epoch [724], train_loss: 0.059541, val_loss: 0.058104, val_acc: 25.975197
+Epoch [725], train_loss: 0.059488, val_loss: 0.058116, val_acc: 25.964649
+Epoch [726], train_loss: 0.059441, val_loss: 0.057849, val_acc: 25.996168
+Epoch [727], train_loss: 0.059385, val_loss: 0.058011, val_acc: 26.009007
+Epoch [728], train_loss: 0.059440, val_loss: 0.058001, val_acc: 25.980183
+Epoch [729], train_loss: 0.059416, val_loss: 0.057788, val_acc: 26.066319
+Epoch [730], train_loss: 0.059461, val_loss: 0.058063, val_acc: 25.971949
+Epoch [731], train_loss: 0.059545, val_loss: 0.058206, val_acc: 25.962788
+Epoch [732], train_loss: 0.059415, val_loss: 0.058185, val_acc: 25.966398
+Epoch [733], train_loss: 0.059377, val_loss: 0.057900, val_acc: 26.012335
+Epoch [734], train_loss: 0.059476, val_loss: 0.058073, val_acc: 25.976460
+Epoch [735], train_loss: 0.059463, val_loss: 0.058090, val_acc: 25.998249
+Epoch [736], train_loss: 0.059404, val_loss: 0.057908, val_acc: 25.974028
+Epoch [737], train_loss: 0.059468, val_loss: 0.058152, val_acc: 25.953484
+Epoch [738], train_loss: 0.059639, val_loss: 0.057837, val_acc: 26.030588
+Epoch [739], train_loss: 0.059446, val_loss: 0.057929, val_acc: 26.004210
+Epoch [740], train_loss: 0.059362, val_loss: 0.058048, val_acc: 26.020985
+Epoch [741], train_loss: 0.059355, val_loss: 0.057956, val_acc: 25.991596
+Epoch [742], train_loss: 0.059269, val_loss: 0.058010, val_acc: 25.986944
+Epoch [743], train_loss: 0.059315, val_loss: 0.057893, val_acc: 26.031340
+Epoch [744], train_loss: 0.059467, val_loss: 0.057831, val_acc: 26.054276
+Epoch [745], train_loss: 0.059398, val_loss: 0.057873, val_acc: 26.023903
+Epoch [746], train_loss: 0.059445, val_loss: 0.058027, val_acc: 25.978712
+Epoch [747], train_loss: 0.059294, val_loss: 0.057856, val_acc: 26.009672
+Epoch [748], train_loss: 0.059498, val_loss: 0.058264, val_acc: 25.919395
+Epoch [749], train_loss: 0.059404, val_loss: 0.057816, val_acc: 25.993061
+Epoch [750], train_loss: 0.059427, val_loss: 0.058217, val_acc: 25.958675
+Epoch [751], train_loss: 0.059440, val_loss: 0.057884, val_acc: 25.990086
+Epoch [752], train_loss: 0.059521, val_loss: 0.058016, val_acc: 25.970385
+Epoch [753], train_loss: 0.059407, val_loss: 0.058132, val_acc: 25.974150
+Epoch [754], train_loss: 0.059483, val_loss: 0.058012, val_acc: 26.000706
+Epoch [755], train_loss: 0.059436, val_loss: 0.058098, val_acc: 25.975132
+Epoch [756], train_loss: 0.059494, val_loss: 0.058028, val_acc: 25.971699
+Epoch [757], train_loss: 0.059406, val_loss: 0.058328, val_acc: 25.914412
+Epoch [758], train_loss: 0.059394, val_loss: 0.057971, val_acc: 25.999897
+Epoch [759], train_loss: 0.059414, val_loss: 0.057914, val_acc: 26.051441
+Epoch [760], train_loss: 0.059379, val_loss: 0.058004, val_acc: 25.971800
+Epoch [761], train_loss: 0.059393, val_loss: 0.057974, val_acc: 26.024096
+Epoch [762], train_loss: 0.059464, val_loss: 0.058051, val_acc: 25.996292
+Epoch [763], train_loss: 0.059444, val_loss: 0.057917, val_acc: 26.014387
+Epoch [764], train_loss: 0.059429, val_loss: 0.058016, val_acc: 25.998398
+Epoch [765], train_loss: 0.059410, val_loss: 0.058037, val_acc: 25.988390
+Epoch [766], train_loss: 0.059364, val_loss: 0.058003, val_acc: 26.011162
+Epoch [767], train_loss: 0.059426, val_loss: 0.057946, val_acc: 26.012087
+Epoch [768], train_loss: 0.059291, val_loss: 0.058038, val_acc: 25.948004
+Epoch [769], train_loss: 0.059380, val_loss: 0.057935, val_acc: 25.981352
+Epoch [770], train_loss: 0.059351, val_loss: 0.057955, val_acc: 26.024061
+Epoch [771], train_loss: 0.059486, val_loss: 0.057962, val_acc: 26.016537
+Epoch [772], train_loss: 0.059440, val_loss: 0.057876, val_acc: 26.026081
+Epoch [773], train_loss: 0.059512, val_loss: 0.058074, val_acc: 25.980967
+Epoch [774], train_loss: 0.059297, val_loss: 0.057923, val_acc: 25.991041
+Epoch [775], train_loss: 0.059416, val_loss: 0.057851, val_acc: 26.058643
+Epoch [776], train_loss: 0.059442, val_loss: 0.057757, val_acc: 26.064154
+Epoch [777], train_loss: 0.059332, val_loss: 0.058063, val_acc: 25.979292
+Epoch [778], train_loss: 0.059408, val_loss: 0.057921, val_acc: 26.029995
+Epoch [779], train_loss: 0.059384, val_loss: 0.057860, val_acc: 26.043531
+Epoch [780], train_loss: 0.059381, val_loss: 0.057814, val_acc: 26.062981
+Epoch [781], train_loss: 0.059346, val_loss: 0.057913, val_acc: 25.992342
+Epoch [782], train_loss: 0.059337, val_loss: 0.057965, val_acc: 26.008720
+Epoch [783], train_loss: 0.059496, val_loss: 0.058194, val_acc: 25.926365
+Epoch [784], train_loss: 0.059466, val_loss: 0.057872, val_acc: 26.026039
+Epoch [785], train_loss: 0.059379, val_loss: 0.057995, val_acc: 25.995319
+Epoch [786], train_loss: 0.059383, val_loss: 0.057849, val_acc: 26.018595
+Epoch [787], train_loss: 0.059424, val_loss: 0.058016, val_acc: 25.947664
+Epoch [788], train_loss: 0.059584, val_loss: 0.058103, val_acc: 25.961296
+Epoch [789], train_loss: 0.059433, val_loss: 0.057970, val_acc: 26.009813
+Epoch [790], train_loss: 0.059361, val_loss: 0.057959, val_acc: 26.018137
+Epoch [791], train_loss: 0.059454, val_loss: 0.057803, val_acc: 26.009819
+Epoch [792], train_loss: 0.059308, val_loss: 0.058010, val_acc: 26.010870
+Epoch [793], train_loss: 0.059281, val_loss: 0.058036, val_acc: 25.970911
+Epoch [794], train_loss: 0.059384, val_loss: 0.057949, val_acc: 25.995481
+Epoch [795], train_loss: 0.059404, val_loss: 0.057892, val_acc: 26.021320
+Epoch [796], train_loss: 0.059417, val_loss: 0.057892, val_acc: 26.001726
+Epoch [797], train_loss: 0.059523, val_loss: 0.057892, val_acc: 25.991920
+Epoch [798], train_loss: 0.059318, val_loss: 0.057890, val_acc: 26.022039
+Epoch [799], train_loss: 0.059389, val_loss: 0.057934, val_acc: 25.938078
+Epoch [800], train_loss: 0.059384, val_loss: 0.057872, val_acc: 26.030125
+Epoch [801], train_loss: 0.059330, val_loss: 0.057761, val_acc: 26.062941
+Epoch [802], train_loss: 0.059419, val_loss: 0.057891, val_acc: 26.016054
+Epoch [803], train_loss: 0.059402, val_loss: 0.058074, val_acc: 25.997862
+Epoch [804], train_loss: 0.059309, val_loss: 0.057932, val_acc: 25.979338
+Epoch [805], train_loss: 0.059350, val_loss: 0.057856, val_acc: 25.998674
+Epoch [806], train_loss: 0.059431, val_loss: 0.058007, val_acc: 26.004593
+Epoch [807], train_loss: 0.059366, val_loss: 0.057934, val_acc: 26.011593
+Epoch [808], train_loss: 0.059489, val_loss: 0.058170, val_acc: 25.948341
+Epoch [809], train_loss: 0.059410, val_loss: 0.057972, val_acc: 26.016911
+Epoch [810], train_loss: 0.059309, val_loss: 0.057880, val_acc: 26.040188
+Epoch [811], train_loss: 0.059544, val_loss: 0.058502, val_acc: 25.957085
+Epoch [812], train_loss: 0.059338, val_loss: 0.057930, val_acc: 26.010941
+Epoch [813], train_loss: 0.059409, val_loss: 0.057959, val_acc: 26.026363
+Epoch [814], train_loss: 0.059387, val_loss: 0.058001, val_acc: 25.995693
+Epoch [815], train_loss: 0.059383, val_loss: 0.058121, val_acc: 25.960451
+Epoch [816], train_loss: 0.059454, val_loss: 0.057879, val_acc: 26.035244
+Epoch [817], train_loss: 0.059354, val_loss: 0.057831, val_acc: 26.020279
+Epoch [818], train_loss: 0.059448, val_loss: 0.057910, val_acc: 26.013288
+Epoch [819], train_loss: 0.059325, val_loss: 0.057820, val_acc: 25.995499
+Epoch [820], train_loss: 0.059303, val_loss: 0.058044, val_acc: 25.977365
+Epoch [821], train_loss: 0.059391, val_loss: 0.057987, val_acc: 25.993357
+Epoch [822], train_loss: 0.059344, val_loss: 0.057922, val_acc: 26.038380
+Epoch [823], train_loss: 0.059420, val_loss: 0.057850, val_acc: 26.008867
+Epoch [824], train_loss: 0.059310, val_loss: 0.057780, val_acc: 26.044966
+Epoch [825], train_loss: 0.059283, val_loss: 0.058014, val_acc: 25.984934
+Epoch [826], train_loss: 0.059333, val_loss: 0.057903, val_acc: 26.024179
+Epoch [827], train_loss: 0.059356, val_loss: 0.057947, val_acc: 25.994667
+Epoch [828], train_loss: 0.059436, val_loss: 0.058111, val_acc: 25.985254
+Epoch [829], train_loss: 0.059332, val_loss: 0.057911, val_acc: 25.988358
+Epoch [830], train_loss: 0.059294, val_loss: 0.058015, val_acc: 25.990650
+Epoch [831], train_loss: 0.059246, val_loss: 0.058072, val_acc: 25.962776
+Epoch [832], train_loss: 0.059424, val_loss: 0.058131, val_acc: 25.944107
+Epoch [833], train_loss: 0.059345, val_loss: 0.057862, val_acc: 26.041924
+Epoch [834], train_loss: 0.059376, val_loss: 0.058031, val_acc: 25.994759
+Epoch [835], train_loss: 0.059402, val_loss: 0.057733, val_acc: 26.054600
+Epoch [836], train_loss: 0.059352, val_loss: 0.057910, val_acc: 26.004541
+Epoch [837], train_loss: 0.059363, val_loss: 0.058013, val_acc: 25.974583
+Epoch [838], train_loss: 0.059371, val_loss: 0.057967, val_acc: 25.995836
+Epoch [839], train_loss: 0.059351, val_loss: 0.057907, val_acc: 26.013847
+Epoch [840], train_loss: 0.059383, val_loss: 0.057904, val_acc: 26.022642
+Epoch [841], train_loss: 0.059309, val_loss: 0.057861, val_acc: 26.021107
+Epoch [842], train_loss: 0.059434, val_loss: 0.057833, val_acc: 26.019724
+Epoch [843], train_loss: 0.059249, val_loss: 0.057907, val_acc: 26.019129
+Epoch [844], train_loss: 0.059341, val_loss: 0.057989, val_acc: 25.984781
+Epoch [845], train_loss: 0.059224, val_loss: 0.057987, val_acc: 25.982597
+Epoch [846], train_loss: 0.059340, val_loss: 0.057794, val_acc: 26.060871
+Epoch [847], train_loss: 0.059317, val_loss: 0.058308, val_acc: 25.902796
+Epoch [848], train_loss: 0.059416, val_loss: 0.057896, val_acc: 25.976610
+Epoch [849], train_loss: 0.059314, val_loss: 0.057908, val_acc: 26.006260
+Epoch [850], train_loss: 0.059306, val_loss: 0.057736, val_acc: 26.084511
+Epoch [851], train_loss: 0.059351, val_loss: 0.057696, val_acc: 26.087400
+Epoch [852], train_loss: 0.059336, val_loss: 0.057994, val_acc: 25.963860
+Epoch [853], train_loss: 0.059278, val_loss: 0.058155, val_acc: 25.962126
+Epoch [854], train_loss: 0.059244, val_loss: 0.057889, val_acc: 26.000147
+Epoch [855], train_loss: 0.059284, val_loss: 0.057826, val_acc: 26.023586
+Epoch [856], train_loss: 0.059307, val_loss: 0.057795, val_acc: 26.008329
+Epoch [857], train_loss: 0.059388, val_loss: 0.057956, val_acc: 26.007412
+Epoch [858], train_loss: 0.059399, val_loss: 0.058041, val_acc: 25.990740
+Epoch [859], train_loss: 0.059417, val_loss: 0.057752, val_acc: 26.072489
+Epoch [860], train_loss: 0.059236, val_loss: 0.057992, val_acc: 26.000578
+Epoch [861], train_loss: 0.059362, val_loss: 0.057949, val_acc: 26.015581
+Epoch [862], train_loss: 0.059312, val_loss: 0.057788, val_acc: 26.029333
+Epoch [863], train_loss: 0.059419, val_loss: 0.057737, val_acc: 26.079412
+Epoch [864], train_loss: 0.059278, val_loss: 0.057945, val_acc: 25.993555
+Epoch [865], train_loss: 0.059386, val_loss: 0.057774, val_acc: 26.035669
+Epoch [866], train_loss: 0.059375, val_loss: 0.057925, val_acc: 26.000725
+Epoch [867], train_loss: 0.059241, val_loss: 0.057909, val_acc: 25.978277
+Epoch [868], train_loss: 0.059313, val_loss: 0.057826, val_acc: 26.048933
+Epoch [869], train_loss: 0.059274, val_loss: 0.057799, val_acc: 26.043705
+Epoch [870], train_loss: 0.059281, val_loss: 0.057845, val_acc: 26.053556
+Epoch [871], train_loss: 0.059264, val_loss: 0.058037, val_acc: 25.993118
+Epoch [872], train_loss: 0.059281, val_loss: 0.057831, val_acc: 26.013260
+Epoch [873], train_loss: 0.059214, val_loss: 0.057869, val_acc: 26.015987
+Epoch [874], train_loss: 0.059269, val_loss: 0.058016, val_acc: 25.986301
+Epoch [875], train_loss: 0.059325, val_loss: 0.058192, val_acc: 25.972277
+Epoch [876], train_loss: 0.059272, val_loss: 0.057905, val_acc: 26.038658
+Epoch [877], train_loss: 0.059315, val_loss: 0.058064, val_acc: 25.964546
+Epoch [878], train_loss: 0.059294, val_loss: 0.057760, val_acc: 26.022018
+Epoch [879], train_loss: 0.059340, val_loss: 0.057977, val_acc: 26.020140
+Epoch [880], train_loss: 0.059299, val_loss: 0.057892, val_acc: 26.058378
+Epoch [881], train_loss: 0.059208, val_loss: 0.057955, val_acc: 26.002691
+Epoch [882], train_loss: 0.059319, val_loss: 0.058008, val_acc: 25.966969
+Epoch [883], train_loss: 0.059160, val_loss: 0.057910, val_acc: 26.046370
+Epoch [884], train_loss: 0.059276, val_loss: 0.057950, val_acc: 25.997974
+Epoch [885], train_loss: 0.059332, val_loss: 0.058111, val_acc: 25.980967
+Epoch [886], train_loss: 0.059206, val_loss: 0.058223, val_acc: 25.977449
+Epoch [887], train_loss: 0.059310, val_loss: 0.057892, val_acc: 26.014082
+Epoch [888], train_loss: 0.059304, val_loss: 0.057985, val_acc: 26.027138
+Epoch [889], train_loss: 0.059236, val_loss: 0.057947, val_acc: 26.004339
+Epoch [890], train_loss: 0.059244, val_loss: 0.057995, val_acc: 26.019222
+Epoch [891], train_loss: 0.059337, val_loss: 0.057995, val_acc: 26.005461
+Epoch [892], train_loss: 0.059323, val_loss: 0.057803, val_acc: 26.044920
+Epoch [893], train_loss: 0.059179, val_loss: 0.057849, val_acc: 26.035402
+Epoch [894], train_loss: 0.059253, val_loss: 0.057930, val_acc: 26.034931
+Epoch [895], train_loss: 0.059404, val_loss: 0.057828, val_acc: 26.025770
+Epoch [896], train_loss: 0.059291, val_loss: 0.057752, val_acc: 26.052006
+Epoch [897], train_loss: 0.059372, val_loss: 0.057827, val_acc: 26.084715
+Epoch [898], train_loss: 0.059155, val_loss: 0.058135, val_acc: 25.980736
+Epoch [899], train_loss: 0.059336, val_loss: 0.057835, val_acc: 26.015730
+Epoch [900], train_loss: 0.059275, val_loss: 0.058001, val_acc: 26.007483
+Epoch [901], train_loss: 0.059249, val_loss: 0.057880, val_acc: 26.004292
+Epoch [902], train_loss: 0.059328, val_loss: 0.057944, val_acc: 26.016270
+Epoch [903], train_loss: 0.059293, val_loss: 0.058154, val_acc: 25.969233
+Epoch [904], train_loss: 0.059312, val_loss: 0.057799, val_acc: 26.054785
+Epoch [905], train_loss: 0.059444, val_loss: 0.057995, val_acc: 26.009256
+Epoch [906], train_loss: 0.059347, val_loss: 0.057753, val_acc: 26.053728
+Epoch [907], train_loss: 0.059295, val_loss: 0.058019, val_acc: 25.988413
+Epoch [908], train_loss: 0.059398, val_loss: 0.057938, val_acc: 26.000147
+Epoch [909], train_loss: 0.059300, val_loss: 0.058031, val_acc: 25.982521
+Epoch [910], train_loss: 0.059323, val_loss: 0.057764, val_acc: 26.042505
+Epoch [911], train_loss: 0.059282, val_loss: 0.057992, val_acc: 25.981413
+Epoch [912], train_loss: 0.059307, val_loss: 0.057794, val_acc: 26.013067
+Epoch [913], train_loss: 0.059328, val_loss: 0.058179, val_acc: 25.987148
+Epoch [914], train_loss: 0.059285, val_loss: 0.058051, val_acc: 25.989906
+Epoch [915], train_loss: 0.059273, val_loss: 0.057774, val_acc: 26.061979
+Epoch [916], train_loss: 0.059454, val_loss: 0.057832, val_acc: 26.062595
+Epoch [917], train_loss: 0.059283, val_loss: 0.057821, val_acc: 26.027216
+Epoch [918], train_loss: 0.059128, val_loss: 0.057735, val_acc: 26.049406
+Epoch [919], train_loss: 0.059287, val_loss: 0.058146, val_acc: 25.972837
+Epoch [920], train_loss: 0.059188, val_loss: 0.058016, val_acc: 25.918903
+Epoch [921], train_loss: 0.059315, val_loss: 0.057927, val_acc: 26.027716
+Epoch [922], train_loss: 0.059238, val_loss: 0.058097, val_acc: 25.986525
+Epoch [923], train_loss: 0.059260, val_loss: 0.057925, val_acc: 26.038824
+Epoch [924], train_loss: 0.059309, val_loss: 0.057860, val_acc: 26.013279
+Epoch [925], train_loss: 0.059220, val_loss: 0.057850, val_acc: 26.020159
+Epoch [926], train_loss: 0.059180, val_loss: 0.057820, val_acc: 26.030838
+Epoch [927], train_loss: 0.059309, val_loss: 0.057890, val_acc: 26.027399
+Epoch [928], train_loss: 0.059199, val_loss: 0.057926, val_acc: 25.995794
+Epoch [929], train_loss: 0.059054, val_loss: 0.057939, val_acc: 25.991255
+Epoch [930], train_loss: 0.059102, val_loss: 0.057954, val_acc: 26.002344
+Epoch [931], train_loss: 0.059204, val_loss: 0.057870, val_acc: 26.033073
+Epoch [932], train_loss: 0.059161, val_loss: 0.058079, val_acc: 25.970646
+Epoch [933], train_loss: 0.059260, val_loss: 0.057848, val_acc: 26.034090
+Epoch [934], train_loss: 0.059314, val_loss: 0.058043, val_acc: 25.990339
+Epoch [935], train_loss: 0.059224, val_loss: 0.058040, val_acc: 25.995691
+Epoch [936], train_loss: 0.059248, val_loss: 0.058090, val_acc: 26.007372
+Epoch [937], train_loss: 0.059195, val_loss: 0.058213, val_acc: 25.976320
+Epoch [938], train_loss: 0.059274, val_loss: 0.058079, val_acc: 25.997499
+Epoch [939], train_loss: 0.059167, val_loss: 0.057902, val_acc: 26.019444
+Epoch [940], train_loss: 0.059250, val_loss: 0.057859, val_acc: 26.039322
+Epoch [941], train_loss: 0.059277, val_loss: 0.057997, val_acc: 26.001108
+Epoch [942], train_loss: 0.059263, val_loss: 0.057955, val_acc: 26.012424
+Epoch [943], train_loss: 0.059237, val_loss: 0.058017, val_acc: 26.032900
+Epoch [944], train_loss: 0.059253, val_loss: 0.057743, val_acc: 26.056293
+Epoch [945], train_loss: 0.059186, val_loss: 0.057865, val_acc: 26.026962
+Epoch [946], train_loss: 0.059274, val_loss: 0.057980, val_acc: 25.965309
+Epoch [947], train_loss: 0.059092, val_loss: 0.058066, val_acc: 26.006456
+Epoch [948], train_loss: 0.059188, val_loss: 0.057822, val_acc: 26.016180
+Epoch [949], train_loss: 0.059199, val_loss: 0.058005, val_acc: 25.996000
+Epoch [950], train_loss: 0.059231, val_loss: 0.058022, val_acc: 25.993410
+Epoch [951], train_loss: 0.059153, val_loss: 0.057966, val_acc: 26.024305
+Epoch [952], train_loss: 0.059225, val_loss: 0.057997, val_acc: 26.005234
+Epoch [953], train_loss: 0.059264, val_loss: 0.058129, val_acc: 25.980339
+Epoch [954], train_loss: 0.059229, val_loss: 0.057976, val_acc: 26.001541
+Epoch [955], train_loss: 0.059340, val_loss: 0.058016, val_acc: 26.014715
+Epoch [956], train_loss: 0.059103, val_loss: 0.057963, val_acc: 26.015184
+Epoch [957], train_loss: 0.059334, val_loss: 0.057838, val_acc: 26.045750
+Epoch [958], train_loss: 0.059143, val_loss: 0.057951, val_acc: 26.018879
+Epoch [959], train_loss: 0.059133, val_loss: 0.058071, val_acc: 25.978298
+Epoch [960], train_loss: 0.059287, val_loss: 0.058172, val_acc: 25.959488
+Epoch [961], train_loss: 0.059106, val_loss: 0.058024, val_acc: 25.991276
+Epoch [962], train_loss: 0.059242, val_loss: 0.058117, val_acc: 25.967104
+Epoch [963], train_loss: 0.059197, val_loss: 0.057861, val_acc: 26.017746
+Epoch [964], train_loss: 0.059252, val_loss: 0.058096, val_acc: 25.996719
+Epoch [965], train_loss: 0.059142, val_loss: 0.057823, val_acc: 26.051720
+Epoch [966], train_loss: 0.059265, val_loss: 0.058248, val_acc: 25.947727
+Epoch [967], train_loss: 0.059136, val_loss: 0.058206, val_acc: 25.967037
+Epoch [968], train_loss: 0.059163, val_loss: 0.057922, val_acc: 26.025448
+Epoch [969], train_loss: 0.059241, val_loss: 0.058003, val_acc: 25.984377
+Epoch [970], train_loss: 0.059203, val_loss: 0.057998, val_acc: 25.988022
+Epoch [971], train_loss: 0.059289, val_loss: 0.058080, val_acc: 26.003542
+Epoch [972], train_loss: 0.059247, val_loss: 0.058073, val_acc: 26.001143
+Epoch [973], train_loss: 0.059281, val_loss: 0.058018, val_acc: 26.004025
+Epoch [974], train_loss: 0.059211, val_loss: 0.057976, val_acc: 26.015963
+Epoch [975], train_loss: 0.059248, val_loss: 0.058010, val_acc: 26.008633
+Epoch [976], train_loss: 0.059194, val_loss: 0.058226, val_acc: 25.950901
+Epoch [977], train_loss: 0.059216, val_loss: 0.057949, val_acc: 26.040937
+Epoch [978], train_loss: 0.059188, val_loss: 0.058139, val_acc: 26.003687
+Epoch [979], train_loss: 0.059108, val_loss: 0.057893, val_acc: 26.003588
+Epoch [980], train_loss: 0.059279, val_loss: 0.057913, val_acc: 26.009537
+Epoch [981], train_loss: 0.059196, val_loss: 0.057879, val_acc: 26.022928
+Epoch [982], train_loss: 0.059221, val_loss: 0.057892, val_acc: 26.001440
+Epoch [983], train_loss: 0.059188, val_loss: 0.057913, val_acc: 26.029821
+Epoch [984], train_loss: 0.059202, val_loss: 0.058046, val_acc: 25.990301
+Epoch [985], train_loss: 0.059132, val_loss: 0.057895, val_acc: 26.015053
+Epoch [986], train_loss: 0.059260, val_loss: 0.057881, val_acc: 26.033566
+Epoch [987], train_loss: 0.059232, val_loss: 0.057954, val_acc: 26.010902
+Epoch [988], train_loss: 0.059137, val_loss: 0.057921, val_acc: 26.008226
+Epoch [989], train_loss: 0.059217, val_loss: 0.057731, val_acc: 26.058144
+Epoch [990], train_loss: 0.059134, val_loss: 0.058061, val_acc: 25.995493
+Epoch [991], train_loss: 0.059110, val_loss: 0.057781, val_acc: 26.030693
+Epoch [992], train_loss: 0.059118, val_loss: 0.057667, val_acc: 26.062492
+Epoch [993], train_loss: 0.059241, val_loss: 0.058005, val_acc: 26.000889
+Epoch [994], train_loss: 0.059286, val_loss: 0.058215, val_acc: 25.946360
+Epoch [995], train_loss: 0.059184, val_loss: 0.058055, val_acc: 25.988310
+Epoch [996], train_loss: 0.059095, val_loss: 0.057851, val_acc: 26.037157
+Epoch [997], train_loss: 0.059187, val_loss: 0.058108, val_acc: 25.988693
+Epoch [998], train_loss: 0.059244, val_loss: 0.057886, val_acc: 26.037477
+Epoch [999], train_loss: 0.059115, val_loss: 0.057826, val_acc: 26.056402
+python3 ./UNet_V15.py  23123.25s user 21609.33s system 99% cpu 12:25:46.60 total
diff --git a/UNet/Sim_logs/UNet_64_V16_25621936.log b/UNet/Sim_logs/UNet_64_V16_25621936.log
new file mode 100644
index 0000000000000000000000000000000000000000..36a8f64b40745f648c944d41e272fd4fa20a96e0
--- /dev/null
+++ b/UNet/Sim_logs/UNet_64_V16_25621936.log
@@ -0,0 +1,2195 @@
+(OK) Loading cuda 10.2.89
+(OK) Loading python 3.7.11
+(!!) The SciPy Stack is available: http://www.scipy.org/stackspec.html
+ Built with GCC compilers.
+Collecting torch==1.10.1
+  Using cached torch-1.10.1-cp37-cp37m-manylinux1_x86_64.whl (881.9 MB)
+Collecting typing-extensions
+  Using cached typing_extensions-4.1.1-py3-none-any.whl (26 kB)
+Installing collected packages: typing-extensions, torch
+  WARNING: The scripts convert-caffe2-to-onnx, convert-onnx-to-caffe2 and torchrun are installed in '/home/yk138599/.local/bin' which is not on PATH.
+  Consider adding this directory to PATH or, if you prefer to suppress this warning, use --no-warn-script-location.
+Successfully installed torch-1.10.1 typing-extensions-4.1.1
+WARNING: You are using pip version 21.2.4; however, version 22.0.3 is available.
+You should consider upgrading via the '/usr/local_rwth/sw/python/3.7.11/x86_64/bin/python3.7 -m pip install --upgrade pip' command.
+number auf epochs: 10000
+batchsize: 32
+learning rate: 3e-05
+kernel size is: 7
+ seed is: 373686838
+Epoch [0], train_loss: 0.168845, val_loss: 0.174461, val_acc: 4.991922
+Epoch [1], train_loss: 0.160490, val_loss: 0.158861, val_acc: 5.748340
+Epoch [2], train_loss: 0.154067, val_loss: 0.146968, val_acc: 6.964906
+Epoch [3], train_loss: 0.150147, val_loss: 0.152965, val_acc: 6.339164
+Epoch [4], train_loss: 0.146124, val_loss: 0.147941, val_acc: 6.695533
+Epoch [5], train_loss: 0.143041, val_loss: 0.143946, val_acc: 6.976186
+Epoch [6], train_loss: 0.140420, val_loss: 0.140474, val_acc: 7.403573
+Epoch [7], train_loss: 0.137796, val_loss: 0.143013, val_acc: 7.029576
+Epoch [8], train_loss: 0.135639, val_loss: 0.136906, val_acc: 7.630105
+Epoch [9], train_loss: 0.133776, val_loss: 0.133256, val_acc: 8.205185
+Epoch [10], train_loss: 0.131316, val_loss: 0.129475, val_acc: 8.671863
+Epoch [11], train_loss: 0.129881, val_loss: 0.132729, val_acc: 8.295653
+Epoch [12], train_loss: 0.127881, val_loss: 0.125653, val_acc: 9.271655
+Epoch [13], train_loss: 0.125599, val_loss: 0.125013, val_acc: 9.352047
+Epoch [14], train_loss: 0.124153, val_loss: 0.122846, val_acc: 9.650599
+Epoch [15], train_loss: 0.122395, val_loss: 0.117944, val_acc: 10.497877
+Epoch [16], train_loss: 0.120386, val_loss: 0.120860, val_acc: 10.022120
+Epoch [17], train_loss: 0.119065, val_loss: 0.115866, val_acc: 10.897371
+Epoch [18], train_loss: 0.118068, val_loss: 0.111300, val_acc: 11.667582
+Epoch [19], train_loss: 0.115243, val_loss: 0.109912, val_acc: 11.887475
+Epoch [20], train_loss: 0.114397, val_loss: 0.107813, val_acc: 12.294865
+Epoch [21], train_loss: 0.112897, val_loss: 0.106368, val_acc: 12.460942
+Epoch [22], train_loss: 0.110867, val_loss: 0.104946, val_acc: 12.750984
+Epoch [23], train_loss: 0.109809, val_loss: 0.103502, val_acc: 12.951015
+Epoch [24], train_loss: 0.108018, val_loss: 0.102153, val_acc: 13.131045
+Epoch [25], train_loss: 0.107899, val_loss: 0.102124, val_acc: 13.154884
+Epoch [26], train_loss: 0.105885, val_loss: 0.095906, val_acc: 13.804475
+Epoch [27], train_loss: 0.104420, val_loss: 0.097204, val_acc: 13.655335
+Epoch [28], train_loss: 0.102939, val_loss: 0.095530, val_acc: 13.852544
+Epoch [29], train_loss: 0.101860, val_loss: 0.093362, val_acc: 14.049843
+Epoch [30], train_loss: 0.100890, val_loss: 0.092600, val_acc: 14.182172
+Epoch [31], train_loss: 0.099719, val_loss: 0.090632, val_acc: 14.409969
+Epoch [32], train_loss: 0.098327, val_loss: 0.089589, val_acc: 14.496942
+Epoch [33], train_loss: 0.096867, val_loss: 0.088985, val_acc: 14.536060
+Epoch [34], train_loss: 0.096223, val_loss: 0.088073, val_acc: 14.744159
+Epoch [35], train_loss: 0.094762, val_loss: 0.087037, val_acc: 14.740526
+Epoch [36], train_loss: 0.093395, val_loss: 0.085093, val_acc: 15.035982
+Epoch [37], train_loss: 0.092797, val_loss: 0.083605, val_acc: 15.302075
+Epoch [38], train_loss: 0.091715, val_loss: 0.084173, val_acc: 15.240026
+Epoch [39], train_loss: 0.091150, val_loss: 0.083489, val_acc: 15.291345
+Epoch [40], train_loss: 0.089857, val_loss: 0.081211, val_acc: 15.801810
+Epoch [41], train_loss: 0.089152, val_loss: 0.080770, val_acc: 15.815366
+Epoch [42], train_loss: 0.088393, val_loss: 0.079911, val_acc: 16.136654
+Epoch [43], train_loss: 0.087435, val_loss: 0.078912, val_acc: 16.454351
+Epoch [44], train_loss: 0.086672, val_loss: 0.079232, val_acc: 16.414976
+Epoch [45], train_loss: 0.086117, val_loss: 0.078681, val_acc: 16.599716
+Epoch [46], train_loss: 0.085056, val_loss: 0.077527, val_acc: 16.781765
+Epoch [47], train_loss: 0.084237, val_loss: 0.076665, val_acc: 17.045750
+Epoch [48], train_loss: 0.083864, val_loss: 0.076611, val_acc: 17.285725
+Epoch [49], train_loss: 0.082603, val_loss: 0.075919, val_acc: 17.462597
+Epoch [50], train_loss: 0.081973, val_loss: 0.074137, val_acc: 17.890280
+Epoch [51], train_loss: 0.081515, val_loss: 0.074942, val_acc: 17.827368
+Epoch [52], train_loss: 0.080928, val_loss: 0.073240, val_acc: 18.221487
+Epoch [53], train_loss: 0.080167, val_loss: 0.073352, val_acc: 18.612915
+Epoch [54], train_loss: 0.080018, val_loss: 0.073797, val_acc: 18.820251
+Epoch [55], train_loss: 0.079149, val_loss: 0.072629, val_acc: 19.473537
+Epoch [56], train_loss: 0.078476, val_loss: 0.072519, val_acc: 19.546112
+Epoch [57], train_loss: 0.078477, val_loss: 0.073096, val_acc: 19.554127
+Epoch [58], train_loss: 0.077790, val_loss: 0.071741, val_acc: 19.710539
+Epoch [59], train_loss: 0.077225, val_loss: 0.071269, val_acc: 19.978628
+Epoch [60], train_loss: 0.077028, val_loss: 0.071073, val_acc: 20.128162
+Epoch [61], train_loss: 0.076416, val_loss: 0.070394, val_acc: 20.485897
+Epoch [62], train_loss: 0.075879, val_loss: 0.069525, val_acc: 21.188440
+Epoch [63], train_loss: 0.076028, val_loss: 0.069980, val_acc: 21.139072
+Epoch [64], train_loss: 0.075390, val_loss: 0.069336, val_acc: 21.290510
+Epoch [65], train_loss: 0.074887, val_loss: 0.069228, val_acc: 21.671934
+Epoch [66], train_loss: 0.074657, val_loss: 0.068860, val_acc: 21.723038
+Epoch [67], train_loss: 0.074263, val_loss: 0.068707, val_acc: 21.880920
+Epoch [68], train_loss: 0.074412, val_loss: 0.068355, val_acc: 22.054317
+Epoch [69], train_loss: 0.074295, val_loss: 0.068851, val_acc: 22.037310
+Epoch [70], train_loss: 0.073386, val_loss: 0.067292, val_acc: 22.682919
+Epoch [71], train_loss: 0.073210, val_loss: 0.067792, val_acc: 22.663933
+Epoch [72], train_loss: 0.073375, val_loss: 0.067759, val_acc: 22.656836
+Epoch [73], train_loss: 0.072964, val_loss: 0.066224, val_acc: 23.509735
+Epoch [74], train_loss: 0.073008, val_loss: 0.066896, val_acc: 23.233366
+Epoch [75], train_loss: 0.072821, val_loss: 0.066734, val_acc: 23.291636
+Epoch [76], train_loss: 0.072793, val_loss: 0.066866, val_acc: 23.249365
+Epoch [77], train_loss: 0.072346, val_loss: 0.066872, val_acc: 23.498217
+Epoch [78], train_loss: 0.072357, val_loss: 0.065861, val_acc: 23.629913
+Epoch [79], train_loss: 0.071909, val_loss: 0.066869, val_acc: 23.358932
+Epoch [80], train_loss: 0.071685, val_loss: 0.066157, val_acc: 23.656683
+Epoch [81], train_loss: 0.071651, val_loss: 0.065861, val_acc: 23.883972
+Epoch [82], train_loss: 0.071605, val_loss: 0.066119, val_acc: 23.884579
+Epoch [83], train_loss: 0.071378, val_loss: 0.066088, val_acc: 23.908697
+Epoch [84], train_loss: 0.071599, val_loss: 0.066181, val_acc: 23.776138
+Epoch [85], train_loss: 0.070976, val_loss: 0.065175, val_acc: 24.173101
+Epoch [86], train_loss: 0.070865, val_loss: 0.065166, val_acc: 23.976282
+Epoch [87], train_loss: 0.070797, val_loss: 0.065676, val_acc: 24.096777
+Epoch [88], train_loss: 0.071225, val_loss: 0.066024, val_acc: 23.937283
+Epoch [89], train_loss: 0.071501, val_loss: 0.065698, val_acc: 23.952152
+Epoch [90], train_loss: 0.070420, val_loss: 0.065764, val_acc: 24.036730
+Epoch [91], train_loss: 0.070524, val_loss: 0.065867, val_acc: 24.097422
+Epoch [92], train_loss: 0.071298, val_loss: 0.066153, val_acc: 24.079206
+Epoch [93], train_loss: 0.070597, val_loss: 0.065544, val_acc: 24.244892
+Epoch [94], train_loss: 0.070413, val_loss: 0.065321, val_acc: 24.313107
+Epoch [95], train_loss: 0.070449, val_loss: 0.065265, val_acc: 24.309532
+Epoch [96], train_loss: 0.070530, val_loss: 0.064856, val_acc: 24.429749
+Epoch [97], train_loss: 0.070251, val_loss: 0.064822, val_acc: 24.376297
+Epoch [98], train_loss: 0.070318, val_loss: 0.064975, val_acc: 24.300858
+Epoch [99], train_loss: 0.069945, val_loss: 0.065159, val_acc: 24.242945
+Epoch [100], train_loss: 0.070604, val_loss: 0.064513, val_acc: 24.533222
+Epoch [101], train_loss: 0.070065, val_loss: 0.064851, val_acc: 24.484571
+Epoch [102], train_loss: 0.069775, val_loss: 0.064632, val_acc: 24.509977
+Epoch [103], train_loss: 0.069728, val_loss: 0.063950, val_acc: 24.595066
+Epoch [104], train_loss: 0.070094, val_loss: 0.064348, val_acc: 24.517954
+Epoch [105], train_loss: 0.069866, val_loss: 0.064469, val_acc: 24.512754
+Epoch [106], train_loss: 0.070263, val_loss: 0.065508, val_acc: 24.321589
+Epoch [107], train_loss: 0.069494, val_loss: 0.063861, val_acc: 24.686893
+Epoch [108], train_loss: 0.069861, val_loss: 0.065328, val_acc: 24.454823
+Epoch [109], train_loss: 0.069419, val_loss: 0.064537, val_acc: 24.549656
+Epoch [110], train_loss: 0.069497, val_loss: 0.064224, val_acc: 24.672031
+Epoch [111], train_loss: 0.069224, val_loss: 0.064501, val_acc: 24.560562
+Epoch [112], train_loss: 0.069122, val_loss: 0.063822, val_acc: 24.677557
+Epoch [113], train_loss: 0.069378, val_loss: 0.063532, val_acc: 24.740559
+Epoch [114], train_loss: 0.069430, val_loss: 0.063873, val_acc: 24.555901
+Epoch [115], train_loss: 0.069483, val_loss: 0.064191, val_acc: 24.538288
+Epoch [116], train_loss: 0.069532, val_loss: 0.064531, val_acc: 24.505613
+Epoch [117], train_loss: 0.069088, val_loss: 0.063613, val_acc: 24.803665
+Epoch [118], train_loss: 0.068883, val_loss: 0.063499, val_acc: 24.756615
+Epoch [119], train_loss: 0.069114, val_loss: 0.063349, val_acc: 24.794662
+Epoch [120], train_loss: 0.068853, val_loss: 0.064031, val_acc: 24.772987
+Epoch [121], train_loss: 0.068974, val_loss: 0.063456, val_acc: 24.773870
+Epoch [122], train_loss: 0.068661, val_loss: 0.063517, val_acc: 24.738514
+Epoch [123], train_loss: 0.068518, val_loss: 0.063750, val_acc: 24.688797
+Epoch [124], train_loss: 0.068542, val_loss: 0.063282, val_acc: 24.784706
+Epoch [125], train_loss: 0.068653, val_loss: 0.063485, val_acc: 24.708849
+Epoch [126], train_loss: 0.068558, val_loss: 0.063978, val_acc: 24.723951
+Epoch [127], train_loss: 0.068403, val_loss: 0.063310, val_acc: 24.787109
+Epoch [128], train_loss: 0.068355, val_loss: 0.063425, val_acc: 24.761827
+Epoch [129], train_loss: 0.068250, val_loss: 0.062995, val_acc: 24.926975
+Epoch [130], train_loss: 0.068374, val_loss: 0.063174, val_acc: 24.876902
+Epoch [131], train_loss: 0.068699, val_loss: 0.063776, val_acc: 24.746531
+Epoch [132], train_loss: 0.068230, val_loss: 0.063328, val_acc: 24.725134
+Epoch [133], train_loss: 0.068203, val_loss: 0.063426, val_acc: 24.720770
+Epoch [134], train_loss: 0.068276, val_loss: 0.063171, val_acc: 24.800743
+Epoch [135], train_loss: 0.068162, val_loss: 0.063139, val_acc: 24.837925
+Epoch [136], train_loss: 0.068011, val_loss: 0.063159, val_acc: 24.817072
+Epoch [137], train_loss: 0.067701, val_loss: 0.062862, val_acc: 24.878607
+Epoch [138], train_loss: 0.068041, val_loss: 0.063452, val_acc: 24.801052
+Epoch [139], train_loss: 0.067926, val_loss: 0.062905, val_acc: 24.956762
+Epoch [140], train_loss: 0.067990, val_loss: 0.063055, val_acc: 24.969976
+Epoch [141], train_loss: 0.067735, val_loss: 0.063045, val_acc: 24.918095
+Epoch [142], train_loss: 0.067940, val_loss: 0.062787, val_acc: 24.985184
+Epoch [143], train_loss: 0.067872, val_loss: 0.063397, val_acc: 24.812614
+Epoch [144], train_loss: 0.067370, val_loss: 0.062865, val_acc: 24.942064
+Epoch [145], train_loss: 0.067507, val_loss: 0.062711, val_acc: 24.950436
+Epoch [146], train_loss: 0.067449, val_loss: 0.062504, val_acc: 25.032307
+Epoch [147], train_loss: 0.067772, val_loss: 0.062830, val_acc: 24.906591
+Epoch [148], train_loss: 0.067577, val_loss: 0.063057, val_acc: 24.957636
+Epoch [149], train_loss: 0.067007, val_loss: 0.062616, val_acc: 25.011036
+Epoch [150], train_loss: 0.067342, val_loss: 0.062695, val_acc: 24.942703
+Epoch [151], train_loss: 0.067143, val_loss: 0.062302, val_acc: 25.045923
+Epoch [152], train_loss: 0.067542, val_loss: 0.062510, val_acc: 25.025803
+Epoch [153], train_loss: 0.067316, val_loss: 0.062260, val_acc: 25.075682
+Epoch [154], train_loss: 0.067538, val_loss: 0.062755, val_acc: 24.994501
+Epoch [155], train_loss: 0.066779, val_loss: 0.062410, val_acc: 25.087215
+Epoch [156], train_loss: 0.067031, val_loss: 0.062460, val_acc: 25.006536
+Epoch [157], train_loss: 0.067296, val_loss: 0.062932, val_acc: 25.038906
+Epoch [158], train_loss: 0.067229, val_loss: 0.062545, val_acc: 25.028826
+Epoch [159], train_loss: 0.066833, val_loss: 0.062140, val_acc: 25.053858
+Epoch [160], train_loss: 0.067128, val_loss: 0.062655, val_acc: 25.069271
+Epoch [161], train_loss: 0.067056, val_loss: 0.062373, val_acc: 24.916945
+Epoch [162], train_loss: 0.066902, val_loss: 0.062247, val_acc: 25.106918
+Epoch [163], train_loss: 0.066948, val_loss: 0.062061, val_acc: 25.126493
+Epoch [164], train_loss: 0.066537, val_loss: 0.062107, val_acc: 25.089701
+Epoch [165], train_loss: 0.067097, val_loss: 0.062253, val_acc: 25.076899
+Epoch [166], train_loss: 0.067154, val_loss: 0.062599, val_acc: 25.025444
+Epoch [167], train_loss: 0.066606, val_loss: 0.061876, val_acc: 25.118395
+Epoch [168], train_loss: 0.066782, val_loss: 0.062074, val_acc: 25.109901
+Epoch [169], train_loss: 0.066574, val_loss: 0.061836, val_acc: 25.147264
+Epoch [170], train_loss: 0.066489, val_loss: 0.062020, val_acc: 25.110254
+Epoch [171], train_loss: 0.066578, val_loss: 0.062266, val_acc: 25.002304
+Epoch [172], train_loss: 0.066820, val_loss: 0.061756, val_acc: 25.163660
+Epoch [173], train_loss: 0.066619, val_loss: 0.062148, val_acc: 25.079676
+Epoch [174], train_loss: 0.066577, val_loss: 0.062134, val_acc: 25.112118
+Epoch [175], train_loss: 0.066359, val_loss: 0.062340, val_acc: 25.117765
+Epoch [176], train_loss: 0.066439, val_loss: 0.061707, val_acc: 25.141546
+Epoch [177], train_loss: 0.066684, val_loss: 0.062075, val_acc: 25.138575
+Epoch [178], train_loss: 0.066310, val_loss: 0.062005, val_acc: 25.141455
+Epoch [179], train_loss: 0.066448, val_loss: 0.061658, val_acc: 25.230019
+Epoch [180], train_loss: 0.066418, val_loss: 0.062136, val_acc: 25.098301
+Epoch [181], train_loss: 0.066295, val_loss: 0.061932, val_acc: 25.140265
+Epoch [182], train_loss: 0.066135, val_loss: 0.061414, val_acc: 25.228239
+Epoch [183], train_loss: 0.066232, val_loss: 0.061930, val_acc: 25.145504
+Epoch [184], train_loss: 0.066284, val_loss: 0.061271, val_acc: 25.246733
+Epoch [185], train_loss: 0.066339, val_loss: 0.061781, val_acc: 25.135101
+Epoch [186], train_loss: 0.065894, val_loss: 0.061530, val_acc: 25.212845
+Epoch [187], train_loss: 0.066186, val_loss: 0.061329, val_acc: 25.236353
+Epoch [188], train_loss: 0.066077, val_loss: 0.061497, val_acc: 25.193903
+Epoch [189], train_loss: 0.066320, val_loss: 0.061726, val_acc: 25.166857
+Epoch [190], train_loss: 0.066015, val_loss: 0.061386, val_acc: 25.246902
+Epoch [191], train_loss: 0.066062, val_loss: 0.061467, val_acc: 25.241844
+Epoch [192], train_loss: 0.065781, val_loss: 0.061175, val_acc: 25.297987
+Epoch [193], train_loss: 0.065874, val_loss: 0.061224, val_acc: 25.270954
+Epoch [194], train_loss: 0.065780, val_loss: 0.061755, val_acc: 25.222443
+Epoch [195], train_loss: 0.065864, val_loss: 0.061587, val_acc: 25.224098
+Epoch [196], train_loss: 0.065767, val_loss: 0.061183, val_acc: 25.277443
+Epoch [197], train_loss: 0.065790, val_loss: 0.061479, val_acc: 25.259806
+Epoch [198], train_loss: 0.066022, val_loss: 0.061871, val_acc: 25.191118
+Epoch [199], train_loss: 0.065640, val_loss: 0.061609, val_acc: 25.218483
+Epoch [200], train_loss: 0.065724, val_loss: 0.061531, val_acc: 25.249456
+Epoch [201], train_loss: 0.065656, val_loss: 0.061757, val_acc: 25.182392
+Epoch [202], train_loss: 0.065451, val_loss: 0.061348, val_acc: 25.252708
+Epoch [203], train_loss: 0.065479, val_loss: 0.061459, val_acc: 25.276194
+Epoch [204], train_loss: 0.065730, val_loss: 0.060979, val_acc: 25.337156
+Epoch [205], train_loss: 0.065353, val_loss: 0.061259, val_acc: 25.311674
+Epoch [206], train_loss: 0.065516, val_loss: 0.061302, val_acc: 25.267513
+Epoch [207], train_loss: 0.065478, val_loss: 0.061386, val_acc: 25.264492
+Epoch [208], train_loss: 0.065346, val_loss: 0.061487, val_acc: 25.256983
+Epoch [209], train_loss: 0.065490, val_loss: 0.061410, val_acc: 25.260298
+Epoch [210], train_loss: 0.065618, val_loss: 0.061177, val_acc: 25.275518
+Epoch [211], train_loss: 0.065576, val_loss: 0.061081, val_acc: 25.307432
+Epoch [212], train_loss: 0.065403, val_loss: 0.060916, val_acc: 25.359329
+Epoch [213], train_loss: 0.065667, val_loss: 0.061687, val_acc: 25.224136
+Epoch [214], train_loss: 0.065235, val_loss: 0.060925, val_acc: 25.346512
+Epoch [215], train_loss: 0.065297, val_loss: 0.061107, val_acc: 25.284014
+Epoch [216], train_loss: 0.065196, val_loss: 0.061049, val_acc: 25.285082
+Epoch [217], train_loss: 0.065273, val_loss: 0.061465, val_acc: 25.170521
+Epoch [218], train_loss: 0.065257, val_loss: 0.060948, val_acc: 25.296482
+Epoch [219], train_loss: 0.065281, val_loss: 0.060962, val_acc: 25.316101
+Epoch [220], train_loss: 0.065396, val_loss: 0.060780, val_acc: 25.312222
+Epoch [221], train_loss: 0.065096, val_loss: 0.060665, val_acc: 25.377651
+Epoch [222], train_loss: 0.065359, val_loss: 0.060499, val_acc: 25.366423
+Epoch [223], train_loss: 0.065207, val_loss: 0.060843, val_acc: 25.341263
+Epoch [224], train_loss: 0.065282, val_loss: 0.060729, val_acc: 25.322924
+Epoch [225], train_loss: 0.065062, val_loss: 0.060695, val_acc: 25.348856
+Epoch [226], train_loss: 0.065123, val_loss: 0.060596, val_acc: 25.351231
+Epoch [227], train_loss: 0.065330, val_loss: 0.061100, val_acc: 25.255232
+Epoch [228], train_loss: 0.065013, val_loss: 0.060975, val_acc: 25.308685
+Epoch [229], train_loss: 0.065404, val_loss: 0.061197, val_acc: 25.303410
+Epoch [230], train_loss: 0.065072, val_loss: 0.060617, val_acc: 25.385765
+Epoch [231], train_loss: 0.065097, val_loss: 0.060914, val_acc: 25.351851
+Epoch [232], train_loss: 0.065001, val_loss: 0.060861, val_acc: 25.314909
+Epoch [233], train_loss: 0.064705, val_loss: 0.060712, val_acc: 25.322691
+Epoch [234], train_loss: 0.065150, val_loss: 0.061040, val_acc: 25.272348
+Epoch [235], train_loss: 0.064758, val_loss: 0.060790, val_acc: 25.368315
+Epoch [236], train_loss: 0.065177, val_loss: 0.060853, val_acc: 25.342413
+Epoch [237], train_loss: 0.064869, val_loss: 0.060812, val_acc: 25.360016
+Epoch [238], train_loss: 0.064741, val_loss: 0.060582, val_acc: 25.367981
+Epoch [239], train_loss: 0.064872, val_loss: 0.060676, val_acc: 25.321922
+Epoch [240], train_loss: 0.064966, val_loss: 0.060677, val_acc: 25.318613
+Epoch [241], train_loss: 0.064999, val_loss: 0.060703, val_acc: 25.337286
+Epoch [242], train_loss: 0.064980, val_loss: 0.060655, val_acc: 25.374495
+Epoch [243], train_loss: 0.064727, val_loss: 0.060175, val_acc: 25.394575
+Epoch [244], train_loss: 0.064740, val_loss: 0.060470, val_acc: 25.329229
+Epoch [245], train_loss: 0.065037, val_loss: 0.060332, val_acc: 25.413174
+Epoch [246], train_loss: 0.064433, val_loss: 0.060482, val_acc: 25.391659
+Epoch [247], train_loss: 0.065027, val_loss: 0.060474, val_acc: 25.373226
+Epoch [248], train_loss: 0.064610, val_loss: 0.060253, val_acc: 25.376287
+Epoch [249], train_loss: 0.064815, val_loss: 0.060536, val_acc: 25.331715
+Epoch [250], train_loss: 0.064806, val_loss: 0.060564, val_acc: 25.347696
+Epoch [251], train_loss: 0.064720, val_loss: 0.060361, val_acc: 25.382393
+Epoch [252], train_loss: 0.064582, val_loss: 0.060126, val_acc: 25.430908
+Epoch [253], train_loss: 0.064788, val_loss: 0.060293, val_acc: 25.396555
+Epoch [254], train_loss: 0.064747, val_loss: 0.060230, val_acc: 25.374662
+Epoch [255], train_loss: 0.064776, val_loss: 0.060245, val_acc: 25.397884
+Epoch [256], train_loss: 0.064832, val_loss: 0.060232, val_acc: 25.441389
+Epoch [257], train_loss: 0.064468, val_loss: 0.060433, val_acc: 25.391323
+Epoch [258], train_loss: 0.064662, val_loss: 0.060154, val_acc: 25.419580
+Epoch [259], train_loss: 0.064684, val_loss: 0.060273, val_acc: 25.400562
+Epoch [260], train_loss: 0.064290, val_loss: 0.060165, val_acc: 25.411112
+Epoch [261], train_loss: 0.064595, val_loss: 0.060251, val_acc: 25.382385
+Epoch [262], train_loss: 0.064397, val_loss: 0.060177, val_acc: 25.432632
+Epoch [263], train_loss: 0.064488, val_loss: 0.060275, val_acc: 25.390247
+Epoch [264], train_loss: 0.064427, val_loss: 0.060197, val_acc: 25.427607
+Epoch [265], train_loss: 0.064511, val_loss: 0.060081, val_acc: 25.422529
+Epoch [266], train_loss: 0.064419, val_loss: 0.060502, val_acc: 25.369303
+Epoch [267], train_loss: 0.064545, val_loss: 0.059895, val_acc: 25.450144
+Epoch [268], train_loss: 0.064395, val_loss: 0.059863, val_acc: 25.462948
+Epoch [269], train_loss: 0.064336, val_loss: 0.060103, val_acc: 25.437332
+Epoch [270], train_loss: 0.064364, val_loss: 0.059924, val_acc: 25.423813
+Epoch [271], train_loss: 0.064288, val_loss: 0.060355, val_acc: 25.388229
+Epoch [272], train_loss: 0.064332, val_loss: 0.060252, val_acc: 25.408432
+Epoch [273], train_loss: 0.064425, val_loss: 0.060340, val_acc: 25.365913
+Epoch [274], train_loss: 0.064453, val_loss: 0.060303, val_acc: 25.404213
+Epoch [275], train_loss: 0.064447, val_loss: 0.060249, val_acc: 25.388481
+Epoch [276], train_loss: 0.064191, val_loss: 0.060414, val_acc: 25.384672
+Epoch [277], train_loss: 0.064410, val_loss: 0.060155, val_acc: 25.411772
+Epoch [278], train_loss: 0.064456, val_loss: 0.059825, val_acc: 25.456631
+Epoch [279], train_loss: 0.064353, val_loss: 0.060208, val_acc: 25.385759
+Epoch [280], train_loss: 0.064283, val_loss: 0.060259, val_acc: 25.364475
+Epoch [281], train_loss: 0.064100, val_loss: 0.059958, val_acc: 25.445553
+Epoch [282], train_loss: 0.064183, val_loss: 0.059899, val_acc: 25.442595
+Epoch [283], train_loss: 0.064208, val_loss: 0.059957, val_acc: 25.416147
+Epoch [284], train_loss: 0.064319, val_loss: 0.060035, val_acc: 25.446562
+Epoch [285], train_loss: 0.064147, val_loss: 0.060259, val_acc: 25.395086
+Epoch [286], train_loss: 0.064012, val_loss: 0.059855, val_acc: 25.440506
+Epoch [287], train_loss: 0.064007, val_loss: 0.060030, val_acc: 25.424801
+Epoch [288], train_loss: 0.063990, val_loss: 0.060113, val_acc: 25.389952
+Epoch [289], train_loss: 0.064182, val_loss: 0.060100, val_acc: 25.429014
+Epoch [290], train_loss: 0.064419, val_loss: 0.060036, val_acc: 25.431650
+Epoch [291], train_loss: 0.064202, val_loss: 0.060118, val_acc: 25.429064
+Epoch [292], train_loss: 0.064142, val_loss: 0.060411, val_acc: 25.351519
+Epoch [293], train_loss: 0.064105, val_loss: 0.059914, val_acc: 25.434738
+Epoch [294], train_loss: 0.064291, val_loss: 0.060310, val_acc: 25.380960
+Epoch [295], train_loss: 0.064068, val_loss: 0.060126, val_acc: 25.401497
+Epoch [296], train_loss: 0.064001, val_loss: 0.060046, val_acc: 25.422312
+Epoch [297], train_loss: 0.064210, val_loss: 0.059999, val_acc: 25.420214
+Epoch [298], train_loss: 0.063766, val_loss: 0.059976, val_acc: 25.429392
+Epoch [299], train_loss: 0.064185, val_loss: 0.059731, val_acc: 25.477137
+Epoch [300], train_loss: 0.064151, val_loss: 0.060423, val_acc: 25.372553
+Epoch [301], train_loss: 0.063746, val_loss: 0.060120, val_acc: 25.418562
+Epoch [302], train_loss: 0.063720, val_loss: 0.059893, val_acc: 25.456732
+Epoch [303], train_loss: 0.064039, val_loss: 0.059945, val_acc: 25.440880
+Epoch [304], train_loss: 0.063754, val_loss: 0.059862, val_acc: 25.457279
+Epoch [305], train_loss: 0.064269, val_loss: 0.059824, val_acc: 25.453789
+Epoch [306], train_loss: 0.064037, val_loss: 0.060153, val_acc: 25.422882
+Epoch [307], train_loss: 0.063936, val_loss: 0.059992, val_acc: 25.433863
+Epoch [308], train_loss: 0.063896, val_loss: 0.060234, val_acc: 25.396101
+Epoch [309], train_loss: 0.063925, val_loss: 0.060117, val_acc: 25.422104
+Epoch [310], train_loss: 0.064103, val_loss: 0.060203, val_acc: 25.392506
+Epoch [311], train_loss: 0.063704, val_loss: 0.059693, val_acc: 25.496056
+Epoch [312], train_loss: 0.063983, val_loss: 0.059714, val_acc: 25.458960
+Epoch [313], train_loss: 0.063957, val_loss: 0.059933, val_acc: 25.435316
+Epoch [314], train_loss: 0.063668, val_loss: 0.060015, val_acc: 25.422380
+Epoch [315], train_loss: 0.063857, val_loss: 0.060045, val_acc: 25.414570
+Epoch [316], train_loss: 0.063828, val_loss: 0.059728, val_acc: 25.489481
+Epoch [317], train_loss: 0.063730, val_loss: 0.059845, val_acc: 25.465725
+Epoch [318], train_loss: 0.063679, val_loss: 0.059779, val_acc: 25.447079
+Epoch [319], train_loss: 0.063562, val_loss: 0.060013, val_acc: 25.418829
+Epoch [320], train_loss: 0.063624, val_loss: 0.059817, val_acc: 25.454243
+Epoch [321], train_loss: 0.063778, val_loss: 0.059830, val_acc: 25.463562
+Epoch [322], train_loss: 0.063851, val_loss: 0.060021, val_acc: 25.426388
+Epoch [323], train_loss: 0.063665, val_loss: 0.059912, val_acc: 25.440132
+Epoch [324], train_loss: 0.063616, val_loss: 0.059975, val_acc: 25.425556
+Epoch [325], train_loss: 0.063850, val_loss: 0.059670, val_acc: 25.495611
+Epoch [326], train_loss: 0.063601, val_loss: 0.059859, val_acc: 25.449091
+Epoch [327], train_loss: 0.063712, val_loss: 0.059842, val_acc: 25.448746
+Epoch [328], train_loss: 0.063902, val_loss: 0.059894, val_acc: 25.445217
+Epoch [329], train_loss: 0.063468, val_loss: 0.059766, val_acc: 25.468855
+Epoch [330], train_loss: 0.063705, val_loss: 0.059732, val_acc: 25.474827
+Epoch [331], train_loss: 0.063879, val_loss: 0.059878, val_acc: 25.445084
+Epoch [332], train_loss: 0.063730, val_loss: 0.059983, val_acc: 25.415167
+Epoch [333], train_loss: 0.063649, val_loss: 0.059815, val_acc: 25.457464
+Epoch [334], train_loss: 0.063607, val_loss: 0.059687, val_acc: 25.482193
+Epoch [335], train_loss: 0.063683, val_loss: 0.059690, val_acc: 25.456230
+Epoch [336], train_loss: 0.063647, val_loss: 0.059557, val_acc: 25.507841
+Epoch [337], train_loss: 0.063675, val_loss: 0.059627, val_acc: 25.492960
+Epoch [338], train_loss: 0.063388, val_loss: 0.059984, val_acc: 25.409569
+Epoch [339], train_loss: 0.063579, val_loss: 0.060059, val_acc: 25.416443
+Epoch [340], train_loss: 0.063459, val_loss: 0.059540, val_acc: 25.496029
+Epoch [341], train_loss: 0.063584, val_loss: 0.059717, val_acc: 25.489845
+Epoch [342], train_loss: 0.063616, val_loss: 0.059722, val_acc: 25.482798
+Epoch [343], train_loss: 0.063620, val_loss: 0.059561, val_acc: 25.494837
+Epoch [344], train_loss: 0.063654, val_loss: 0.059826, val_acc: 25.452356
+Epoch [345], train_loss: 0.063619, val_loss: 0.059774, val_acc: 25.454975
+Epoch [346], train_loss: 0.063523, val_loss: 0.059554, val_acc: 25.502337
+Epoch [347], train_loss: 0.063556, val_loss: 0.059411, val_acc: 25.515003
+Epoch [348], train_loss: 0.063370, val_loss: 0.059993, val_acc: 25.432295
+Epoch [349], train_loss: 0.063497, val_loss: 0.059515, val_acc: 25.508972
+Epoch [350], train_loss: 0.063169, val_loss: 0.059741, val_acc: 25.481380
+Epoch [351], train_loss: 0.063415, val_loss: 0.059741, val_acc: 25.479826
+Epoch [352], train_loss: 0.063516, val_loss: 0.059667, val_acc: 25.506708
+Epoch [353], train_loss: 0.063411, val_loss: 0.059809, val_acc: 25.452602
+Epoch [354], train_loss: 0.063291, val_loss: 0.059825, val_acc: 25.440750
+Epoch [355], train_loss: 0.063520, val_loss: 0.060015, val_acc: 25.404371
+Epoch [356], train_loss: 0.063517, val_loss: 0.059753, val_acc: 25.462336
+Epoch [357], train_loss: 0.063238, val_loss: 0.059335, val_acc: 25.559193
+Epoch [358], train_loss: 0.063386, val_loss: 0.059382, val_acc: 25.507683
+Epoch [359], train_loss: 0.063291, val_loss: 0.059494, val_acc: 25.518024
+Epoch [360], train_loss: 0.063480, val_loss: 0.059886, val_acc: 25.422028
+Epoch [361], train_loss: 0.063378, val_loss: 0.059821, val_acc: 25.448383
+Epoch [362], train_loss: 0.063342, val_loss: 0.059676, val_acc: 25.474169
+Epoch [363], train_loss: 0.063172, val_loss: 0.060014, val_acc: 25.432800
+Epoch [364], train_loss: 0.063149, val_loss: 0.059600, val_acc: 25.470356
+Epoch [365], train_loss: 0.063443, val_loss: 0.059642, val_acc: 25.482615
+Epoch [366], train_loss: 0.063490, val_loss: 0.059619, val_acc: 25.522095
+Epoch [367], train_loss: 0.063495, val_loss: 0.059651, val_acc: 25.469719
+Epoch [368], train_loss: 0.063311, val_loss: 0.059322, val_acc: 25.562712
+Epoch [369], train_loss: 0.063333, val_loss: 0.059559, val_acc: 25.508064
+Epoch [370], train_loss: 0.063250, val_loss: 0.059759, val_acc: 25.464643
+Epoch [371], train_loss: 0.063359, val_loss: 0.059700, val_acc: 25.457441
+Epoch [372], train_loss: 0.063506, val_loss: 0.059505, val_acc: 25.477940
+Epoch [373], train_loss: 0.063237, val_loss: 0.059382, val_acc: 25.515081
+Epoch [374], train_loss: 0.063238, val_loss: 0.059763, val_acc: 25.462269
+Epoch [375], train_loss: 0.063316, val_loss: 0.059455, val_acc: 25.525221
+Epoch [376], train_loss: 0.063221, val_loss: 0.059326, val_acc: 25.530239
+Epoch [377], train_loss: 0.063369, val_loss: 0.059890, val_acc: 25.472713
+Epoch [378], train_loss: 0.063187, val_loss: 0.059729, val_acc: 25.477583
+Epoch [379], train_loss: 0.063486, val_loss: 0.059374, val_acc: 25.540648
+Epoch [380], train_loss: 0.063420, val_loss: 0.059697, val_acc: 25.478836
+Epoch [381], train_loss: 0.063202, val_loss: 0.059461, val_acc: 25.524403
+Epoch [382], train_loss: 0.063205, val_loss: 0.059518, val_acc: 25.484226
+Epoch [383], train_loss: 0.063365, val_loss: 0.059675, val_acc: 25.464109
+Epoch [384], train_loss: 0.063224, val_loss: 0.059514, val_acc: 25.474060
+Epoch [385], train_loss: 0.063390, val_loss: 0.059923, val_acc: 25.451628
+Epoch [386], train_loss: 0.063106, val_loss: 0.059376, val_acc: 25.512455
+Epoch [387], train_loss: 0.063287, val_loss: 0.059399, val_acc: 25.523064
+Epoch [388], train_loss: 0.063158, val_loss: 0.059501, val_acc: 25.506771
+Epoch [389], train_loss: 0.063212, val_loss: 0.059606, val_acc: 25.471428
+Epoch [390], train_loss: 0.063194, val_loss: 0.059723, val_acc: 25.465042
+Epoch [391], train_loss: 0.063267, val_loss: 0.059374, val_acc: 25.515312
+Epoch [392], train_loss: 0.062986, val_loss: 0.059318, val_acc: 25.518927
+Epoch [393], train_loss: 0.063037, val_loss: 0.059470, val_acc: 25.503950
+Epoch [394], train_loss: 0.063232, val_loss: 0.059410, val_acc: 25.523396
+Epoch [395], train_loss: 0.062971, val_loss: 0.059346, val_acc: 25.529020
+Epoch [396], train_loss: 0.063264, val_loss: 0.059662, val_acc: 25.475061
+Epoch [397], train_loss: 0.063085, val_loss: 0.059623, val_acc: 25.490767
+Epoch [398], train_loss: 0.063113, val_loss: 0.059487, val_acc: 25.476089
+Epoch [399], train_loss: 0.063166, val_loss: 0.059561, val_acc: 25.501463
+Epoch [400], train_loss: 0.063042, val_loss: 0.059255, val_acc: 25.556482
+Epoch [401], train_loss: 0.063237, val_loss: 0.059343, val_acc: 25.512871
+Epoch [402], train_loss: 0.063238, val_loss: 0.059416, val_acc: 25.520475
+Epoch [403], train_loss: 0.063102, val_loss: 0.059319, val_acc: 25.519026
+Epoch [404], train_loss: 0.062980, val_loss: 0.059647, val_acc: 25.478821
+Epoch [405], train_loss: 0.063022, val_loss: 0.059280, val_acc: 25.549263
+Epoch [406], train_loss: 0.063037, val_loss: 0.059415, val_acc: 25.508488
+Epoch [407], train_loss: 0.063062, val_loss: 0.059752, val_acc: 25.447065
+Epoch [408], train_loss: 0.062909, val_loss: 0.059688, val_acc: 25.462135
+Epoch [409], train_loss: 0.063121, val_loss: 0.059285, val_acc: 25.499601
+Epoch [410], train_loss: 0.063050, val_loss: 0.059281, val_acc: 25.539076
+Epoch [411], train_loss: 0.063036, val_loss: 0.059361, val_acc: 25.528044
+Epoch [412], train_loss: 0.063053, val_loss: 0.059398, val_acc: 25.512363
+Epoch [413], train_loss: 0.062882, val_loss: 0.059330, val_acc: 25.518162
+Epoch [414], train_loss: 0.062885, val_loss: 0.059416, val_acc: 25.500742
+Epoch [415], train_loss: 0.063106, val_loss: 0.059610, val_acc: 25.504927
+Epoch [416], train_loss: 0.063145, val_loss: 0.059215, val_acc: 25.522274
+Epoch [417], train_loss: 0.063142, val_loss: 0.059437, val_acc: 25.483313
+Epoch [418], train_loss: 0.063174, val_loss: 0.059740, val_acc: 25.443394
+Epoch [419], train_loss: 0.062950, val_loss: 0.059387, val_acc: 25.522022
+Epoch [420], train_loss: 0.062988, val_loss: 0.059477, val_acc: 25.505306
+Epoch [421], train_loss: 0.062964, val_loss: 0.059551, val_acc: 25.499475
+Epoch [422], train_loss: 0.062902, val_loss: 0.059336, val_acc: 25.520981
+Epoch [423], train_loss: 0.063068, val_loss: 0.059253, val_acc: 25.531294
+Epoch [424], train_loss: 0.062834, val_loss: 0.059430, val_acc: 25.475788
+Epoch [425], train_loss: 0.063054, val_loss: 0.059590, val_acc: 25.495689
+Epoch [426], train_loss: 0.062739, val_loss: 0.059078, val_acc: 25.584351
+Epoch [427], train_loss: 0.062874, val_loss: 0.059143, val_acc: 25.586975
+Epoch [428], train_loss: 0.062919, val_loss: 0.059267, val_acc: 25.556005
+Epoch [429], train_loss: 0.062914, val_loss: 0.059249, val_acc: 25.513950
+Epoch [430], train_loss: 0.062820, val_loss: 0.059498, val_acc: 25.502861
+Epoch [431], train_loss: 0.062952, val_loss: 0.059232, val_acc: 25.580482
+Epoch [432], train_loss: 0.062893, val_loss: 0.059161, val_acc: 25.537632
+Epoch [433], train_loss: 0.062856, val_loss: 0.059147, val_acc: 25.558359
+Epoch [434], train_loss: 0.062977, val_loss: 0.059227, val_acc: 25.578262
+Epoch [435], train_loss: 0.063025, val_loss: 0.059381, val_acc: 25.507908
+Epoch [436], train_loss: 0.063101, val_loss: 0.059372, val_acc: 25.518217
+Epoch [437], train_loss: 0.062942, val_loss: 0.059223, val_acc: 25.552660
+Epoch [438], train_loss: 0.062844, val_loss: 0.059452, val_acc: 25.510269
+Epoch [439], train_loss: 0.062887, val_loss: 0.059391, val_acc: 25.519680
+Epoch [440], train_loss: 0.062854, val_loss: 0.059172, val_acc: 25.536190
+Epoch [441], train_loss: 0.063001, val_loss: 0.059529, val_acc: 25.470295
+Epoch [442], train_loss: 0.062944, val_loss: 0.059198, val_acc: 25.543394
+Epoch [443], train_loss: 0.062584, val_loss: 0.059269, val_acc: 25.501534
+Epoch [444], train_loss: 0.062718, val_loss: 0.059072, val_acc: 25.573919
+Epoch [445], train_loss: 0.062668, val_loss: 0.059303, val_acc: 25.491957
+Epoch [446], train_loss: 0.062756, val_loss: 0.059260, val_acc: 25.522255
+Epoch [447], train_loss: 0.062827, val_loss: 0.059226, val_acc: 25.539055
+Epoch [448], train_loss: 0.062861, val_loss: 0.059127, val_acc: 25.536493
+Epoch [449], train_loss: 0.062613, val_loss: 0.059076, val_acc: 25.555376
+Epoch [450], train_loss: 0.062689, val_loss: 0.059236, val_acc: 25.536697
+Epoch [451], train_loss: 0.062720, val_loss: 0.059497, val_acc: 25.455891
+Epoch [452], train_loss: 0.062828, val_loss: 0.059376, val_acc: 25.510839
+Epoch [453], train_loss: 0.062964, val_loss: 0.059186, val_acc: 25.543200
+Epoch [454], train_loss: 0.062825, val_loss: 0.059431, val_acc: 25.528221
+Epoch [455], train_loss: 0.062941, val_loss: 0.059046, val_acc: 25.581446
+Epoch [456], train_loss: 0.063002, val_loss: 0.059352, val_acc: 25.515120
+Epoch [457], train_loss: 0.062891, val_loss: 0.059190, val_acc: 25.548374
+Epoch [458], train_loss: 0.062814, val_loss: 0.059202, val_acc: 25.538364
+Epoch [459], train_loss: 0.062894, val_loss: 0.059423, val_acc: 25.509956
+Epoch [460], train_loss: 0.062763, val_loss: 0.058946, val_acc: 25.613649
+Epoch [461], train_loss: 0.062774, val_loss: 0.059136, val_acc: 25.541599
+Epoch [462], train_loss: 0.062762, val_loss: 0.059314, val_acc: 25.511473
+Epoch [463], train_loss: 0.062730, val_loss: 0.059189, val_acc: 25.516819
+Epoch [464], train_loss: 0.062671, val_loss: 0.059294, val_acc: 25.539482
+Epoch [465], train_loss: 0.062644, val_loss: 0.059219, val_acc: 25.533867
+Epoch [466], train_loss: 0.062867, val_loss: 0.059255, val_acc: 25.543058
+Epoch [467], train_loss: 0.062780, val_loss: 0.059263, val_acc: 25.527157
+Epoch [468], train_loss: 0.062696, val_loss: 0.059249, val_acc: 25.500992
+Epoch [469], train_loss: 0.062809, val_loss: 0.059333, val_acc: 25.481577
+Epoch [470], train_loss: 0.062821, val_loss: 0.059240, val_acc: 25.561466
+Epoch [471], train_loss: 0.062633, val_loss: 0.059197, val_acc: 25.541437
+Epoch [472], train_loss: 0.062749, val_loss: 0.059193, val_acc: 25.548298
+Epoch [473], train_loss: 0.062848, val_loss: 0.059135, val_acc: 25.566277
+Epoch [474], train_loss: 0.062575, val_loss: 0.059340, val_acc: 25.509762
+Epoch [475], train_loss: 0.062750, val_loss: 0.059273, val_acc: 25.524963
+Epoch [476], train_loss: 0.062725, val_loss: 0.059244, val_acc: 25.523127
+Epoch [477], train_loss: 0.062532, val_loss: 0.059141, val_acc: 25.574013
+Epoch [478], train_loss: 0.062626, val_loss: 0.059232, val_acc: 25.529846
+Epoch [479], train_loss: 0.062817, val_loss: 0.059114, val_acc: 25.554167
+Epoch [480], train_loss: 0.062449, val_loss: 0.058982, val_acc: 25.578377
+Epoch [481], train_loss: 0.062745, val_loss: 0.059327, val_acc: 25.535847
+Epoch [482], train_loss: 0.062853, val_loss: 0.059005, val_acc: 25.609087
+Epoch [483], train_loss: 0.062478, val_loss: 0.059109, val_acc: 25.544107
+Epoch [484], train_loss: 0.062593, val_loss: 0.059245, val_acc: 25.539423
+Epoch [485], train_loss: 0.062520, val_loss: 0.059125, val_acc: 25.555664
+Epoch [486], train_loss: 0.062652, val_loss: 0.059096, val_acc: 25.547449
+Epoch [487], train_loss: 0.062724, val_loss: 0.059197, val_acc: 25.507761
+Epoch [488], train_loss: 0.062654, val_loss: 0.059316, val_acc: 25.529690
+Epoch [489], train_loss: 0.062529, val_loss: 0.059034, val_acc: 25.576187
+Epoch [490], train_loss: 0.062663, val_loss: 0.059455, val_acc: 25.480270
+Epoch [491], train_loss: 0.062700, val_loss: 0.059217, val_acc: 25.531034
+Epoch [492], train_loss: 0.062710, val_loss: 0.059290, val_acc: 25.524136
+Epoch [493], train_loss: 0.062595, val_loss: 0.059185, val_acc: 25.538134
+Epoch [494], train_loss: 0.062760, val_loss: 0.059230, val_acc: 25.511606
+Epoch [495], train_loss: 0.062490, val_loss: 0.059121, val_acc: 25.564463
+Epoch [496], train_loss: 0.062503, val_loss: 0.059581, val_acc: 25.459900
+Epoch [497], train_loss: 0.062751, val_loss: 0.059016, val_acc: 25.575798
+Epoch [498], train_loss: 0.062531, val_loss: 0.059200, val_acc: 25.537708
+Epoch [499], train_loss: 0.062359, val_loss: 0.059192, val_acc: 25.510719
+Epoch [500], train_loss: 0.062451, val_loss: 0.059002, val_acc: 25.569098
+Epoch [501], train_loss: 0.062663, val_loss: 0.059350, val_acc: 25.482386
+Epoch [502], train_loss: 0.062450, val_loss: 0.059036, val_acc: 25.567097
+Epoch [503], train_loss: 0.062814, val_loss: 0.059146, val_acc: 25.544777
+Epoch [504], train_loss: 0.062616, val_loss: 0.059313, val_acc: 25.524229
+Epoch [505], train_loss: 0.062583, val_loss: 0.059081, val_acc: 25.555962
+Epoch [506], train_loss: 0.062470, val_loss: 0.059178, val_acc: 25.510674
+Epoch [507], train_loss: 0.062433, val_loss: 0.059127, val_acc: 25.502808
+Epoch [508], train_loss: 0.062412, val_loss: 0.059023, val_acc: 25.553038
+Epoch [509], train_loss: 0.062460, val_loss: 0.059047, val_acc: 25.536497
+Epoch [510], train_loss: 0.062526, val_loss: 0.059015, val_acc: 25.546711
+Epoch [511], train_loss: 0.062428, val_loss: 0.059150, val_acc: 25.508955
+Epoch [512], train_loss: 0.062339, val_loss: 0.058924, val_acc: 25.571671
+Epoch [513], train_loss: 0.062645, val_loss: 0.059293, val_acc: 25.490519
+Epoch [514], train_loss: 0.062523, val_loss: 0.058926, val_acc: 25.578812
+Epoch [515], train_loss: 0.062485, val_loss: 0.059183, val_acc: 25.530708
+Epoch [516], train_loss: 0.062476, val_loss: 0.059155, val_acc: 25.517975
+Epoch [517], train_loss: 0.062461, val_loss: 0.059142, val_acc: 25.496759
+Epoch [518], train_loss: 0.062699, val_loss: 0.059178, val_acc: 25.528297
+Epoch [519], train_loss: 0.062560, val_loss: 0.059035, val_acc: 25.581957
+Epoch [520], train_loss: 0.062355, val_loss: 0.059070, val_acc: 25.574944
+Epoch [521], train_loss: 0.062494, val_loss: 0.059113, val_acc: 25.533503
+Epoch [522], train_loss: 0.062378, val_loss: 0.059131, val_acc: 25.512049
+Epoch [523], train_loss: 0.062340, val_loss: 0.059142, val_acc: 25.534382
+Epoch [524], train_loss: 0.062254, val_loss: 0.059004, val_acc: 25.590563
+Epoch [525], train_loss: 0.062581, val_loss: 0.058970, val_acc: 25.605585
+Epoch [526], train_loss: 0.062298, val_loss: 0.058984, val_acc: 25.583084
+Epoch [527], train_loss: 0.062318, val_loss: 0.059167, val_acc: 25.532373
+Epoch [528], train_loss: 0.062542, val_loss: 0.059113, val_acc: 25.543623
+Epoch [529], train_loss: 0.062504, val_loss: 0.059050, val_acc: 25.550148
+Epoch [530], train_loss: 0.062418, val_loss: 0.059142, val_acc: 25.507465
+Epoch [531], train_loss: 0.062342, val_loss: 0.059032, val_acc: 25.545734
+Epoch [532], train_loss: 0.062430, val_loss: 0.059140, val_acc: 25.524326
+Epoch [533], train_loss: 0.062651, val_loss: 0.059057, val_acc: 25.563778
+Epoch [534], train_loss: 0.062528, val_loss: 0.059117, val_acc: 25.553467
+Epoch [535], train_loss: 0.062365, val_loss: 0.059066, val_acc: 25.523830
+Epoch [536], train_loss: 0.062425, val_loss: 0.058970, val_acc: 25.545738
+Epoch [537], train_loss: 0.062515, val_loss: 0.059076, val_acc: 25.525640
+Epoch [538], train_loss: 0.062512, val_loss: 0.059149, val_acc: 25.497025
+Epoch [539], train_loss: 0.062332, val_loss: 0.058899, val_acc: 25.623304
+Epoch [540], train_loss: 0.062251, val_loss: 0.059143, val_acc: 25.551756
+Epoch [541], train_loss: 0.062496, val_loss: 0.059218, val_acc: 25.482552
+Epoch [542], train_loss: 0.062323, val_loss: 0.059047, val_acc: 25.532303
+Epoch [543], train_loss: 0.062435, val_loss: 0.059199, val_acc: 25.531744
+Epoch [544], train_loss: 0.062558, val_loss: 0.058974, val_acc: 25.594343
+Epoch [545], train_loss: 0.062269, val_loss: 0.058975, val_acc: 25.502560
+Epoch [546], train_loss: 0.062445, val_loss: 0.059220, val_acc: 25.493528
+Epoch [547], train_loss: 0.062343, val_loss: 0.059271, val_acc: 25.520746
+Epoch [548], train_loss: 0.062146, val_loss: 0.059036, val_acc: 25.512989
+Epoch [549], train_loss: 0.062350, val_loss: 0.059115, val_acc: 25.496647
+Epoch [550], train_loss: 0.062327, val_loss: 0.059062, val_acc: 25.514503
+Epoch [551], train_loss: 0.062400, val_loss: 0.059097, val_acc: 25.545708
+Epoch [552], train_loss: 0.062540, val_loss: 0.059193, val_acc: 25.548975
+Epoch [553], train_loss: 0.062277, val_loss: 0.059001, val_acc: 25.553032
+Epoch [554], train_loss: 0.062153, val_loss: 0.059060, val_acc: 25.523582
+Epoch [555], train_loss: 0.062122, val_loss: 0.059094, val_acc: 25.555561
+Epoch [556], train_loss: 0.062298, val_loss: 0.058967, val_acc: 25.588333
+Epoch [557], train_loss: 0.062432, val_loss: 0.059111, val_acc: 25.529518
+Epoch [558], train_loss: 0.062314, val_loss: 0.059010, val_acc: 25.574795
+Epoch [559], train_loss: 0.062383, val_loss: 0.058846, val_acc: 25.608042
+Epoch [560], train_loss: 0.062400, val_loss: 0.059026, val_acc: 25.547907
+Epoch [561], train_loss: 0.062450, val_loss: 0.059203, val_acc: 25.518723
+Epoch [562], train_loss: 0.062489, val_loss: 0.059064, val_acc: 25.539871
+Epoch [563], train_loss: 0.062390, val_loss: 0.059017, val_acc: 25.592976
+Epoch [564], train_loss: 0.062355, val_loss: 0.059113, val_acc: 25.471443
+Epoch [565], train_loss: 0.062351, val_loss: 0.058970, val_acc: 25.581940
+Epoch [566], train_loss: 0.062251, val_loss: 0.059279, val_acc: 25.441395
+Epoch [567], train_loss: 0.062324, val_loss: 0.059128, val_acc: 25.546925
+Epoch [568], train_loss: 0.062337, val_loss: 0.058886, val_acc: 25.611244
+Epoch [569], train_loss: 0.062195, val_loss: 0.058826, val_acc: 25.637173
+Epoch [570], train_loss: 0.062155, val_loss: 0.059124, val_acc: 25.520452
+Epoch [571], train_loss: 0.062086, val_loss: 0.059025, val_acc: 25.544743
+Epoch [572], train_loss: 0.062223, val_loss: 0.058940, val_acc: 25.521561
+Epoch [573], train_loss: 0.062163, val_loss: 0.058964, val_acc: 25.569323
+Epoch [574], train_loss: 0.062252, val_loss: 0.059062, val_acc: 25.532978
+Epoch [575], train_loss: 0.061967, val_loss: 0.058909, val_acc: 25.539448
+Epoch [576], train_loss: 0.062261, val_loss: 0.058940, val_acc: 25.532764
+Epoch [577], train_loss: 0.062341, val_loss: 0.059022, val_acc: 25.533890
+Epoch [578], train_loss: 0.062161, val_loss: 0.058877, val_acc: 25.547735
+Epoch [579], train_loss: 0.062228, val_loss: 0.058838, val_acc: 25.567717
+Epoch [580], train_loss: 0.062295, val_loss: 0.059137, val_acc: 25.534410
+Epoch [581], train_loss: 0.062149, val_loss: 0.059050, val_acc: 25.526340
+Epoch [582], train_loss: 0.062234, val_loss: 0.058926, val_acc: 25.570967
+Epoch [583], train_loss: 0.062191, val_loss: 0.058892, val_acc: 25.559170
+Epoch [584], train_loss: 0.062319, val_loss: 0.059045, val_acc: 25.543541
+Epoch [585], train_loss: 0.062229, val_loss: 0.059041, val_acc: 25.519794
+Epoch [586], train_loss: 0.062339, val_loss: 0.058959, val_acc: 25.546497
+Epoch [587], train_loss: 0.062290, val_loss: 0.058921, val_acc: 25.580564
+Epoch [588], train_loss: 0.062270, val_loss: 0.058997, val_acc: 25.565840
+Epoch [589], train_loss: 0.062119, val_loss: 0.059017, val_acc: 25.551174
+Epoch [590], train_loss: 0.062083, val_loss: 0.058991, val_acc: 25.534660
+Epoch [591], train_loss: 0.062218, val_loss: 0.058887, val_acc: 25.526176
+Epoch [592], train_loss: 0.062332, val_loss: 0.058783, val_acc: 25.648584
+Epoch [593], train_loss: 0.062132, val_loss: 0.058832, val_acc: 25.550241
+Epoch [594], train_loss: 0.062249, val_loss: 0.058892, val_acc: 25.586493
+Epoch [595], train_loss: 0.062086, val_loss: 0.058911, val_acc: 25.572098
+Epoch [596], train_loss: 0.062034, val_loss: 0.058770, val_acc: 25.634972
+Epoch [597], train_loss: 0.062209, val_loss: 0.058893, val_acc: 25.553864
+Epoch [598], train_loss: 0.062221, val_loss: 0.059041, val_acc: 25.571995
+Epoch [599], train_loss: 0.062263, val_loss: 0.058792, val_acc: 25.625593
+Epoch [600], train_loss: 0.062137, val_loss: 0.058995, val_acc: 25.576210
+Epoch [601], train_loss: 0.062247, val_loss: 0.058960, val_acc: 25.584806
+Epoch [602], train_loss: 0.062170, val_loss: 0.058896, val_acc: 25.610304
+Epoch [603], train_loss: 0.062363, val_loss: 0.058938, val_acc: 25.582441
+Epoch [604], train_loss: 0.062046, val_loss: 0.058741, val_acc: 25.590971
+Epoch [605], train_loss: 0.062309, val_loss: 0.059003, val_acc: 25.554010
+Epoch [606], train_loss: 0.062226, val_loss: 0.059005, val_acc: 25.543360
+Epoch [607], train_loss: 0.062069, val_loss: 0.059111, val_acc: 25.518223
+Epoch [608], train_loss: 0.062119, val_loss: 0.058985, val_acc: 25.580215
+Epoch [609], train_loss: 0.062186, val_loss: 0.059069, val_acc: 25.520369
+Epoch [610], train_loss: 0.062208, val_loss: 0.058883, val_acc: 25.584351
+Epoch [611], train_loss: 0.062186, val_loss: 0.058982, val_acc: 25.552683
+Epoch [612], train_loss: 0.062098, val_loss: 0.058994, val_acc: 25.491978
+Epoch [613], train_loss: 0.062239, val_loss: 0.058809, val_acc: 25.575264
+Epoch [614], train_loss: 0.062146, val_loss: 0.058888, val_acc: 25.544245
+Epoch [615], train_loss: 0.062016, val_loss: 0.058982, val_acc: 25.490301
+Epoch [616], train_loss: 0.062331, val_loss: 0.058919, val_acc: 25.548590
+Epoch [617], train_loss: 0.062239, val_loss: 0.058860, val_acc: 25.622749
+Epoch [618], train_loss: 0.061857, val_loss: 0.058733, val_acc: 25.591520
+Epoch [619], train_loss: 0.062169, val_loss: 0.058891, val_acc: 25.596149
+Epoch [620], train_loss: 0.062194, val_loss: 0.058999, val_acc: 25.561646
+Epoch [621], train_loss: 0.062165, val_loss: 0.058968, val_acc: 25.550419
+Epoch [622], train_loss: 0.062178, val_loss: 0.058836, val_acc: 25.576136
+Epoch [623], train_loss: 0.062212, val_loss: 0.059033, val_acc: 25.561750
+Epoch [624], train_loss: 0.061939, val_loss: 0.058860, val_acc: 25.598351
+Epoch [625], train_loss: 0.062103, val_loss: 0.058897, val_acc: 25.556675
+Epoch [626], train_loss: 0.062074, val_loss: 0.058798, val_acc: 25.628016
+Epoch [627], train_loss: 0.062089, val_loss: 0.058855, val_acc: 25.569603
+Epoch [628], train_loss: 0.062144, val_loss: 0.058775, val_acc: 25.602322
+Epoch [629], train_loss: 0.062159, val_loss: 0.058865, val_acc: 25.624699
+Epoch [630], train_loss: 0.062056, val_loss: 0.058906, val_acc: 25.583532
+Epoch [631], train_loss: 0.062165, val_loss: 0.058972, val_acc: 25.549637
+Epoch [632], train_loss: 0.062143, val_loss: 0.058880, val_acc: 25.587591
+Epoch [633], train_loss: 0.061984, val_loss: 0.058720, val_acc: 25.627672
+Epoch [634], train_loss: 0.062043, val_loss: 0.058840, val_acc: 25.612923
+Epoch [635], train_loss: 0.062198, val_loss: 0.058857, val_acc: 25.541105
+Epoch [636], train_loss: 0.062048, val_loss: 0.058650, val_acc: 25.642550
+Epoch [637], train_loss: 0.062125, val_loss: 0.058871, val_acc: 25.524620
+Epoch [638], train_loss: 0.062165, val_loss: 0.058932, val_acc: 25.544819
+Epoch [639], train_loss: 0.062019, val_loss: 0.058815, val_acc: 25.558708
+Epoch [640], train_loss: 0.062083, val_loss: 0.058886, val_acc: 25.550308
+Epoch [641], train_loss: 0.061951, val_loss: 0.058857, val_acc: 25.571682
+Epoch [642], train_loss: 0.061838, val_loss: 0.058872, val_acc: 25.534031
+Epoch [643], train_loss: 0.061947, val_loss: 0.059000, val_acc: 25.544392
+Epoch [644], train_loss: 0.062149, val_loss: 0.058814, val_acc: 25.575483
+Epoch [645], train_loss: 0.062174, val_loss: 0.058753, val_acc: 25.629961
+Epoch [646], train_loss: 0.062140, val_loss: 0.058825, val_acc: 25.604397
+Epoch [647], train_loss: 0.062090, val_loss: 0.058865, val_acc: 25.598141
+Epoch [648], train_loss: 0.061973, val_loss: 0.058850, val_acc: 25.561811
+Epoch [649], train_loss: 0.062069, val_loss: 0.058803, val_acc: 25.560453
+Epoch [650], train_loss: 0.062200, val_loss: 0.058969, val_acc: 25.554155
+Epoch [651], train_loss: 0.062010, val_loss: 0.058749, val_acc: 25.630659
+Epoch [652], train_loss: 0.062055, val_loss: 0.058719, val_acc: 25.605261
+Epoch [653], train_loss: 0.062268, val_loss: 0.058879, val_acc: 25.566910
+Epoch [654], train_loss: 0.062000, val_loss: 0.058756, val_acc: 25.549957
+Epoch [655], train_loss: 0.062363, val_loss: 0.058973, val_acc: 25.552454
+Epoch [656], train_loss: 0.062029, val_loss: 0.058922, val_acc: 25.574455
+Epoch [657], train_loss: 0.061950, val_loss: 0.058891, val_acc: 25.558960
+Epoch [658], train_loss: 0.062244, val_loss: 0.058709, val_acc: 25.690962
+Epoch [659], train_loss: 0.062075, val_loss: 0.058802, val_acc: 25.604216
+Epoch [660], train_loss: 0.061819, val_loss: 0.058956, val_acc: 25.516804
+Epoch [661], train_loss: 0.061984, val_loss: 0.058827, val_acc: 25.598089
+Epoch [662], train_loss: 0.061982, val_loss: 0.058958, val_acc: 25.560633
+Epoch [663], train_loss: 0.062094, val_loss: 0.058854, val_acc: 25.568588
+Epoch [664], train_loss: 0.062022, val_loss: 0.058751, val_acc: 25.623640
+Epoch [665], train_loss: 0.061883, val_loss: 0.058861, val_acc: 25.599098
+Epoch [666], train_loss: 0.062076, val_loss: 0.058689, val_acc: 25.591141
+Epoch [667], train_loss: 0.062048, val_loss: 0.058746, val_acc: 25.576111
+Epoch [668], train_loss: 0.062103, val_loss: 0.058869, val_acc: 25.580423
+Epoch [669], train_loss: 0.061967, val_loss: 0.058868, val_acc: 25.544800
+Epoch [670], train_loss: 0.062109, val_loss: 0.058905, val_acc: 25.533934
+Epoch [671], train_loss: 0.061993, val_loss: 0.058927, val_acc: 25.547764
+Epoch [672], train_loss: 0.061961, val_loss: 0.058653, val_acc: 25.660801
+Epoch [673], train_loss: 0.062165, val_loss: 0.058788, val_acc: 25.572779
+Epoch [674], train_loss: 0.061893, val_loss: 0.058964, val_acc: 25.573727
+Epoch [675], train_loss: 0.061992, val_loss: 0.058860, val_acc: 25.591736
+Epoch [676], train_loss: 0.061882, val_loss: 0.058798, val_acc: 25.565130
+Epoch [677], train_loss: 0.061965, val_loss: 0.058764, val_acc: 25.599506
+Epoch [678], train_loss: 0.062096, val_loss: 0.058680, val_acc: 25.577717
+Epoch [679], train_loss: 0.062110, val_loss: 0.058826, val_acc: 25.589832
+Epoch [680], train_loss: 0.061942, val_loss: 0.058751, val_acc: 25.581640
+Epoch [681], train_loss: 0.061839, val_loss: 0.058771, val_acc: 25.628452
+Epoch [682], train_loss: 0.061945, val_loss: 0.058667, val_acc: 25.598619
+Epoch [683], train_loss: 0.061958, val_loss: 0.058853, val_acc: 25.561747
+Epoch [684], train_loss: 0.061942, val_loss: 0.058894, val_acc: 25.568613
+Epoch [685], train_loss: 0.061920, val_loss: 0.058936, val_acc: 25.556097
+Epoch [686], train_loss: 0.061814, val_loss: 0.058713, val_acc: 25.585304
+Epoch [687], train_loss: 0.062033, val_loss: 0.058741, val_acc: 25.621052
+Epoch [688], train_loss: 0.062136, val_loss: 0.058914, val_acc: 25.542273
+Epoch [689], train_loss: 0.061981, val_loss: 0.058855, val_acc: 25.575178
+Epoch [690], train_loss: 0.062113, val_loss: 0.058905, val_acc: 25.577290
+Epoch [691], train_loss: 0.061995, val_loss: 0.058774, val_acc: 25.584040
+Epoch [692], train_loss: 0.061897, val_loss: 0.058911, val_acc: 25.567266
+Epoch [693], train_loss: 0.062009, val_loss: 0.058869, val_acc: 25.532759
+Epoch [694], train_loss: 0.061861, val_loss: 0.058654, val_acc: 25.620430
+Epoch [695], train_loss: 0.062107, val_loss: 0.058908, val_acc: 25.564529
+Epoch [696], train_loss: 0.062045, val_loss: 0.058855, val_acc: 25.518639
+Epoch [697], train_loss: 0.062187, val_loss: 0.058776, val_acc: 25.575516
+Epoch [698], train_loss: 0.061968, val_loss: 0.058665, val_acc: 25.632601
+Epoch [699], train_loss: 0.062066, val_loss: 0.058671, val_acc: 25.635588
+Epoch [700], train_loss: 0.061990, val_loss: 0.058701, val_acc: 25.631044
+Epoch [701], train_loss: 0.061930, val_loss: 0.058734, val_acc: 25.561388
+Epoch [702], train_loss: 0.061872, val_loss: 0.058850, val_acc: 25.591375
+Epoch [703], train_loss: 0.061985, val_loss: 0.058848, val_acc: 25.555332
+Epoch [704], train_loss: 0.061968, val_loss: 0.058705, val_acc: 25.642382
+Epoch [705], train_loss: 0.061843, val_loss: 0.058659, val_acc: 25.660149
+Epoch [706], train_loss: 0.062025, val_loss: 0.058719, val_acc: 25.609764
+Epoch [707], train_loss: 0.061965, val_loss: 0.058835, val_acc: 25.566374
+Epoch [708], train_loss: 0.061766, val_loss: 0.058783, val_acc: 25.603376
+Epoch [709], train_loss: 0.061793, val_loss: 0.058789, val_acc: 25.588139
+Epoch [710], train_loss: 0.061767, val_loss: 0.058822, val_acc: 25.535593
+Epoch [711], train_loss: 0.061919, val_loss: 0.058793, val_acc: 25.592356
+Epoch [712], train_loss: 0.061880, val_loss: 0.058645, val_acc: 25.595133
+Epoch [713], train_loss: 0.061808, val_loss: 0.058818, val_acc: 25.612251
+Epoch [714], train_loss: 0.061867, val_loss: 0.058589, val_acc: 25.650692
+Epoch [715], train_loss: 0.061901, val_loss: 0.058739, val_acc: 25.625179
+Epoch [716], train_loss: 0.062000, val_loss: 0.058767, val_acc: 25.612947
+Epoch [717], train_loss: 0.061879, val_loss: 0.058658, val_acc: 25.642353
+Epoch [718], train_loss: 0.061878, val_loss: 0.058717, val_acc: 25.599859
+Epoch [719], train_loss: 0.061874, val_loss: 0.058776, val_acc: 25.578117
+Epoch [720], train_loss: 0.062188, val_loss: 0.058653, val_acc: 25.662565
+Epoch [721], train_loss: 0.061639, val_loss: 0.058783, val_acc: 25.537712
+Epoch [722], train_loss: 0.061839, val_loss: 0.058671, val_acc: 25.601131
+Epoch [723], train_loss: 0.061831, val_loss: 0.058710, val_acc: 25.561262
+Epoch [724], train_loss: 0.061902, val_loss: 0.058758, val_acc: 25.539131
+Epoch [725], train_loss: 0.061890, val_loss: 0.058581, val_acc: 25.654781
+Epoch [726], train_loss: 0.061913, val_loss: 0.058610, val_acc: 25.671623
+Epoch [727], train_loss: 0.061796, val_loss: 0.058564, val_acc: 25.624851
+Epoch [728], train_loss: 0.061994, val_loss: 0.058662, val_acc: 25.651628
+Epoch [729], train_loss: 0.061676, val_loss: 0.058622, val_acc: 25.536566
+Epoch [730], train_loss: 0.061820, val_loss: 0.058603, val_acc: 25.619236
+Epoch [731], train_loss: 0.061791, val_loss: 0.058653, val_acc: 25.605467
+Epoch [732], train_loss: 0.061740, val_loss: 0.058678, val_acc: 25.604002
+Epoch [733], train_loss: 0.061765, val_loss: 0.058787, val_acc: 25.553360
+Epoch [734], train_loss: 0.061751, val_loss: 0.058547, val_acc: 25.625542
+Epoch [735], train_loss: 0.061670, val_loss: 0.058663, val_acc: 25.598406
+Epoch [736], train_loss: 0.061771, val_loss: 0.058543, val_acc: 25.568998
+Epoch [737], train_loss: 0.061744, val_loss: 0.058564, val_acc: 25.690140
+Epoch [738], train_loss: 0.061832, val_loss: 0.058786, val_acc: 25.567993
+Epoch [739], train_loss: 0.061825, val_loss: 0.058652, val_acc: 25.575714
+Epoch [740], train_loss: 0.061761, val_loss: 0.058679, val_acc: 25.640417
+Epoch [741], train_loss: 0.061708, val_loss: 0.058824, val_acc: 25.545763
+Epoch [742], train_loss: 0.061734, val_loss: 0.058741, val_acc: 25.587864
+Epoch [743], train_loss: 0.061650, val_loss: 0.058688, val_acc: 25.608574
+Epoch [744], train_loss: 0.061768, val_loss: 0.058915, val_acc: 25.531647
+Epoch [745], train_loss: 0.061798, val_loss: 0.058728, val_acc: 25.628748
+Epoch [746], train_loss: 0.061819, val_loss: 0.058658, val_acc: 25.620119
+Epoch [747], train_loss: 0.061922, val_loss: 0.058771, val_acc: 25.590054
+Epoch [748], train_loss: 0.061814, val_loss: 0.058607, val_acc: 25.643694
+Epoch [749], train_loss: 0.061759, val_loss: 0.058797, val_acc: 25.624987
+Epoch [750], train_loss: 0.061698, val_loss: 0.058686, val_acc: 25.630003
+Epoch [751], train_loss: 0.061741, val_loss: 0.058770, val_acc: 25.624250
+Epoch [752], train_loss: 0.061806, val_loss: 0.058640, val_acc: 25.645847
+Epoch [753], train_loss: 0.061734, val_loss: 0.058564, val_acc: 25.683535
+Epoch [754], train_loss: 0.061841, val_loss: 0.058840, val_acc: 25.492058
+Epoch [755], train_loss: 0.061926, val_loss: 0.058758, val_acc: 25.600193
+Epoch [756], train_loss: 0.061767, val_loss: 0.058701, val_acc: 25.602615
+Epoch [757], train_loss: 0.061587, val_loss: 0.058630, val_acc: 25.663692
+Epoch [758], train_loss: 0.061862, val_loss: 0.058672, val_acc: 25.599718
+Epoch [759], train_loss: 0.061677, val_loss: 0.058679, val_acc: 25.664110
+Epoch [760], train_loss: 0.061727, val_loss: 0.058752, val_acc: 25.547636
+Epoch [761], train_loss: 0.061969, val_loss: 0.058598, val_acc: 25.644585
+Epoch [762], train_loss: 0.061837, val_loss: 0.058791, val_acc: 25.598925
+Epoch [763], train_loss: 0.061778, val_loss: 0.058614, val_acc: 25.684402
+Epoch [764], train_loss: 0.061798, val_loss: 0.058726, val_acc: 25.669968
+Epoch [765], train_loss: 0.061577, val_loss: 0.058622, val_acc: 25.677448
+Epoch [766], train_loss: 0.061719, val_loss: 0.058729, val_acc: 25.621296
+Epoch [767], train_loss: 0.061798, val_loss: 0.058732, val_acc: 25.598108
+Epoch [768], train_loss: 0.061692, val_loss: 0.058858, val_acc: 25.607903
+Epoch [769], train_loss: 0.061626, val_loss: 0.058748, val_acc: 25.589901
+Epoch [770], train_loss: 0.061686, val_loss: 0.058676, val_acc: 25.539892
+Epoch [771], train_loss: 0.061861, val_loss: 0.058730, val_acc: 25.604841
+Epoch [772], train_loss: 0.061799, val_loss: 0.058863, val_acc: 25.549604
+Epoch [773], train_loss: 0.061923, val_loss: 0.058679, val_acc: 25.604006
+Epoch [774], train_loss: 0.061805, val_loss: 0.058722, val_acc: 25.597052
+Epoch [775], train_loss: 0.061754, val_loss: 0.058719, val_acc: 25.610514
+Epoch [776], train_loss: 0.061761, val_loss: 0.058789, val_acc: 25.563036
+Epoch [777], train_loss: 0.061698, val_loss: 0.058786, val_acc: 25.600679
+Epoch [778], train_loss: 0.061796, val_loss: 0.058620, val_acc: 25.660557
+Epoch [779], train_loss: 0.061648, val_loss: 0.058753, val_acc: 25.627171
+Epoch [780], train_loss: 0.061797, val_loss: 0.058660, val_acc: 25.649908
+Epoch [781], train_loss: 0.061770, val_loss: 0.058694, val_acc: 25.596432
+Epoch [782], train_loss: 0.061759, val_loss: 0.058865, val_acc: 25.559238
+Epoch [783], train_loss: 0.061740, val_loss: 0.058634, val_acc: 25.647234
+Epoch [784], train_loss: 0.061568, val_loss: 0.058647, val_acc: 25.653788
+Epoch [785], train_loss: 0.061691, val_loss: 0.058688, val_acc: 25.592564
+Epoch [786], train_loss: 0.061765, val_loss: 0.058707, val_acc: 25.623987
+Epoch [787], train_loss: 0.061562, val_loss: 0.058700, val_acc: 25.592670
+Epoch [788], train_loss: 0.061578, val_loss: 0.058699, val_acc: 25.620640
+Epoch [789], train_loss: 0.061588, val_loss: 0.058620, val_acc: 25.680277
+Epoch [790], train_loss: 0.061759, val_loss: 0.058612, val_acc: 25.633226
+Epoch [791], train_loss: 0.061900, val_loss: 0.058717, val_acc: 25.544504
+Epoch [792], train_loss: 0.061519, val_loss: 0.058662, val_acc: 25.611334
+Epoch [793], train_loss: 0.061498, val_loss: 0.058575, val_acc: 25.627119
+Epoch [794], train_loss: 0.061724, val_loss: 0.058614, val_acc: 25.668116
+Epoch [795], train_loss: 0.061661, val_loss: 0.058739, val_acc: 25.603163
+Epoch [796], train_loss: 0.061666, val_loss: 0.058648, val_acc: 25.648676
+Epoch [797], train_loss: 0.061671, val_loss: 0.058830, val_acc: 25.614931
+Epoch [798], train_loss: 0.061658, val_loss: 0.058781, val_acc: 25.559654
+Epoch [799], train_loss: 0.061885, val_loss: 0.058666, val_acc: 25.614887
+Epoch [800], train_loss: 0.061705, val_loss: 0.058680, val_acc: 25.568001
+Epoch [801], train_loss: 0.061764, val_loss: 0.058568, val_acc: 25.656422
+Epoch [802], train_loss: 0.061761, val_loss: 0.058653, val_acc: 25.595985
+Epoch [803], train_loss: 0.061723, val_loss: 0.058581, val_acc: 25.694807
+Epoch [804], train_loss: 0.061497, val_loss: 0.058612, val_acc: 25.644951
+Epoch [805], train_loss: 0.061470, val_loss: 0.058576, val_acc: 25.675564
+Epoch [806], train_loss: 0.061696, val_loss: 0.058714, val_acc: 25.614428
+Epoch [807], train_loss: 0.061764, val_loss: 0.058562, val_acc: 25.679420
+Epoch [808], train_loss: 0.061648, val_loss: 0.058780, val_acc: 25.584660
+Epoch [809], train_loss: 0.061725, val_loss: 0.058709, val_acc: 25.554529
+Epoch [810], train_loss: 0.061730, val_loss: 0.058571, val_acc: 25.650803
+Epoch [811], train_loss: 0.061874, val_loss: 0.058801, val_acc: 25.566832
+Epoch [812], train_loss: 0.061599, val_loss: 0.058570, val_acc: 25.652836
+Epoch [813], train_loss: 0.061591, val_loss: 0.058727, val_acc: 25.660301
+Epoch [814], train_loss: 0.061797, val_loss: 0.058552, val_acc: 25.714731
+Epoch [815], train_loss: 0.061620, val_loss: 0.058739, val_acc: 25.573097
+Epoch [816], train_loss: 0.061795, val_loss: 0.058640, val_acc: 25.575321
+Epoch [817], train_loss: 0.061885, val_loss: 0.058647, val_acc: 25.626719
+Epoch [818], train_loss: 0.061535, val_loss: 0.058646, val_acc: 25.582201
+Epoch [819], train_loss: 0.061557, val_loss: 0.058564, val_acc: 25.648359
+Epoch [820], train_loss: 0.061833, val_loss: 0.058511, val_acc: 25.669538
+Epoch [821], train_loss: 0.061826, val_loss: 0.058625, val_acc: 25.620235
+Epoch [822], train_loss: 0.061561, val_loss: 0.058694, val_acc: 25.594614
+Epoch [823], train_loss: 0.061744, val_loss: 0.058716, val_acc: 25.622644
+Epoch [824], train_loss: 0.061691, val_loss: 0.058595, val_acc: 25.663038
+Epoch [825], train_loss: 0.061676, val_loss: 0.058606, val_acc: 25.645485
+Epoch [826], train_loss: 0.061421, val_loss: 0.058812, val_acc: 25.566187
+Epoch [827], train_loss: 0.061682, val_loss: 0.058632, val_acc: 25.579170
+Epoch [828], train_loss: 0.061640, val_loss: 0.058660, val_acc: 25.639725
+Epoch [829], train_loss: 0.061511, val_loss: 0.058491, val_acc: 25.674454
+Epoch [830], train_loss: 0.061679, val_loss: 0.058656, val_acc: 25.647863
+Epoch [831], train_loss: 0.061595, val_loss: 0.058618, val_acc: 25.645828
+Epoch [832], train_loss: 0.061730, val_loss: 0.058614, val_acc: 25.640150
+Epoch [833], train_loss: 0.061573, val_loss: 0.058545, val_acc: 25.693264
+Epoch [834], train_loss: 0.061861, val_loss: 0.058781, val_acc: 25.542679
+Epoch [835], train_loss: 0.061732, val_loss: 0.058614, val_acc: 25.580036
+Epoch [836], train_loss: 0.061731, val_loss: 0.058672, val_acc: 25.629398
+Epoch [837], train_loss: 0.061700, val_loss: 0.058679, val_acc: 25.592896
+Epoch [838], train_loss: 0.061554, val_loss: 0.058611, val_acc: 25.608860
+Epoch [839], train_loss: 0.061622, val_loss: 0.058455, val_acc: 25.762409
+Epoch [840], train_loss: 0.061688, val_loss: 0.058656, val_acc: 25.599724
+Epoch [841], train_loss: 0.061613, val_loss: 0.058656, val_acc: 25.598841
+Epoch [842], train_loss: 0.061813, val_loss: 0.058722, val_acc: 25.585020
+Epoch [843], train_loss: 0.061624, val_loss: 0.058646, val_acc: 25.587036
+Epoch [844], train_loss: 0.061420, val_loss: 0.058743, val_acc: 25.580452
+Epoch [845], train_loss: 0.061435, val_loss: 0.058599, val_acc: 25.633297
+Epoch [846], train_loss: 0.061560, val_loss: 0.058654, val_acc: 25.591681
+Epoch [847], train_loss: 0.061717, val_loss: 0.058642, val_acc: 25.607925
+Epoch [848], train_loss: 0.061749, val_loss: 0.058692, val_acc: 25.599039
+Epoch [849], train_loss: 0.061750, val_loss: 0.058722, val_acc: 25.565680
+Epoch [850], train_loss: 0.061502, val_loss: 0.058555, val_acc: 25.652248
+Epoch [851], train_loss: 0.061485, val_loss: 0.058701, val_acc: 25.587357
+Epoch [852], train_loss: 0.061563, val_loss: 0.058474, val_acc: 25.737980
+Epoch [853], train_loss: 0.061829, val_loss: 0.058581, val_acc: 25.681435
+Epoch [854], train_loss: 0.061677, val_loss: 0.058597, val_acc: 25.627123
+Epoch [855], train_loss: 0.061721, val_loss: 0.058611, val_acc: 25.628874
+Epoch [856], train_loss: 0.061722, val_loss: 0.058575, val_acc: 25.653349
+Epoch [857], train_loss: 0.061416, val_loss: 0.058768, val_acc: 25.528522
+Epoch [858], train_loss: 0.061617, val_loss: 0.058660, val_acc: 25.589930
+Epoch [859], train_loss: 0.061597, val_loss: 0.058648, val_acc: 25.636208
+Epoch [860], train_loss: 0.061423, val_loss: 0.058577, val_acc: 25.614775
+Epoch [861], train_loss: 0.061512, val_loss: 0.058610, val_acc: 25.656191
+Epoch [862], train_loss: 0.061722, val_loss: 0.058586, val_acc: 25.649906
+Epoch [863], train_loss: 0.061583, val_loss: 0.058526, val_acc: 25.690413
+Epoch [864], train_loss: 0.061660, val_loss: 0.058598, val_acc: 25.613802
+Epoch [865], train_loss: 0.061550, val_loss: 0.058585, val_acc: 25.643379
+Epoch [866], train_loss: 0.061274, val_loss: 0.058656, val_acc: 25.497818
+Epoch [867], train_loss: 0.061566, val_loss: 0.058739, val_acc: 25.563425
+Epoch [868], train_loss: 0.061597, val_loss: 0.058663, val_acc: 25.546412
+Epoch [869], train_loss: 0.061478, val_loss: 0.058681, val_acc: 25.617119
+Epoch [870], train_loss: 0.061568, val_loss: 0.058562, val_acc: 25.589775
+Epoch [871], train_loss: 0.061567, val_loss: 0.058676, val_acc: 25.597046
+Epoch [872], train_loss: 0.061502, val_loss: 0.058566, val_acc: 25.656490
+Epoch [873], train_loss: 0.061530, val_loss: 0.058569, val_acc: 25.640242
+Epoch [874], train_loss: 0.061435, val_loss: 0.058618, val_acc: 25.639528
+Epoch [875], train_loss: 0.061486, val_loss: 0.058614, val_acc: 25.691984
+Epoch [876], train_loss: 0.061604, val_loss: 0.058688, val_acc: 25.592285
+Epoch [877], train_loss: 0.061404, val_loss: 0.058525, val_acc: 25.604973
+Epoch [878], train_loss: 0.061511, val_loss: 0.058747, val_acc: 25.495888
+Epoch [879], train_loss: 0.061700, val_loss: 0.058532, val_acc: 25.625853
+Epoch [880], train_loss: 0.061639, val_loss: 0.058684, val_acc: 25.608004
+Epoch [881], train_loss: 0.061399, val_loss: 0.058645, val_acc: 25.593815
+Epoch [882], train_loss: 0.061694, val_loss: 0.058496, val_acc: 25.668877
+Epoch [883], train_loss: 0.061478, val_loss: 0.058533, val_acc: 25.660093
+Epoch [884], train_loss: 0.061507, val_loss: 0.058568, val_acc: 25.633387
+Epoch [885], train_loss: 0.061415, val_loss: 0.058469, val_acc: 25.685320
+Epoch [886], train_loss: 0.061659, val_loss: 0.058508, val_acc: 25.657009
+Epoch [887], train_loss: 0.061567, val_loss: 0.058527, val_acc: 25.700447
+Epoch [888], train_loss: 0.061579, val_loss: 0.058557, val_acc: 25.600832
+Epoch [889], train_loss: 0.061343, val_loss: 0.058574, val_acc: 25.647335
+Epoch [890], train_loss: 0.061412, val_loss: 0.058617, val_acc: 25.623854
+Epoch [891], train_loss: 0.061457, val_loss: 0.058508, val_acc: 25.695210
+Epoch [892], train_loss: 0.061546, val_loss: 0.058475, val_acc: 25.675428
+Epoch [893], train_loss: 0.061741, val_loss: 0.058650, val_acc: 25.611303
+Epoch [894], train_loss: 0.061500, val_loss: 0.058686, val_acc: 25.578131
+Epoch [895], train_loss: 0.061503, val_loss: 0.058630, val_acc: 25.652981
+Epoch [896], train_loss: 0.061644, val_loss: 0.058484, val_acc: 25.645687
+Epoch [897], train_loss: 0.061658, val_loss: 0.058599, val_acc: 25.607363
+Epoch [898], train_loss: 0.061517, val_loss: 0.058521, val_acc: 25.701555
+Epoch [899], train_loss: 0.061555, val_loss: 0.058448, val_acc: 25.735777
+Epoch [900], train_loss: 0.061408, val_loss: 0.058537, val_acc: 25.637438
+Epoch [901], train_loss: 0.061812, val_loss: 0.058479, val_acc: 25.637867
+Epoch [902], train_loss: 0.061582, val_loss: 0.058500, val_acc: 25.620502
+Epoch [903], train_loss: 0.061576, val_loss: 0.058508, val_acc: 25.668575
+Epoch [904], train_loss: 0.061647, val_loss: 0.058635, val_acc: 25.627771
+Epoch [905], train_loss: 0.061330, val_loss: 0.058492, val_acc: 25.644869
+Epoch [906], train_loss: 0.061414, val_loss: 0.058509, val_acc: 25.674759
+Epoch [907], train_loss: 0.061444, val_loss: 0.058522, val_acc: 25.648806
+Epoch [908], train_loss: 0.061228, val_loss: 0.058531, val_acc: 25.658575
+Epoch [909], train_loss: 0.061635, val_loss: 0.058604, val_acc: 25.671755
+Epoch [910], train_loss: 0.061485, val_loss: 0.058481, val_acc: 25.676401
+Epoch [911], train_loss: 0.061552, val_loss: 0.058533, val_acc: 25.669071
+Epoch [912], train_loss: 0.061451, val_loss: 0.058517, val_acc: 25.677168
+Epoch [913], train_loss: 0.061407, val_loss: 0.058563, val_acc: 25.591816
+Epoch [914], train_loss: 0.061400, val_loss: 0.058523, val_acc: 25.612026
+Epoch [915], train_loss: 0.061311, val_loss: 0.058444, val_acc: 25.722633
+Epoch [916], train_loss: 0.061336, val_loss: 0.058376, val_acc: 25.717262
+Epoch [917], train_loss: 0.061363, val_loss: 0.058468, val_acc: 25.695511
+Epoch [918], train_loss: 0.061399, val_loss: 0.058486, val_acc: 25.668758
+Epoch [919], train_loss: 0.061484, val_loss: 0.058474, val_acc: 25.684206
+Epoch [920], train_loss: 0.061575, val_loss: 0.058599, val_acc: 25.637941
+Epoch [921], train_loss: 0.061433, val_loss: 0.058454, val_acc: 25.734316
+Epoch [922], train_loss: 0.061688, val_loss: 0.058553, val_acc: 25.633295
+Epoch [923], train_loss: 0.061485, val_loss: 0.058435, val_acc: 25.744438
+Epoch [924], train_loss: 0.061367, val_loss: 0.058476, val_acc: 25.675404
+Epoch [925], train_loss: 0.061473, val_loss: 0.058582, val_acc: 25.662790
+Epoch [926], train_loss: 0.061547, val_loss: 0.058489, val_acc: 25.641411
+Epoch [927], train_loss: 0.061516, val_loss: 0.058591, val_acc: 25.614120
+Epoch [928], train_loss: 0.061491, val_loss: 0.058562, val_acc: 25.667429
+Epoch [929], train_loss: 0.061592, val_loss: 0.058512, val_acc: 25.658968
+Epoch [930], train_loss: 0.061361, val_loss: 0.058471, val_acc: 25.720037
+Epoch [931], train_loss: 0.061436, val_loss: 0.058468, val_acc: 25.705896
+Epoch [932], train_loss: 0.061334, val_loss: 0.058555, val_acc: 25.635929
+Epoch [933], train_loss: 0.061391, val_loss: 0.058509, val_acc: 25.620405
+Epoch [934], train_loss: 0.061582, val_loss: 0.058555, val_acc: 25.645348
+Epoch [935], train_loss: 0.061613, val_loss: 0.058315, val_acc: 25.738592
+Epoch [936], train_loss: 0.061466, val_loss: 0.058493, val_acc: 25.674086
+Epoch [937], train_loss: 0.061349, val_loss: 0.058462, val_acc: 25.678534
+Epoch [938], train_loss: 0.061386, val_loss: 0.058445, val_acc: 25.654728
+Epoch [939], train_loss: 0.061624, val_loss: 0.058658, val_acc: 25.583458
+Epoch [940], train_loss: 0.061481, val_loss: 0.058499, val_acc: 25.660280
+Epoch [941], train_loss: 0.061397, val_loss: 0.058565, val_acc: 25.617619
+Epoch [942], train_loss: 0.061533, val_loss: 0.058477, val_acc: 25.657877
+Epoch [943], train_loss: 0.061421, val_loss: 0.058550, val_acc: 25.676994
+Epoch [944], train_loss: 0.061443, val_loss: 0.058568, val_acc: 25.623682
+Epoch [945], train_loss: 0.061372, val_loss: 0.058509, val_acc: 25.621180
+Epoch [946], train_loss: 0.061521, val_loss: 0.058480, val_acc: 25.668404
+Epoch [947], train_loss: 0.061446, val_loss: 0.058479, val_acc: 25.641649
+Epoch [948], train_loss: 0.061460, val_loss: 0.058499, val_acc: 25.638119
+Epoch [949], train_loss: 0.061435, val_loss: 0.058410, val_acc: 25.681301
+Epoch [950], train_loss: 0.061588, val_loss: 0.058644, val_acc: 25.610897
+Epoch [951], train_loss: 0.061320, val_loss: 0.058529, val_acc: 25.608238
+Epoch [952], train_loss: 0.061501, val_loss: 0.058518, val_acc: 25.654627
+Epoch [953], train_loss: 0.061399, val_loss: 0.058473, val_acc: 25.618618
+Epoch [954], train_loss: 0.061413, val_loss: 0.058603, val_acc: 25.636002
+Epoch [955], train_loss: 0.061446, val_loss: 0.058589, val_acc: 25.591572
+Epoch [956], train_loss: 0.061339, val_loss: 0.058511, val_acc: 25.652338
+Epoch [957], train_loss: 0.061416, val_loss: 0.058539, val_acc: 25.650576
+Epoch [958], train_loss: 0.061469, val_loss: 0.058427, val_acc: 25.739481
+Epoch [959], train_loss: 0.061270, val_loss: 0.058345, val_acc: 25.716915
+Epoch [960], train_loss: 0.061470, val_loss: 0.058552, val_acc: 25.622911
+Epoch [961], train_loss: 0.061361, val_loss: 0.058479, val_acc: 25.656412
+Epoch [962], train_loss: 0.061466, val_loss: 0.058581, val_acc: 25.630905
+Epoch [963], train_loss: 0.061513, val_loss: 0.058467, val_acc: 25.680605
+Epoch [964], train_loss: 0.061448, val_loss: 0.058489, val_acc: 25.613697
+Epoch [965], train_loss: 0.061386, val_loss: 0.058544, val_acc: 25.616621
+Epoch [966], train_loss: 0.061344, val_loss: 0.058402, val_acc: 25.662010
+Epoch [967], train_loss: 0.061363, val_loss: 0.058421, val_acc: 25.714445
+Epoch [968], train_loss: 0.061437, val_loss: 0.058644, val_acc: 25.615223
+Epoch [969], train_loss: 0.061320, val_loss: 0.058481, val_acc: 25.692307
+Epoch [970], train_loss: 0.061383, val_loss: 0.058537, val_acc: 25.585045
+Epoch [971], train_loss: 0.061281, val_loss: 0.058403, val_acc: 25.676929
+Epoch [972], train_loss: 0.061644, val_loss: 0.058343, val_acc: 25.731583
+Epoch [973], train_loss: 0.061383, val_loss: 0.058465, val_acc: 25.684586
+Epoch [974], train_loss: 0.061594, val_loss: 0.058511, val_acc: 25.649899
+Epoch [975], train_loss: 0.061456, val_loss: 0.058285, val_acc: 25.770969
+Epoch [976], train_loss: 0.061382, val_loss: 0.058524, val_acc: 25.662159
+Epoch [977], train_loss: 0.061432, val_loss: 0.058401, val_acc: 25.668941
+Epoch [978], train_loss: 0.061409, val_loss: 0.058431, val_acc: 25.614273
+Epoch [979], train_loss: 0.061294, val_loss: 0.058629, val_acc: 25.665472
+Epoch [980], train_loss: 0.061437, val_loss: 0.058491, val_acc: 25.642326
+Epoch [981], train_loss: 0.061332, val_loss: 0.058325, val_acc: 25.780003
+Epoch [982], train_loss: 0.061396, val_loss: 0.058360, val_acc: 25.747776
+Epoch [983], train_loss: 0.061431, val_loss: 0.058498, val_acc: 25.633457
+Epoch [984], train_loss: 0.061494, val_loss: 0.058496, val_acc: 25.623795
+Epoch [985], train_loss: 0.061238, val_loss: 0.058450, val_acc: 25.663118
+Epoch [986], train_loss: 0.061459, val_loss: 0.058538, val_acc: 25.632891
+Epoch [987], train_loss: 0.061479, val_loss: 0.058564, val_acc: 25.586542
+Epoch [988], train_loss: 0.061590, val_loss: 0.058527, val_acc: 25.627998
+Epoch [989], train_loss: 0.061477, val_loss: 0.058427, val_acc: 25.647627
+Epoch [990], train_loss: 0.061275, val_loss: 0.058388, val_acc: 25.697241
+Epoch [991], train_loss: 0.061376, val_loss: 0.058333, val_acc: 25.732111
+Epoch [992], train_loss: 0.061623, val_loss: 0.058556, val_acc: 25.594183
+Epoch [993], train_loss: 0.061392, val_loss: 0.058595, val_acc: 25.633558
+Epoch [994], train_loss: 0.061539, val_loss: 0.058489, val_acc: 25.629187
+Epoch [995], train_loss: 0.061474, val_loss: 0.058488, val_acc: 25.645372
+Epoch [996], train_loss: 0.061243, val_loss: 0.058515, val_acc: 25.647219
+Epoch [997], train_loss: 0.061364, val_loss: 0.058489, val_acc: 25.663162
+Epoch [998], train_loss: 0.061525, val_loss: 0.058412, val_acc: 25.673496
+Epoch [999], train_loss: 0.061490, val_loss: 0.058489, val_acc: 25.631830
+Epoch [1000], train_loss: 0.061246, val_loss: 0.058476, val_acc: 25.624235
+Epoch [1001], train_loss: 0.061242, val_loss: 0.058510, val_acc: 25.669981
+Epoch [1002], train_loss: 0.061412, val_loss: 0.058483, val_acc: 25.643303
+Epoch [1003], train_loss: 0.061438, val_loss: 0.058510, val_acc: 25.692793
+Epoch [1004], train_loss: 0.061250, val_loss: 0.058421, val_acc: 25.652990
+Epoch [1005], train_loss: 0.061211, val_loss: 0.058370, val_acc: 25.718672
+Epoch [1006], train_loss: 0.061154, val_loss: 0.058645, val_acc: 25.554737
+Epoch [1007], train_loss: 0.061398, val_loss: 0.058383, val_acc: 25.684904
+Epoch [1008], train_loss: 0.061146, val_loss: 0.058366, val_acc: 25.649836
+Epoch [1009], train_loss: 0.061164, val_loss: 0.058249, val_acc: 25.796473
+Epoch [1010], train_loss: 0.061563, val_loss: 0.058319, val_acc: 25.652760
+Epoch [1011], train_loss: 0.061241, val_loss: 0.058529, val_acc: 25.619280
+Epoch [1012], train_loss: 0.061200, val_loss: 0.058396, val_acc: 25.661650
+Epoch [1013], train_loss: 0.061279, val_loss: 0.058418, val_acc: 25.697445
+Epoch [1014], train_loss: 0.061378, val_loss: 0.058414, val_acc: 25.702459
+Epoch [1015], train_loss: 0.061310, val_loss: 0.058633, val_acc: 25.553221
+Epoch [1016], train_loss: 0.061342, val_loss: 0.058421, val_acc: 25.600939
+Epoch [1017], train_loss: 0.061351, val_loss: 0.058507, val_acc: 25.592239
+Epoch [1018], train_loss: 0.061344, val_loss: 0.058387, val_acc: 25.655516
+Epoch [1019], train_loss: 0.061384, val_loss: 0.058404, val_acc: 25.675512
+Epoch [1020], train_loss: 0.061421, val_loss: 0.058497, val_acc: 25.611216
+Epoch [1021], train_loss: 0.061433, val_loss: 0.058340, val_acc: 25.688536
+Epoch [1022], train_loss: 0.061218, val_loss: 0.058368, val_acc: 25.671505
+Epoch [1023], train_loss: 0.061358, val_loss: 0.058445, val_acc: 25.652775
+Epoch [1024], train_loss: 0.061401, val_loss: 0.058269, val_acc: 25.775450
+Epoch [1025], train_loss: 0.061340, val_loss: 0.058284, val_acc: 25.790354
+Epoch [1026], train_loss: 0.061269, val_loss: 0.058379, val_acc: 25.699976
+Epoch [1027], train_loss: 0.061395, val_loss: 0.058363, val_acc: 25.638851
+Epoch [1028], train_loss: 0.061127, val_loss: 0.058385, val_acc: 25.683901
+Epoch [1029], train_loss: 0.061177, val_loss: 0.058438, val_acc: 25.641209
+Epoch [1030], train_loss: 0.061313, val_loss: 0.058454, val_acc: 25.656664
+Epoch [1031], train_loss: 0.061343, val_loss: 0.058448, val_acc: 25.669594
+Epoch [1032], train_loss: 0.061389, val_loss: 0.058400, val_acc: 25.615370
+Epoch [1033], train_loss: 0.061373, val_loss: 0.058430, val_acc: 25.667458
+Epoch [1034], train_loss: 0.061260, val_loss: 0.058413, val_acc: 25.660070
+Epoch [1035], train_loss: 0.061338, val_loss: 0.058448, val_acc: 25.661465
+Epoch [1036], train_loss: 0.061454, val_loss: 0.058458, val_acc: 25.643011
+Epoch [1037], train_loss: 0.061452, val_loss: 0.058464, val_acc: 25.637659
+Epoch [1038], train_loss: 0.061454, val_loss: 0.058379, val_acc: 25.647387
+Epoch [1039], train_loss: 0.061164, val_loss: 0.058427, val_acc: 25.645983
+Epoch [1040], train_loss: 0.061344, val_loss: 0.058321, val_acc: 25.745682
+Epoch [1041], train_loss: 0.061319, val_loss: 0.058473, val_acc: 25.651535
+Epoch [1042], train_loss: 0.061449, val_loss: 0.058451, val_acc: 25.671738
+Epoch [1043], train_loss: 0.061276, val_loss: 0.058335, val_acc: 25.721888
+Epoch [1044], train_loss: 0.061415, val_loss: 0.058349, val_acc: 25.709549
+Epoch [1045], train_loss: 0.061197, val_loss: 0.058361, val_acc: 25.697874
+Epoch [1046], train_loss: 0.061293, val_loss: 0.058316, val_acc: 25.692051
+Epoch [1047], train_loss: 0.061203, val_loss: 0.058390, val_acc: 25.698887
+Epoch [1048], train_loss: 0.061493, val_loss: 0.058398, val_acc: 25.671888
+Epoch [1049], train_loss: 0.061115, val_loss: 0.058473, val_acc: 25.624762
+Epoch [1050], train_loss: 0.061306, val_loss: 0.058469, val_acc: 25.617189
+Epoch [1051], train_loss: 0.061288, val_loss: 0.058395, val_acc: 25.676275
+Epoch [1052], train_loss: 0.061210, val_loss: 0.058473, val_acc: 25.630486
+Epoch [1053], train_loss: 0.061294, val_loss: 0.058390, val_acc: 25.688807
+Epoch [1054], train_loss: 0.061381, val_loss: 0.058366, val_acc: 25.662298
+Epoch [1055], train_loss: 0.061220, val_loss: 0.058471, val_acc: 25.642179
+Epoch [1056], train_loss: 0.061364, val_loss: 0.058419, val_acc: 25.644035
+Epoch [1057], train_loss: 0.061226, val_loss: 0.058479, val_acc: 25.557787
+Epoch [1058], train_loss: 0.061175, val_loss: 0.058401, val_acc: 25.637056
+Epoch [1059], train_loss: 0.061162, val_loss: 0.058281, val_acc: 25.715691
+Epoch [1060], train_loss: 0.061291, val_loss: 0.058436, val_acc: 25.685911
+Epoch [1061], train_loss: 0.061421, val_loss: 0.058357, val_acc: 25.652758
+Epoch [1062], train_loss: 0.061185, val_loss: 0.058389, val_acc: 25.641243
+Epoch [1063], train_loss: 0.061371, val_loss: 0.058478, val_acc: 25.661840
+Epoch [1064], train_loss: 0.061263, val_loss: 0.058367, val_acc: 25.659189
+Epoch [1065], train_loss: 0.061319, val_loss: 0.058455, val_acc: 25.636879
+Epoch [1066], train_loss: 0.061409, val_loss: 0.058356, val_acc: 25.646614
+Epoch [1067], train_loss: 0.061249, val_loss: 0.058407, val_acc: 25.696495
+Epoch [1068], train_loss: 0.061298, val_loss: 0.058359, val_acc: 25.720182
+Epoch [1069], train_loss: 0.061308, val_loss: 0.058415, val_acc: 25.720291
+Epoch [1070], train_loss: 0.061484, val_loss: 0.058345, val_acc: 25.733530
+Epoch [1071], train_loss: 0.061333, val_loss: 0.058415, val_acc: 25.638548
+Epoch [1072], train_loss: 0.061291, val_loss: 0.058329, val_acc: 25.711685
+Epoch [1073], train_loss: 0.061145, val_loss: 0.058349, val_acc: 25.681694
+Epoch [1074], train_loss: 0.061388, val_loss: 0.058457, val_acc: 25.654650
+Epoch [1075], train_loss: 0.061257, val_loss: 0.058266, val_acc: 25.769346
+Epoch [1076], train_loss: 0.061281, val_loss: 0.058314, val_acc: 25.726557
+Epoch [1077], train_loss: 0.061244, val_loss: 0.058285, val_acc: 25.739964
+Epoch [1078], train_loss: 0.061245, val_loss: 0.058418, val_acc: 25.604176
+Epoch [1079], train_loss: 0.061140, val_loss: 0.058388, val_acc: 25.691776
+Epoch [1080], train_loss: 0.061372, val_loss: 0.058299, val_acc: 25.732702
+Epoch [1081], train_loss: 0.061412, val_loss: 0.058411, val_acc: 25.596655
+Epoch [1082], train_loss: 0.061306, val_loss: 0.058329, val_acc: 25.693836
+Epoch [1083], train_loss: 0.061122, val_loss: 0.058503, val_acc: 25.596033
+Epoch [1084], train_loss: 0.061192, val_loss: 0.058428, val_acc: 25.659176
+Epoch [1085], train_loss: 0.061193, val_loss: 0.058335, val_acc: 25.687120
+Epoch [1086], train_loss: 0.061249, val_loss: 0.058446, val_acc: 25.671959
+Epoch [1087], train_loss: 0.061335, val_loss: 0.058305, val_acc: 25.740973
+Epoch [1088], train_loss: 0.061091, val_loss: 0.058375, val_acc: 25.658241
+Epoch [1089], train_loss: 0.061297, val_loss: 0.058403, val_acc: 25.647051
+Epoch [1090], train_loss: 0.061178, val_loss: 0.058324, val_acc: 25.713118
+Epoch [1091], train_loss: 0.061150, val_loss: 0.058233, val_acc: 25.730547
+Epoch [1092], train_loss: 0.061246, val_loss: 0.058348, val_acc: 25.699526
+Epoch [1093], train_loss: 0.061139, val_loss: 0.058473, val_acc: 25.636419
+Epoch [1094], train_loss: 0.061151, val_loss: 0.058386, val_acc: 25.679890
+Epoch [1095], train_loss: 0.061119, val_loss: 0.058344, val_acc: 25.653336
+Epoch [1096], train_loss: 0.061073, val_loss: 0.058231, val_acc: 25.764105
+Epoch [1097], train_loss: 0.061465, val_loss: 0.058438, val_acc: 25.682314
+Epoch [1098], train_loss: 0.061313, val_loss: 0.058339, val_acc: 25.749445
+Epoch [1099], train_loss: 0.061218, val_loss: 0.058313, val_acc: 25.708366
+Epoch [1100], train_loss: 0.061230, val_loss: 0.058332, val_acc: 25.668440
+Epoch [1101], train_loss: 0.061006, val_loss: 0.058360, val_acc: 25.686411
+Epoch [1102], train_loss: 0.061077, val_loss: 0.058385, val_acc: 25.643530
+Epoch [1103], train_loss: 0.061270, val_loss: 0.058417, val_acc: 25.676050
+Epoch [1104], train_loss: 0.061245, val_loss: 0.058293, val_acc: 25.696751
+Epoch [1105], train_loss: 0.061191, val_loss: 0.058334, val_acc: 25.697172
+Epoch [1106], train_loss: 0.061354, val_loss: 0.058410, val_acc: 25.637966
+Epoch [1107], train_loss: 0.061224, val_loss: 0.058377, val_acc: 25.672762
+Epoch [1108], train_loss: 0.061338, val_loss: 0.058321, val_acc: 25.736061
+Epoch [1109], train_loss: 0.061271, val_loss: 0.058299, val_acc: 25.740879
+Epoch [1110], train_loss: 0.061114, val_loss: 0.058383, val_acc: 25.607019
+Epoch [1111], train_loss: 0.061160, val_loss: 0.058422, val_acc: 25.665773
+Epoch [1112], train_loss: 0.061419, val_loss: 0.058407, val_acc: 25.695646
+Epoch [1113], train_loss: 0.061325, val_loss: 0.058386, val_acc: 25.715818
+Epoch [1114], train_loss: 0.061094, val_loss: 0.058292, val_acc: 25.722422
+Epoch [1115], train_loss: 0.061352, val_loss: 0.058270, val_acc: 25.735804
+Epoch [1116], train_loss: 0.061282, val_loss: 0.058349, val_acc: 25.729868
+Epoch [1117], train_loss: 0.061065, val_loss: 0.058358, val_acc: 25.727900
+Epoch [1118], train_loss: 0.061148, val_loss: 0.058321, val_acc: 25.701923
+Epoch [1119], train_loss: 0.061523, val_loss: 0.058370, val_acc: 25.675877
+Epoch [1120], train_loss: 0.061280, val_loss: 0.058306, val_acc: 25.703867
+Epoch [1121], train_loss: 0.061046, val_loss: 0.058310, val_acc: 25.700417
+Epoch [1122], train_loss: 0.061422, val_loss: 0.058432, val_acc: 25.645603
+Epoch [1123], train_loss: 0.061219, val_loss: 0.058377, val_acc: 25.650206
+Epoch [1124], train_loss: 0.061228, val_loss: 0.058291, val_acc: 25.722038
+Epoch [1125], train_loss: 0.061098, val_loss: 0.058341, val_acc: 25.707878
+Epoch [1126], train_loss: 0.061253, val_loss: 0.058357, val_acc: 25.718319
+Epoch [1127], train_loss: 0.061144, val_loss: 0.058384, val_acc: 25.688540
+Epoch [1128], train_loss: 0.061113, val_loss: 0.058432, val_acc: 25.622316
+Epoch [1129], train_loss: 0.061238, val_loss: 0.058314, val_acc: 25.665716
+Epoch [1130], train_loss: 0.061201, val_loss: 0.058341, val_acc: 25.703850
+Epoch [1131], train_loss: 0.061141, val_loss: 0.058449, val_acc: 25.627693
+Epoch [1132], train_loss: 0.061163, val_loss: 0.058387, val_acc: 25.636126
+Epoch [1133], train_loss: 0.061276, val_loss: 0.058285, val_acc: 25.692507
+Epoch [1134], train_loss: 0.061322, val_loss: 0.058333, val_acc: 25.723639
+Epoch [1135], train_loss: 0.061285, val_loss: 0.058371, val_acc: 25.615147
+Epoch [1136], train_loss: 0.061214, val_loss: 0.058322, val_acc: 25.684065
+Epoch [1137], train_loss: 0.061289, val_loss: 0.058438, val_acc: 25.611677
+Epoch [1138], train_loss: 0.061141, val_loss: 0.058308, val_acc: 25.695421
+Epoch [1139], train_loss: 0.061316, val_loss: 0.058328, val_acc: 25.759356
+Epoch [1140], train_loss: 0.061242, val_loss: 0.058337, val_acc: 25.650692
+Epoch [1141], train_loss: 0.061295, val_loss: 0.058282, val_acc: 25.722376
+Epoch [1142], train_loss: 0.061337, val_loss: 0.058455, val_acc: 25.547401
+Epoch [1143], train_loss: 0.061315, val_loss: 0.058387, val_acc: 25.703798
+Epoch [1144], train_loss: 0.061287, val_loss: 0.058360, val_acc: 25.628986
+Epoch [1145], train_loss: 0.061063, val_loss: 0.058409, val_acc: 25.651899
+Epoch [1146], train_loss: 0.061373, val_loss: 0.058260, val_acc: 25.696568
+Epoch [1147], train_loss: 0.061249, val_loss: 0.058389, val_acc: 25.636816
+Epoch [1148], train_loss: 0.061094, val_loss: 0.058363, val_acc: 25.687885
+Epoch [1149], train_loss: 0.061369, val_loss: 0.058217, val_acc: 25.774912
+Epoch [1150], train_loss: 0.061152, val_loss: 0.058303, val_acc: 25.710609
+Epoch [1151], train_loss: 0.061266, val_loss: 0.058271, val_acc: 25.696400
+Epoch [1152], train_loss: 0.061086, val_loss: 0.058440, val_acc: 25.618477
+Epoch [1153], train_loss: 0.061061, val_loss: 0.058302, val_acc: 25.694370
+Epoch [1154], train_loss: 0.061157, val_loss: 0.058333, val_acc: 25.659168
+Epoch [1155], train_loss: 0.061142, val_loss: 0.058228, val_acc: 25.706699
+Epoch [1156], train_loss: 0.061365, val_loss: 0.058348, val_acc: 25.689051
+Epoch [1157], train_loss: 0.061225, val_loss: 0.058346, val_acc: 25.643438
+Epoch [1158], train_loss: 0.061107, val_loss: 0.058380, val_acc: 25.686764
+Epoch [1159], train_loss: 0.061183, val_loss: 0.058404, val_acc: 25.642580
+Epoch [1160], train_loss: 0.061279, val_loss: 0.058284, val_acc: 25.740608
+Epoch [1161], train_loss: 0.061307, val_loss: 0.058327, val_acc: 25.684788
+Epoch [1162], train_loss: 0.061204, val_loss: 0.058297, val_acc: 25.684074
+Epoch [1163], train_loss: 0.061237, val_loss: 0.058374, val_acc: 25.686998
+Epoch [1164], train_loss: 0.061195, val_loss: 0.058312, val_acc: 25.696846
+Epoch [1165], train_loss: 0.061191, val_loss: 0.058403, val_acc: 25.634773
+Epoch [1166], train_loss: 0.061118, val_loss: 0.058292, val_acc: 25.700262
+Epoch [1167], train_loss: 0.061248, val_loss: 0.058302, val_acc: 25.731586
+Epoch [1168], train_loss: 0.061291, val_loss: 0.058477, val_acc: 25.606037
+Epoch [1169], train_loss: 0.061220, val_loss: 0.058404, val_acc: 25.718193
+Epoch [1170], train_loss: 0.061139, val_loss: 0.058261, val_acc: 25.722324
+Epoch [1171], train_loss: 0.061260, val_loss: 0.058220, val_acc: 25.736193
+Epoch [1172], train_loss: 0.061185, val_loss: 0.058258, val_acc: 25.699530
+Epoch [1173], train_loss: 0.061231, val_loss: 0.058188, val_acc: 25.814369
+Epoch [1174], train_loss: 0.061269, val_loss: 0.058282, val_acc: 25.744102
+Epoch [1175], train_loss: 0.061162, val_loss: 0.058203, val_acc: 25.679895
+Epoch [1176], train_loss: 0.061147, val_loss: 0.058266, val_acc: 25.681278
+Epoch [1177], train_loss: 0.061225, val_loss: 0.058311, val_acc: 25.726143
+Epoch [1178], train_loss: 0.061043, val_loss: 0.058184, val_acc: 25.759907
+Epoch [1179], train_loss: 0.061115, val_loss: 0.058192, val_acc: 25.749510
+Epoch [1180], train_loss: 0.061323, val_loss: 0.058354, val_acc: 25.691914
+Epoch [1181], train_loss: 0.061143, val_loss: 0.058318, val_acc: 25.669346
+Epoch [1182], train_loss: 0.061019, val_loss: 0.058300, val_acc: 25.661732
+Epoch [1183], train_loss: 0.061273, val_loss: 0.058134, val_acc: 25.828117
+Epoch [1184], train_loss: 0.061284, val_loss: 0.058354, val_acc: 25.699558
+Epoch [1185], train_loss: 0.061222, val_loss: 0.058235, val_acc: 25.773544
+Epoch [1186], train_loss: 0.061156, val_loss: 0.058316, val_acc: 25.665218
+Epoch [1187], train_loss: 0.061100, val_loss: 0.058303, val_acc: 25.688156
+Epoch [1188], train_loss: 0.061087, val_loss: 0.058296, val_acc: 25.721357
+Epoch [1189], train_loss: 0.061078, val_loss: 0.058304, val_acc: 25.704708
+Epoch [1190], train_loss: 0.061155, val_loss: 0.058340, val_acc: 25.656538
+Epoch [1191], train_loss: 0.061014, val_loss: 0.058311, val_acc: 25.710537
+Epoch [1192], train_loss: 0.061210, val_loss: 0.058365, val_acc: 25.663126
+Epoch [1193], train_loss: 0.061107, val_loss: 0.058292, val_acc: 25.727076
+Epoch [1194], train_loss: 0.061013, val_loss: 0.058315, val_acc: 25.618549
+Epoch [1195], train_loss: 0.061312, val_loss: 0.058321, val_acc: 25.723352
+Epoch [1196], train_loss: 0.061041, val_loss: 0.058168, val_acc: 25.787611
+Epoch [1197], train_loss: 0.061032, val_loss: 0.058288, val_acc: 25.711206
+Epoch [1198], train_loss: 0.061016, val_loss: 0.058321, val_acc: 25.670374
+Epoch [1199], train_loss: 0.060988, val_loss: 0.058209, val_acc: 25.743179
+Epoch [1200], train_loss: 0.061273, val_loss: 0.058218, val_acc: 25.756411
+Epoch [1201], train_loss: 0.061095, val_loss: 0.058220, val_acc: 25.692453
+Epoch [1202], train_loss: 0.061159, val_loss: 0.058277, val_acc: 25.757532
+Epoch [1203], train_loss: 0.061056, val_loss: 0.058219, val_acc: 25.724449
+Epoch [1204], train_loss: 0.061082, val_loss: 0.058266, val_acc: 25.668028
+Epoch [1205], train_loss: 0.060894, val_loss: 0.058150, val_acc: 25.686394
+Epoch [1206], train_loss: 0.060966, val_loss: 0.058252, val_acc: 25.713581
+Epoch [1207], train_loss: 0.061183, val_loss: 0.058210, val_acc: 25.693720
+Epoch [1208], train_loss: 0.061180, val_loss: 0.058224, val_acc: 25.733915
+Epoch [1209], train_loss: 0.061210, val_loss: 0.058328, val_acc: 25.693022
+Epoch [1210], train_loss: 0.061182, val_loss: 0.058300, val_acc: 25.681818
+Epoch [1211], train_loss: 0.061259, val_loss: 0.058185, val_acc: 25.736282
+Epoch [1212], train_loss: 0.061301, val_loss: 0.058299, val_acc: 25.665684
+Epoch [1213], train_loss: 0.061155, val_loss: 0.058282, val_acc: 25.688852
+Epoch [1214], train_loss: 0.061153, val_loss: 0.058448, val_acc: 25.627085
+Epoch [1215], train_loss: 0.061057, val_loss: 0.058272, val_acc: 25.708271
+Epoch [1216], train_loss: 0.061189, val_loss: 0.058261, val_acc: 25.723225
+Epoch [1217], train_loss: 0.061124, val_loss: 0.058327, val_acc: 25.638762
+Epoch [1218], train_loss: 0.061181, val_loss: 0.058273, val_acc: 25.718184
+Epoch [1219], train_loss: 0.061165, val_loss: 0.058292, val_acc: 25.740854
+Epoch [1220], train_loss: 0.061227, val_loss: 0.058167, val_acc: 25.695698
+Epoch [1221], train_loss: 0.061108, val_loss: 0.058257, val_acc: 25.766157
+Epoch [1222], train_loss: 0.061035, val_loss: 0.058333, val_acc: 25.707441
+Epoch [1223], train_loss: 0.061248, val_loss: 0.058332, val_acc: 25.584282
+Epoch [1224], train_loss: 0.061138, val_loss: 0.058292, val_acc: 25.690540
+Epoch [1225], train_loss: 0.061165, val_loss: 0.058273, val_acc: 25.687445
+Epoch [1226], train_loss: 0.061181, val_loss: 0.058299, val_acc: 25.665211
+Epoch [1227], train_loss: 0.060947, val_loss: 0.058359, val_acc: 25.657347
+Epoch [1228], train_loss: 0.061099, val_loss: 0.058133, val_acc: 25.795774
+Epoch [1229], train_loss: 0.061052, val_loss: 0.058305, val_acc: 25.678543
+Epoch [1230], train_loss: 0.061226, val_loss: 0.058193, val_acc: 25.783754
+Epoch [1231], train_loss: 0.061313, val_loss: 0.058341, val_acc: 25.689804
+Epoch [1232], train_loss: 0.061179, val_loss: 0.058268, val_acc: 25.688173
+Epoch [1233], train_loss: 0.061149, val_loss: 0.058214, val_acc: 25.745920
+Epoch [1234], train_loss: 0.061166, val_loss: 0.058243, val_acc: 25.692999
+Epoch [1235], train_loss: 0.061113, val_loss: 0.058210, val_acc: 25.728823
+Epoch [1236], train_loss: 0.061150, val_loss: 0.058166, val_acc: 25.802332
+Epoch [1237], train_loss: 0.061228, val_loss: 0.058189, val_acc: 25.759283
+Epoch [1238], train_loss: 0.061193, val_loss: 0.058346, val_acc: 25.717573
+Epoch [1239], train_loss: 0.061122, val_loss: 0.058221, val_acc: 25.783585
+Epoch [1240], train_loss: 0.061142, val_loss: 0.058229, val_acc: 25.723822
+Epoch [1241], train_loss: 0.060973, val_loss: 0.058244, val_acc: 25.737465
+Epoch [1242], train_loss: 0.061118, val_loss: 0.058311, val_acc: 25.659399
+Epoch [1243], train_loss: 0.061114, val_loss: 0.058080, val_acc: 25.814320
+Epoch [1244], train_loss: 0.061083, val_loss: 0.058313, val_acc: 25.697727
+Epoch [1245], train_loss: 0.061284, val_loss: 0.058329, val_acc: 25.622442
+Epoch [1246], train_loss: 0.061202, val_loss: 0.058161, val_acc: 25.782936
+Epoch [1247], train_loss: 0.061165, val_loss: 0.058271, val_acc: 25.693933
+Epoch [1248], train_loss: 0.061053, val_loss: 0.058289, val_acc: 25.727455
+Epoch [1249], train_loss: 0.061174, val_loss: 0.058204, val_acc: 25.729273
+Epoch [1250], train_loss: 0.061192, val_loss: 0.058214, val_acc: 25.726812
+Epoch [1251], train_loss: 0.060944, val_loss: 0.058126, val_acc: 25.812357
+Epoch [1252], train_loss: 0.061284, val_loss: 0.058306, val_acc: 25.678801
+Epoch [1253], train_loss: 0.061006, val_loss: 0.058330, val_acc: 25.639565
+Epoch [1254], train_loss: 0.061051, val_loss: 0.058208, val_acc: 25.737461
+Epoch [1255], train_loss: 0.060911, val_loss: 0.058222, val_acc: 25.736750
+Epoch [1256], train_loss: 0.061140, val_loss: 0.058196, val_acc: 25.757044
+Epoch [1257], train_loss: 0.061368, val_loss: 0.058404, val_acc: 25.631901
+Epoch [1258], train_loss: 0.060945, val_loss: 0.058296, val_acc: 25.712074
+Epoch [1259], train_loss: 0.060868, val_loss: 0.058158, val_acc: 25.758364
+Epoch [1260], train_loss: 0.061188, val_loss: 0.058264, val_acc: 25.696543
+Epoch [1261], train_loss: 0.061201, val_loss: 0.058204, val_acc: 25.740486
+Epoch [1262], train_loss: 0.061044, val_loss: 0.058343, val_acc: 25.639257
+Epoch [1263], train_loss: 0.061040, val_loss: 0.058196, val_acc: 25.700802
+Epoch [1264], train_loss: 0.060964, val_loss: 0.058225, val_acc: 25.715126
+Epoch [1265], train_loss: 0.061001, val_loss: 0.058201, val_acc: 25.745527
+Epoch [1266], train_loss: 0.061148, val_loss: 0.058386, val_acc: 25.643093
+Epoch [1267], train_loss: 0.061169, val_loss: 0.058271, val_acc: 25.684450
+Epoch [1268], train_loss: 0.061182, val_loss: 0.058139, val_acc: 25.805130
+Epoch [1269], train_loss: 0.060911, val_loss: 0.058208, val_acc: 25.670315
+Epoch [1270], train_loss: 0.061164, val_loss: 0.058257, val_acc: 25.719530
+Epoch [1271], train_loss: 0.061209, val_loss: 0.058264, val_acc: 25.734413
+Epoch [1272], train_loss: 0.061107, val_loss: 0.058250, val_acc: 25.689039
+Epoch [1273], train_loss: 0.061075, val_loss: 0.058251, val_acc: 25.706263
+Epoch [1274], train_loss: 0.061135, val_loss: 0.058230, val_acc: 25.711069
+Epoch [1275], train_loss: 0.061244, val_loss: 0.058313, val_acc: 25.625895
+Epoch [1276], train_loss: 0.060943, val_loss: 0.058109, val_acc: 25.805731
+Epoch [1277], train_loss: 0.061044, val_loss: 0.058167, val_acc: 25.762907
+Epoch [1278], train_loss: 0.061213, val_loss: 0.058111, val_acc: 25.811319
+Epoch [1279], train_loss: 0.061159, val_loss: 0.058313, val_acc: 25.685255
+Epoch [1280], train_loss: 0.061116, val_loss: 0.058236, val_acc: 25.695457
+Epoch [1281], train_loss: 0.061106, val_loss: 0.058177, val_acc: 25.732447
+Epoch [1282], train_loss: 0.060992, val_loss: 0.058114, val_acc: 25.708488
+Epoch [1283], train_loss: 0.061059, val_loss: 0.058259, val_acc: 25.745676
+Epoch [1284], train_loss: 0.061075, val_loss: 0.058269, val_acc: 25.662233
+Epoch [1285], train_loss: 0.061040, val_loss: 0.058238, val_acc: 25.642220
+Epoch [1286], train_loss: 0.060972, val_loss: 0.058439, val_acc: 25.653542
+Epoch [1287], train_loss: 0.060948, val_loss: 0.058157, val_acc: 25.744207
+Epoch [1288], train_loss: 0.060937, val_loss: 0.058300, val_acc: 25.670870
+Epoch [1289], train_loss: 0.061290, val_loss: 0.058301, val_acc: 25.720665
+Epoch [1290], train_loss: 0.060950, val_loss: 0.058299, val_acc: 25.605465
+Epoch [1291], train_loss: 0.061298, val_loss: 0.058254, val_acc: 25.702620
+Epoch [1292], train_loss: 0.061183, val_loss: 0.058318, val_acc: 25.666843
+Epoch [1293], train_loss: 0.061022, val_loss: 0.058290, val_acc: 25.698448
+Epoch [1294], train_loss: 0.061081, val_loss: 0.058208, val_acc: 25.695868
+Epoch [1295], train_loss: 0.061141, val_loss: 0.058119, val_acc: 25.751583
+Epoch [1296], train_loss: 0.061104, val_loss: 0.058144, val_acc: 25.793089
+Epoch [1297], train_loss: 0.060825, val_loss: 0.058244, val_acc: 25.642780
+Epoch [1298], train_loss: 0.061067, val_loss: 0.058235, val_acc: 25.682814
+Epoch [1299], train_loss: 0.060959, val_loss: 0.058158, val_acc: 25.758858
+Epoch [1300], train_loss: 0.061191, val_loss: 0.058281, val_acc: 25.605047
+Epoch [1301], train_loss: 0.061017, val_loss: 0.058171, val_acc: 25.722155
+Epoch [1302], train_loss: 0.060944, val_loss: 0.058180, val_acc: 25.765095
+Epoch [1303], train_loss: 0.061028, val_loss: 0.058156, val_acc: 25.711237
+Epoch [1304], train_loss: 0.061010, val_loss: 0.058163, val_acc: 25.793398
+Epoch [1305], train_loss: 0.061119, val_loss: 0.058199, val_acc: 25.645166
+Epoch [1306], train_loss: 0.061012, val_loss: 0.058199, val_acc: 25.763178
+Epoch [1307], train_loss: 0.061036, val_loss: 0.058046, val_acc: 25.841141
+Epoch [1308], train_loss: 0.061281, val_loss: 0.058184, val_acc: 25.725325
+Epoch [1309], train_loss: 0.061065, val_loss: 0.058207, val_acc: 25.683008
+Epoch [1310], train_loss: 0.061012, val_loss: 0.058265, val_acc: 25.650072
+Epoch [1311], train_loss: 0.061015, val_loss: 0.058184, val_acc: 25.769041
+Epoch [1312], train_loss: 0.061078, val_loss: 0.058234, val_acc: 25.717688
+Epoch [1313], train_loss: 0.060904, val_loss: 0.058279, val_acc: 25.684586
+Epoch [1314], train_loss: 0.061040, val_loss: 0.058115, val_acc: 25.771946
+Epoch [1315], train_loss: 0.061076, val_loss: 0.058171, val_acc: 25.742867
+Epoch [1316], train_loss: 0.060934, val_loss: 0.058243, val_acc: 25.664768
+Epoch [1317], train_loss: 0.061114, val_loss: 0.058309, val_acc: 25.641768
+Epoch [1318], train_loss: 0.061152, val_loss: 0.058177, val_acc: 25.742264
+Epoch [1319], train_loss: 0.060946, val_loss: 0.058157, val_acc: 25.740940
+Epoch [1320], train_loss: 0.061067, val_loss: 0.058200, val_acc: 25.752537
+Epoch [1321], train_loss: 0.061054, val_loss: 0.058240, val_acc: 25.688240
+Epoch [1322], train_loss: 0.061056, val_loss: 0.058181, val_acc: 25.718939
+Epoch [1323], train_loss: 0.060845, val_loss: 0.058269, val_acc: 25.656187
+Epoch [1324], train_loss: 0.060951, val_loss: 0.058130, val_acc: 25.800585
+Epoch [1325], train_loss: 0.061337, val_loss: 0.058187, val_acc: 25.749653
+Epoch [1326], train_loss: 0.061123, val_loss: 0.058130, val_acc: 25.769920
+Epoch [1327], train_loss: 0.061052, val_loss: 0.058180, val_acc: 25.745874
+Epoch [1328], train_loss: 0.060851, val_loss: 0.058189, val_acc: 25.698887
+Epoch [1329], train_loss: 0.061074, val_loss: 0.058132, val_acc: 25.726353
+Epoch [1330], train_loss: 0.061125, val_loss: 0.058245, val_acc: 25.697813
+Epoch [1331], train_loss: 0.061081, val_loss: 0.058183, val_acc: 25.723494
+Epoch [1332], train_loss: 0.061061, val_loss: 0.058127, val_acc: 25.777977
+Epoch [1333], train_loss: 0.061178, val_loss: 0.058265, val_acc: 25.707182
+Epoch [1334], train_loss: 0.061346, val_loss: 0.058195, val_acc: 25.730301
+Epoch [1335], train_loss: 0.061051, val_loss: 0.058060, val_acc: 25.799435
+Epoch [1336], train_loss: 0.061264, val_loss: 0.058095, val_acc: 25.764826
+Epoch [1337], train_loss: 0.060986, val_loss: 0.058194, val_acc: 25.685369
+Epoch [1338], train_loss: 0.061199, val_loss: 0.058172, val_acc: 25.740656
+Epoch [1339], train_loss: 0.060961, val_loss: 0.058218, val_acc: 25.750128
+Epoch [1340], train_loss: 0.060998, val_loss: 0.058160, val_acc: 25.731127
+Epoch [1341], train_loss: 0.061067, val_loss: 0.058210, val_acc: 25.728003
+Epoch [1342], train_loss: 0.061064, val_loss: 0.058125, val_acc: 25.761826
+Epoch [1343], train_loss: 0.061101, val_loss: 0.058115, val_acc: 25.747055
+Epoch [1344], train_loss: 0.060936, val_loss: 0.058180, val_acc: 25.710957
+Epoch [1345], train_loss: 0.060966, val_loss: 0.058147, val_acc: 25.749561
+Epoch [1346], train_loss: 0.060888, val_loss: 0.058204, val_acc: 25.698013
+Epoch [1347], train_loss: 0.061004, val_loss: 0.058181, val_acc: 25.733713
+Epoch [1348], train_loss: 0.061053, val_loss: 0.058142, val_acc: 25.771509
+Epoch [1349], train_loss: 0.061032, val_loss: 0.058192, val_acc: 25.676649
+Epoch [1350], train_loss: 0.061034, val_loss: 0.058157, val_acc: 25.778345
+Epoch [1351], train_loss: 0.060929, val_loss: 0.058074, val_acc: 25.809553
+Epoch [1352], train_loss: 0.061097, val_loss: 0.058299, val_acc: 25.686792
+Epoch [1353], train_loss: 0.061135, val_loss: 0.058221, val_acc: 25.720415
+Epoch [1354], train_loss: 0.061133, val_loss: 0.058237, val_acc: 25.732668
+Epoch [1355], train_loss: 0.060915, val_loss: 0.058256, val_acc: 25.688684
+Epoch [1356], train_loss: 0.061000, val_loss: 0.058219, val_acc: 25.729616
+Epoch [1357], train_loss: 0.060854, val_loss: 0.058137, val_acc: 25.722557
+Epoch [1358], train_loss: 0.061108, val_loss: 0.058151, val_acc: 25.795036
+Epoch [1359], train_loss: 0.061006, val_loss: 0.058244, val_acc: 25.705072
+Epoch [1360], train_loss: 0.061263, val_loss: 0.058248, val_acc: 25.688585
+Epoch [1361], train_loss: 0.060796, val_loss: 0.058173, val_acc: 25.676603
+Epoch [1362], train_loss: 0.060993, val_loss: 0.058264, val_acc: 25.615995
+Epoch [1363], train_loss: 0.060857, val_loss: 0.058118, val_acc: 25.772409
+Epoch [1364], train_loss: 0.060963, val_loss: 0.058135, val_acc: 25.753237
+Epoch [1365], train_loss: 0.060869, val_loss: 0.058216, val_acc: 25.659300
+Epoch [1366], train_loss: 0.060784, val_loss: 0.058236, val_acc: 25.728432
+Epoch [1367], train_loss: 0.060953, val_loss: 0.058270, val_acc: 25.690712
+Epoch [1368], train_loss: 0.061011, val_loss: 0.058187, val_acc: 25.738989
+Epoch [1369], train_loss: 0.061022, val_loss: 0.058215, val_acc: 25.761673
+Epoch [1370], train_loss: 0.060969, val_loss: 0.058178, val_acc: 25.695932
+Epoch [1371], train_loss: 0.060945, val_loss: 0.058186, val_acc: 25.692430
+Epoch [1372], train_loss: 0.060980, val_loss: 0.058005, val_acc: 25.863869
+Epoch [1373], train_loss: 0.060880, val_loss: 0.058209, val_acc: 25.670311
+Epoch [1374], train_loss: 0.061101, val_loss: 0.058148, val_acc: 25.761328
+Epoch [1375], train_loss: 0.061001, val_loss: 0.058091, val_acc: 25.772547
+Epoch [1376], train_loss: 0.061162, val_loss: 0.058181, val_acc: 25.702013
+Epoch [1377], train_loss: 0.061088, val_loss: 0.058177, val_acc: 25.751352
+Epoch [1378], train_loss: 0.061074, val_loss: 0.058152, val_acc: 25.763931
+Epoch [1379], train_loss: 0.060923, val_loss: 0.058225, val_acc: 25.600176
+Epoch [1380], train_loss: 0.060899, val_loss: 0.058160, val_acc: 25.742105
+Epoch [1381], train_loss: 0.061270, val_loss: 0.058239, val_acc: 25.721315
+Epoch [1382], train_loss: 0.061077, val_loss: 0.058126, val_acc: 25.755672
+Epoch [1383], train_loss: 0.060946, val_loss: 0.058159, val_acc: 25.657402
+Epoch [1384], train_loss: 0.060819, val_loss: 0.058160, val_acc: 25.720943
+Epoch [1385], train_loss: 0.060891, val_loss: 0.058178, val_acc: 25.733156
+Epoch [1386], train_loss: 0.060883, val_loss: 0.058175, val_acc: 25.726723
+Epoch [1387], train_loss: 0.061001, val_loss: 0.058172, val_acc: 25.667875
+Epoch [1388], train_loss: 0.060880, val_loss: 0.058127, val_acc: 25.719296
+Epoch [1389], train_loss: 0.060957, val_loss: 0.058153, val_acc: 25.764898
+Epoch [1390], train_loss: 0.060913, val_loss: 0.058205, val_acc: 25.686071
+Epoch [1391], train_loss: 0.061105, val_loss: 0.058151, val_acc: 25.769821
+Epoch [1392], train_loss: 0.060925, val_loss: 0.058123, val_acc: 25.712053
+Epoch [1393], train_loss: 0.060920, val_loss: 0.058058, val_acc: 25.776180
+Epoch [1394], train_loss: 0.060936, val_loss: 0.058097, val_acc: 25.774082
+Epoch [1395], train_loss: 0.061143, val_loss: 0.058065, val_acc: 25.825871
+Epoch [1396], train_loss: 0.061180, val_loss: 0.058222, val_acc: 25.758110
+Epoch [1397], train_loss: 0.060953, val_loss: 0.058118, val_acc: 25.754166
+Epoch [1398], train_loss: 0.061128, val_loss: 0.058290, val_acc: 25.652597
+Epoch [1399], train_loss: 0.060974, val_loss: 0.058097, val_acc: 25.794802
+Epoch [1400], train_loss: 0.061157, val_loss: 0.058145, val_acc: 25.734667
+Epoch [1401], train_loss: 0.060925, val_loss: 0.058053, val_acc: 25.775455
+Epoch [1402], train_loss: 0.061030, val_loss: 0.058148, val_acc: 25.716354
+Epoch [1403], train_loss: 0.061037, val_loss: 0.058104, val_acc: 25.752335
+Epoch [1404], train_loss: 0.060971, val_loss: 0.058177, val_acc: 25.701138
+Epoch [1405], train_loss: 0.060861, val_loss: 0.058080, val_acc: 25.757219
+Epoch [1406], train_loss: 0.061073, val_loss: 0.058202, val_acc: 25.695309
+Epoch [1407], train_loss: 0.060956, val_loss: 0.058147, val_acc: 25.708719
+Epoch [1408], train_loss: 0.061028, val_loss: 0.058087, val_acc: 25.796745
+Epoch [1409], train_loss: 0.060755, val_loss: 0.058231, val_acc: 25.676521
+Epoch [1410], train_loss: 0.060989, val_loss: 0.058162, val_acc: 25.742571
+Epoch [1411], train_loss: 0.060979, val_loss: 0.058046, val_acc: 25.850925
+Epoch [1412], train_loss: 0.061094, val_loss: 0.058186, val_acc: 25.703468
+Epoch [1413], train_loss: 0.060955, val_loss: 0.058186, val_acc: 25.676458
+Epoch [1414], train_loss: 0.060750, val_loss: 0.058137, val_acc: 25.739403
+Epoch [1415], train_loss: 0.060670, val_loss: 0.058182, val_acc: 25.698549
+Epoch [1416], train_loss: 0.061015, val_loss: 0.058103, val_acc: 25.814524
+Epoch [1417], train_loss: 0.061128, val_loss: 0.058152, val_acc: 25.767664
+Epoch [1418], train_loss: 0.061171, val_loss: 0.058132, val_acc: 25.767138
+Epoch [1419], train_loss: 0.061072, val_loss: 0.058229, val_acc: 25.616400
+Epoch [1420], train_loss: 0.061008, val_loss: 0.058171, val_acc: 25.747341
+Epoch [1421], train_loss: 0.060873, val_loss: 0.058116, val_acc: 25.725355
+Epoch [1422], train_loss: 0.060937, val_loss: 0.058138, val_acc: 25.729639
+Epoch [1423], train_loss: 0.061141, val_loss: 0.058139, val_acc: 25.761992
+Epoch [1424], train_loss: 0.061011, val_loss: 0.058027, val_acc: 25.830000
+Epoch [1425], train_loss: 0.061041, val_loss: 0.058186, val_acc: 25.719898
+Epoch [1426], train_loss: 0.060827, val_loss: 0.058164, val_acc: 25.692987
+Epoch [1427], train_loss: 0.060966, val_loss: 0.058176, val_acc: 25.677109
+Epoch [1428], train_loss: 0.061011, val_loss: 0.058342, val_acc: 25.643652
+Epoch [1429], train_loss: 0.061155, val_loss: 0.058157, val_acc: 25.738297
+Epoch [1430], train_loss: 0.060883, val_loss: 0.058049, val_acc: 25.735680
+Epoch [1431], train_loss: 0.060943, val_loss: 0.058199, val_acc: 25.683764
+Epoch [1432], train_loss: 0.060915, val_loss: 0.058056, val_acc: 25.749989
+Epoch [1433], train_loss: 0.061045, val_loss: 0.058101, val_acc: 25.765776
+Epoch [1434], train_loss: 0.060700, val_loss: 0.058111, val_acc: 25.780973
+Epoch [1435], train_loss: 0.060845, val_loss: 0.058062, val_acc: 25.808599
+Epoch [1436], train_loss: 0.060776, val_loss: 0.058128, val_acc: 25.758615
+Epoch [1437], train_loss: 0.060886, val_loss: 0.058183, val_acc: 25.681196
+Epoch [1438], train_loss: 0.061283, val_loss: 0.058251, val_acc: 25.729424
+Epoch [1439], train_loss: 0.060861, val_loss: 0.058155, val_acc: 25.657112
+Epoch [1440], train_loss: 0.060960, val_loss: 0.058136, val_acc: 25.740641
+Epoch [1441], train_loss: 0.061070, val_loss: 0.058096, val_acc: 25.748217
+Epoch [1442], train_loss: 0.060886, val_loss: 0.058119, val_acc: 25.770533
+Epoch [1443], train_loss: 0.060909, val_loss: 0.058074, val_acc: 25.768452
+Epoch [1444], train_loss: 0.060827, val_loss: 0.058121, val_acc: 25.708981
+Epoch [1445], train_loss: 0.060848, val_loss: 0.058075, val_acc: 25.780046
+Epoch [1446], train_loss: 0.061003, val_loss: 0.058131, val_acc: 25.694969
+Epoch [1447], train_loss: 0.060705, val_loss: 0.058172, val_acc: 25.687971
+Epoch [1448], train_loss: 0.060966, val_loss: 0.058113, val_acc: 25.745674
+Epoch [1449], train_loss: 0.060827, val_loss: 0.058208, val_acc: 25.654087
+Epoch [1450], train_loss: 0.060856, val_loss: 0.058059, val_acc: 25.784605
+Epoch [1451], train_loss: 0.061044, val_loss: 0.058109, val_acc: 25.761782
+Epoch [1452], train_loss: 0.061028, val_loss: 0.058168, val_acc: 25.684616
+Epoch [1453], train_loss: 0.060908, val_loss: 0.058079, val_acc: 25.743635
+Epoch [1454], train_loss: 0.060790, val_loss: 0.058068, val_acc: 25.775251
+Epoch [1455], train_loss: 0.060972, val_loss: 0.058167, val_acc: 25.720728
+Epoch [1456], train_loss: 0.060973, val_loss: 0.058071, val_acc: 25.801743
+Epoch [1457], train_loss: 0.060894, val_loss: 0.058100, val_acc: 25.715986
+Epoch [1458], train_loss: 0.060958, val_loss: 0.058090, val_acc: 25.769808
+Epoch [1459], train_loss: 0.060924, val_loss: 0.058022, val_acc: 25.854593
+Epoch [1460], train_loss: 0.060985, val_loss: 0.058097, val_acc: 25.713598
+Epoch [1461], train_loss: 0.060769, val_loss: 0.058224, val_acc: 25.643299
+Epoch [1462], train_loss: 0.060932, val_loss: 0.058094, val_acc: 25.728498
+Epoch [1463], train_loss: 0.060989, val_loss: 0.058130, val_acc: 25.738216
+Epoch [1464], train_loss: 0.060892, val_loss: 0.058145, val_acc: 25.729195
+Epoch [1465], train_loss: 0.060893, val_loss: 0.058146, val_acc: 25.679148
+Epoch [1466], train_loss: 0.060985, val_loss: 0.058130, val_acc: 25.686487
+Epoch [1467], train_loss: 0.060999, val_loss: 0.058104, val_acc: 25.698027
+Epoch [1468], train_loss: 0.060800, val_loss: 0.058080, val_acc: 25.695248
+Epoch [1469], train_loss: 0.061064, val_loss: 0.058112, val_acc: 25.734545
+Epoch [1470], train_loss: 0.060965, val_loss: 0.058181, val_acc: 25.747587
+Epoch [1471], train_loss: 0.060894, val_loss: 0.058079, val_acc: 25.803568
+Epoch [1472], train_loss: 0.060975, val_loss: 0.058143, val_acc: 25.707003
+Epoch [1473], train_loss: 0.060868, val_loss: 0.058053, val_acc: 25.811769
+Epoch [1474], train_loss: 0.060944, val_loss: 0.058052, val_acc: 25.790504
+Epoch [1475], train_loss: 0.060967, val_loss: 0.058103, val_acc: 25.767509
+Epoch [1476], train_loss: 0.060952, val_loss: 0.058112, val_acc: 25.746279
+Epoch [1477], train_loss: 0.060919, val_loss: 0.058064, val_acc: 25.802689
+Epoch [1478], train_loss: 0.060921, val_loss: 0.058140, val_acc: 25.743916
+Epoch [1479], train_loss: 0.061067, val_loss: 0.058104, val_acc: 25.710905
+Epoch [1480], train_loss: 0.060814, val_loss: 0.058284, val_acc: 25.614578
+Epoch [1481], train_loss: 0.060853, val_loss: 0.058163, val_acc: 25.646610
+Epoch [1482], train_loss: 0.061108, val_loss: 0.058094, val_acc: 25.746872
+Epoch [1483], train_loss: 0.060978, val_loss: 0.058136, val_acc: 25.733366
+Epoch [1484], train_loss: 0.060818, val_loss: 0.058109, val_acc: 25.712610
+Epoch [1485], train_loss: 0.060822, val_loss: 0.058165, val_acc: 25.641260
+Epoch [1486], train_loss: 0.060896, val_loss: 0.058033, val_acc: 25.821318
+Epoch [1487], train_loss: 0.060949, val_loss: 0.058132, val_acc: 25.715769
+Epoch [1488], train_loss: 0.060917, val_loss: 0.058033, val_acc: 25.720301
+Epoch [1489], train_loss: 0.060980, val_loss: 0.058215, val_acc: 25.674322
+Epoch [1490], train_loss: 0.061061, val_loss: 0.058137, val_acc: 25.689783
+Epoch [1491], train_loss: 0.060874, val_loss: 0.058180, val_acc: 25.720247
+Epoch [1492], train_loss: 0.060790, val_loss: 0.058179, val_acc: 25.663298
+Epoch [1493], train_loss: 0.060811, val_loss: 0.058045, val_acc: 25.806093
+Epoch [1494], train_loss: 0.060921, val_loss: 0.058172, val_acc: 25.725666
+Epoch [1495], train_loss: 0.060938, val_loss: 0.057974, val_acc: 25.877146
+Epoch [1496], train_loss: 0.061002, val_loss: 0.058190, val_acc: 25.687092
+Epoch [1497], train_loss: 0.060799, val_loss: 0.058185, val_acc: 25.667660
+Epoch [1498], train_loss: 0.060956, val_loss: 0.058301, val_acc: 25.628929
+Epoch [1499], train_loss: 0.060947, val_loss: 0.058178, val_acc: 25.672787
+Epoch [1500], train_loss: 0.060924, val_loss: 0.058016, val_acc: 25.812294
+Epoch [1501], train_loss: 0.060827, val_loss: 0.058169, val_acc: 25.695900
+Epoch [1502], train_loss: 0.061024, val_loss: 0.058123, val_acc: 25.739855
+Epoch [1503], train_loss: 0.060903, val_loss: 0.058069, val_acc: 25.754877
+Epoch [1504], train_loss: 0.060754, val_loss: 0.058090, val_acc: 25.714354
+Epoch [1505], train_loss: 0.060834, val_loss: 0.058198, val_acc: 25.677721
+Epoch [1506], train_loss: 0.060967, val_loss: 0.058119, val_acc: 25.719048
+Epoch [1507], train_loss: 0.060929, val_loss: 0.058137, val_acc: 25.709019
+Epoch [1508], train_loss: 0.061028, val_loss: 0.058107, val_acc: 25.715570
+Epoch [1509], train_loss: 0.060917, val_loss: 0.058108, val_acc: 25.694197
+Epoch [1510], train_loss: 0.060848, val_loss: 0.058104, val_acc: 25.711582
+Epoch [1511], train_loss: 0.060914, val_loss: 0.058034, val_acc: 25.739084
+Epoch [1512], train_loss: 0.060956, val_loss: 0.058164, val_acc: 25.720411
+Epoch [1513], train_loss: 0.060822, val_loss: 0.058012, val_acc: 25.793085
+Epoch [1514], train_loss: 0.060891, val_loss: 0.058082, val_acc: 25.705769
+Epoch [1515], train_loss: 0.060876, val_loss: 0.058049, val_acc: 25.774355
+Epoch [1516], train_loss: 0.060836, val_loss: 0.058042, val_acc: 25.775276
+Epoch [1517], train_loss: 0.060912, val_loss: 0.058180, val_acc: 25.633055
+Epoch [1518], train_loss: 0.060727, val_loss: 0.058090, val_acc: 25.722002
+Epoch [1519], train_loss: 0.060811, val_loss: 0.058118, val_acc: 25.739084
+Epoch [1520], train_loss: 0.060795, val_loss: 0.058070, val_acc: 25.689234
+Epoch [1521], train_loss: 0.060763, val_loss: 0.058099, val_acc: 25.658323
+Epoch [1522], train_loss: 0.060935, val_loss: 0.058125, val_acc: 25.674942
+Epoch [1523], train_loss: 0.061000, val_loss: 0.058056, val_acc: 25.734676
+Epoch [1524], train_loss: 0.060697, val_loss: 0.057990, val_acc: 25.819492
+Epoch [1525], train_loss: 0.060934, val_loss: 0.058019, val_acc: 25.808710
+Epoch [1526], train_loss: 0.060761, val_loss: 0.058080, val_acc: 25.646645
+Epoch [1527], train_loss: 0.061004, val_loss: 0.058131, val_acc: 25.699839
+Epoch [1528], train_loss: 0.060783, val_loss: 0.058010, val_acc: 25.770023
+Epoch [1529], train_loss: 0.060759, val_loss: 0.058037, val_acc: 25.772091
+Epoch [1530], train_loss: 0.060782, val_loss: 0.058128, val_acc: 25.715672
+Epoch [1531], train_loss: 0.060824, val_loss: 0.057940, val_acc: 25.865206
+Epoch [1532], train_loss: 0.060876, val_loss: 0.058085, val_acc: 25.724043
+Epoch [1533], train_loss: 0.060888, val_loss: 0.058058, val_acc: 25.747292
+Epoch [1534], train_loss: 0.060901, val_loss: 0.058065, val_acc: 25.741081
+Epoch [1535], train_loss: 0.060853, val_loss: 0.058046, val_acc: 25.729433
+Epoch [1536], train_loss: 0.061084, val_loss: 0.058200, val_acc: 25.692476
+Epoch [1537], train_loss: 0.060829, val_loss: 0.058141, val_acc: 25.659500
+Epoch [1538], train_loss: 0.061023, val_loss: 0.058156, val_acc: 25.708776
+Epoch [1539], train_loss: 0.060872, val_loss: 0.058043, val_acc: 25.780195
+Epoch [1540], train_loss: 0.060847, val_loss: 0.058035, val_acc: 25.797440
+Epoch [1541], train_loss: 0.060769, val_loss: 0.058007, val_acc: 25.796694
+Epoch [1542], train_loss: 0.060711, val_loss: 0.058022, val_acc: 25.773966
+Epoch [1543], train_loss: 0.060613, val_loss: 0.058047, val_acc: 25.749220
+Epoch [1544], train_loss: 0.060877, val_loss: 0.058034, val_acc: 25.766962
+Epoch [1545], train_loss: 0.060815, val_loss: 0.058225, val_acc: 25.578140
+Epoch [1546], train_loss: 0.060896, val_loss: 0.058042, val_acc: 25.749363
+Epoch [1547], train_loss: 0.060965, val_loss: 0.058058, val_acc: 25.763445
+Epoch [1548], train_loss: 0.060859, val_loss: 0.058143, val_acc: 25.773630
+Epoch [1549], train_loss: 0.060817, val_loss: 0.058110, val_acc: 25.698397
+Epoch [1550], train_loss: 0.060896, val_loss: 0.058069, val_acc: 25.719995
+Epoch [1551], train_loss: 0.060897, val_loss: 0.058060, val_acc: 25.736301
+Epoch [1552], train_loss: 0.061038, val_loss: 0.058152, val_acc: 25.683638
+Epoch [1553], train_loss: 0.060799, val_loss: 0.058033, val_acc: 25.791096
+Epoch [1554], train_loss: 0.061017, val_loss: 0.058149, val_acc: 25.690849
+Epoch [1555], train_loss: 0.060990, val_loss: 0.058185, val_acc: 25.657265
+Epoch [1556], train_loss: 0.060904, val_loss: 0.058043, val_acc: 25.786888
+Epoch [1557], train_loss: 0.060882, val_loss: 0.057927, val_acc: 25.873644
+Epoch [1558], train_loss: 0.060763, val_loss: 0.058082, val_acc: 25.711460
+Epoch [1559], train_loss: 0.060787, val_loss: 0.058031, val_acc: 25.718241
+Epoch [1560], train_loss: 0.060929, val_loss: 0.057924, val_acc: 25.850990
+Epoch [1561], train_loss: 0.060834, val_loss: 0.058025, val_acc: 25.805470
+Epoch [1562], train_loss: 0.060791, val_loss: 0.058035, val_acc: 25.812668
+Epoch [1563], train_loss: 0.060730, val_loss: 0.058083, val_acc: 25.695957
+Epoch [1564], train_loss: 0.061014, val_loss: 0.058116, val_acc: 25.733561
+Epoch [1565], train_loss: 0.060929, val_loss: 0.058111, val_acc: 25.716724
+Epoch [1566], train_loss: 0.061062, val_loss: 0.058065, val_acc: 25.752560
+Epoch [1567], train_loss: 0.060647, val_loss: 0.058040, val_acc: 25.741501
+Epoch [1568], train_loss: 0.060854, val_loss: 0.058112, val_acc: 25.682545
+Epoch [1569], train_loss: 0.060928, val_loss: 0.058056, val_acc: 25.750881
+Epoch [1570], train_loss: 0.060945, val_loss: 0.058031, val_acc: 25.818102
+Epoch [1571], train_loss: 0.060782, val_loss: 0.058079, val_acc: 25.759254
+Epoch [1572], train_loss: 0.061016, val_loss: 0.057972, val_acc: 25.750324
+Epoch [1573], train_loss: 0.060903, val_loss: 0.058041, val_acc: 25.752464
+Epoch [1574], train_loss: 0.060925, val_loss: 0.058073, val_acc: 25.785139
+Epoch [1575], train_loss: 0.060829, val_loss: 0.058009, val_acc: 25.744631
+Epoch [1576], train_loss: 0.060765, val_loss: 0.058022, val_acc: 25.740416
+Epoch [1577], train_loss: 0.060916, val_loss: 0.058080, val_acc: 25.740311
+Epoch [1578], train_loss: 0.060819, val_loss: 0.058067, val_acc: 25.753218
+Epoch [1579], train_loss: 0.060742, val_loss: 0.058028, val_acc: 25.780687
+Epoch [1580], train_loss: 0.060898, val_loss: 0.058196, val_acc: 25.687479
+Epoch [1581], train_loss: 0.060653, val_loss: 0.058091, val_acc: 25.718325
+Epoch [1582], train_loss: 0.060882, val_loss: 0.058159, val_acc: 25.661623
+Epoch [1583], train_loss: 0.060871, val_loss: 0.058094, val_acc: 25.723211
+Epoch [1584], train_loss: 0.060858, val_loss: 0.058021, val_acc: 25.755547
+Epoch [1585], train_loss: 0.060989, val_loss: 0.058065, val_acc: 25.781530
+Epoch [1586], train_loss: 0.061043, val_loss: 0.058007, val_acc: 25.796339
+Epoch [1587], train_loss: 0.060813, val_loss: 0.057985, val_acc: 25.769758
+Epoch [1588], train_loss: 0.060946, val_loss: 0.058026, val_acc: 25.760691
+Epoch [1589], train_loss: 0.061027, val_loss: 0.058018, val_acc: 25.767580
+Epoch [1590], train_loss: 0.060861, val_loss: 0.058030, val_acc: 25.735048
+Epoch [1591], train_loss: 0.060830, val_loss: 0.058121, val_acc: 25.633095
+Epoch [1592], train_loss: 0.060941, val_loss: 0.058160, val_acc: 25.660471
+Epoch [1593], train_loss: 0.060746, val_loss: 0.058000, val_acc: 25.800510
+Epoch [1594], train_loss: 0.060913, val_loss: 0.058109, val_acc: 25.687698
+Epoch [1595], train_loss: 0.060896, val_loss: 0.058059, val_acc: 25.724873
+Epoch [1596], train_loss: 0.060942, val_loss: 0.058118, val_acc: 25.677158
+Epoch [1597], train_loss: 0.060707, val_loss: 0.058052, val_acc: 25.759127
+Epoch [1598], train_loss: 0.060820, val_loss: 0.058068, val_acc: 25.672356
+Epoch [1599], train_loss: 0.060701, val_loss: 0.058081, val_acc: 25.729755
+Epoch [1600], train_loss: 0.060790, val_loss: 0.058089, val_acc: 25.762548
+Epoch [1601], train_loss: 0.060879, val_loss: 0.057958, val_acc: 25.736366
+Epoch [1602], train_loss: 0.060530, val_loss: 0.058076, val_acc: 25.673552
+Epoch [1603], train_loss: 0.060760, val_loss: 0.057937, val_acc: 25.859612
+Epoch [1604], train_loss: 0.060898, val_loss: 0.057992, val_acc: 25.777357
+Epoch [1605], train_loss: 0.060809, val_loss: 0.058138, val_acc: 25.688822
+Epoch [1606], train_loss: 0.060996, val_loss: 0.058080, val_acc: 25.731655
+Epoch [1607], train_loss: 0.060895, val_loss: 0.058000, val_acc: 25.802843
+Epoch [1608], train_loss: 0.061022, val_loss: 0.058024, val_acc: 25.749022
+Epoch [1609], train_loss: 0.060596, val_loss: 0.057986, val_acc: 25.770327
+Epoch [1610], train_loss: 0.060880, val_loss: 0.058106, val_acc: 25.699675
+Epoch [1611], train_loss: 0.060686, val_loss: 0.058066, val_acc: 25.656893
+Epoch [1612], train_loss: 0.060684, val_loss: 0.058064, val_acc: 25.728630
+Epoch [1613], train_loss: 0.060786, val_loss: 0.058024, val_acc: 25.704128
+Epoch [1614], train_loss: 0.060765, val_loss: 0.057965, val_acc: 25.827112
+Epoch [1615], train_loss: 0.061026, val_loss: 0.058005, val_acc: 25.758039
+Epoch [1616], train_loss: 0.060873, val_loss: 0.057949, val_acc: 25.871592
+Epoch [1617], train_loss: 0.060828, val_loss: 0.058036, val_acc: 25.749077
+Epoch [1618], train_loss: 0.060985, val_loss: 0.057991, val_acc: 25.773710
+Epoch [1619], train_loss: 0.060870, val_loss: 0.058029, val_acc: 25.772982
+Epoch [1620], train_loss: 0.060854, val_loss: 0.057983, val_acc: 25.802952
+Epoch [1621], train_loss: 0.061040, val_loss: 0.057977, val_acc: 25.792059
+Epoch [1622], train_loss: 0.060883, val_loss: 0.057957, val_acc: 25.773861
+Epoch [1623], train_loss: 0.060786, val_loss: 0.058053, val_acc: 25.721216
+Epoch [1624], train_loss: 0.060652, val_loss: 0.058027, val_acc: 25.747831
+Epoch [1625], train_loss: 0.060696, val_loss: 0.058037, val_acc: 25.780485
+Epoch [1626], train_loss: 0.060818, val_loss: 0.057957, val_acc: 25.803553
+Epoch [1627], train_loss: 0.060786, val_loss: 0.058105, val_acc: 25.730017
+Epoch [1628], train_loss: 0.060653, val_loss: 0.058068, val_acc: 25.728411
+Epoch [1629], train_loss: 0.060805, val_loss: 0.057924, val_acc: 25.882401
+Epoch [1630], train_loss: 0.060823, val_loss: 0.057962, val_acc: 25.781408
+Epoch [1631], train_loss: 0.060838, val_loss: 0.058091, val_acc: 25.668510
+Epoch [1632], train_loss: 0.060871, val_loss: 0.057985, val_acc: 25.750742
+Epoch [1633], train_loss: 0.060769, val_loss: 0.058083, val_acc: 25.726639
+Epoch [1634], train_loss: 0.060779, val_loss: 0.058093, val_acc: 25.696915
+Epoch [1635], train_loss: 0.060840, val_loss: 0.058110, val_acc: 25.693026
+Epoch [1636], train_loss: 0.061003, val_loss: 0.058013, val_acc: 25.753834
+Epoch [1637], train_loss: 0.060805, val_loss: 0.057991, val_acc: 25.810007
+Epoch [1638], train_loss: 0.060976, val_loss: 0.058062, val_acc: 25.714890
+Epoch [1639], train_loss: 0.060923, val_loss: 0.058079, val_acc: 25.731119
+Epoch [1640], train_loss: 0.060765, val_loss: 0.057985, val_acc: 25.765099
+Epoch [1641], train_loss: 0.060684, val_loss: 0.058049, val_acc: 25.675726
+Epoch [1642], train_loss: 0.060758, val_loss: 0.058025, val_acc: 25.736528
+Epoch [1643], train_loss: 0.060943, val_loss: 0.058058, val_acc: 25.720127
+Epoch [1644], train_loss: 0.061021, val_loss: 0.058068, val_acc: 25.716640
+Epoch [1645], train_loss: 0.060647, val_loss: 0.057990, val_acc: 25.754553
+Epoch [1646], train_loss: 0.060808, val_loss: 0.058008, val_acc: 25.720383
+Epoch [1647], train_loss: 0.060753, val_loss: 0.058098, val_acc: 25.654007
+Epoch [1648], train_loss: 0.061133, val_loss: 0.058022, val_acc: 25.711094
+Epoch [1649], train_loss: 0.060799, val_loss: 0.057998, val_acc: 25.765871
+Epoch [1650], train_loss: 0.060795, val_loss: 0.058073, val_acc: 25.703493
+Epoch [1651], train_loss: 0.060860, val_loss: 0.058014, val_acc: 25.756777
+Epoch [1652], train_loss: 0.060948, val_loss: 0.057981, val_acc: 25.769051
+Epoch [1653], train_loss: 0.060833, val_loss: 0.058126, val_acc: 25.658220
+Epoch [1654], train_loss: 0.060850, val_loss: 0.057972, val_acc: 25.832741
+Epoch [1655], train_loss: 0.060812, val_loss: 0.058023, val_acc: 25.721794
+Epoch [1656], train_loss: 0.060961, val_loss: 0.058030, val_acc: 25.729280
+Epoch [1657], train_loss: 0.060937, val_loss: 0.058017, val_acc: 25.752729
+Epoch [1658], train_loss: 0.060496, val_loss: 0.057961, val_acc: 25.773020
+Epoch [1659], train_loss: 0.060905, val_loss: 0.057995, val_acc: 25.727156
+Epoch [1660], train_loss: 0.060715, val_loss: 0.057972, val_acc: 25.745062
+Epoch [1661], train_loss: 0.060615, val_loss: 0.058028, val_acc: 25.726952
+Epoch [1662], train_loss: 0.060791, val_loss: 0.058024, val_acc: 25.787186
+Epoch [1663], train_loss: 0.060880, val_loss: 0.057976, val_acc: 25.752195
+Epoch [1664], train_loss: 0.060675, val_loss: 0.058092, val_acc: 25.733545
+Epoch [1665], train_loss: 0.060777, val_loss: 0.058034, val_acc: 25.746834
+Epoch [1666], train_loss: 0.060769, val_loss: 0.057990, val_acc: 25.688623
+Epoch [1667], train_loss: 0.060838, val_loss: 0.058034, val_acc: 25.742506
+Epoch [1668], train_loss: 0.060731, val_loss: 0.058023, val_acc: 25.703234
+Epoch [1669], train_loss: 0.060763, val_loss: 0.058000, val_acc: 25.777866
+Epoch [1670], train_loss: 0.060551, val_loss: 0.057971, val_acc: 25.734873
+Epoch [1671], train_loss: 0.060795, val_loss: 0.057995, val_acc: 25.727375
+Epoch [1672], train_loss: 0.060760, val_loss: 0.057922, val_acc: 25.854874
+Epoch [1673], train_loss: 0.060662, val_loss: 0.057879, val_acc: 25.863207
+Epoch [1674], train_loss: 0.060765, val_loss: 0.057999, val_acc: 25.820755
+Epoch [1675], train_loss: 0.060705, val_loss: 0.057932, val_acc: 25.842066
+Epoch [1676], train_loss: 0.060947, val_loss: 0.058015, val_acc: 25.701960
+Epoch [1677], train_loss: 0.060776, val_loss: 0.057960, val_acc: 25.795893
+Epoch [1678], train_loss: 0.060934, val_loss: 0.057964, val_acc: 25.810989
+Epoch [1679], train_loss: 0.060640, val_loss: 0.057947, val_acc: 25.767149
+Epoch [1680], train_loss: 0.060849, val_loss: 0.057975, val_acc: 25.773180
+Epoch [1681], train_loss: 0.060714, val_loss: 0.057986, val_acc: 25.726606
+Epoch [1682], train_loss: 0.060724, val_loss: 0.057943, val_acc: 25.769901
+Epoch [1683], train_loss: 0.060741, val_loss: 0.058113, val_acc: 25.696072
+Epoch [1684], train_loss: 0.060928, val_loss: 0.058052, val_acc: 25.675827
+Epoch [1685], train_loss: 0.060770, val_loss: 0.058017, val_acc: 25.749941
+Epoch [1686], train_loss: 0.060767, val_loss: 0.058004, val_acc: 25.773352
+Epoch [1687], train_loss: 0.060952, val_loss: 0.057978, val_acc: 25.802361
+Epoch [1688], train_loss: 0.060690, val_loss: 0.057984, val_acc: 25.771986
+Epoch [1689], train_loss: 0.060808, val_loss: 0.058055, val_acc: 25.678835
+Epoch [1690], train_loss: 0.060769, val_loss: 0.057998, val_acc: 25.754824
+Epoch [1691], train_loss: 0.060679, val_loss: 0.057976, val_acc: 25.747583
+Epoch [1692], train_loss: 0.060808, val_loss: 0.057911, val_acc: 25.829416
+Epoch [1693], train_loss: 0.060800, val_loss: 0.058004, val_acc: 25.723324
+Epoch [1694], train_loss: 0.060594, val_loss: 0.057882, val_acc: 25.868498
+Epoch [1695], train_loss: 0.060784, val_loss: 0.057942, val_acc: 25.760077
+Epoch [1696], train_loss: 0.060967, val_loss: 0.057941, val_acc: 25.759823
+Epoch [1697], train_loss: 0.060685, val_loss: 0.057983, val_acc: 25.746489
+Epoch [1698], train_loss: 0.060853, val_loss: 0.057908, val_acc: 25.842165
+Epoch [1699], train_loss: 0.060798, val_loss: 0.057974, val_acc: 25.767420
+Epoch [1700], train_loss: 0.060745, val_loss: 0.057915, val_acc: 25.845564
+Epoch [1701], train_loss: 0.060794, val_loss: 0.057987, val_acc: 25.765030
+Epoch [1702], train_loss: 0.060907, val_loss: 0.058019, val_acc: 25.744846
+Epoch [1703], train_loss: 0.060887, val_loss: 0.057981, val_acc: 25.814777
+Epoch [1704], train_loss: 0.060780, val_loss: 0.058051, val_acc: 25.729624
+Epoch [1705], train_loss: 0.060735, val_loss: 0.058040, val_acc: 25.735598
+Epoch [1706], train_loss: 0.060645, val_loss: 0.058125, val_acc: 25.688059
+Epoch [1707], train_loss: 0.060957, val_loss: 0.058054, val_acc: 25.750776
+Epoch [1708], train_loss: 0.060691, val_loss: 0.058036, val_acc: 25.768198
+Epoch [1709], train_loss: 0.060938, val_loss: 0.058084, val_acc: 25.678638
+Epoch [1710], train_loss: 0.060696, val_loss: 0.057957, val_acc: 25.826981
+Epoch [1711], train_loss: 0.060607, val_loss: 0.057934, val_acc: 25.754374
+Epoch [1712], train_loss: 0.060805, val_loss: 0.057961, val_acc: 25.775467
+Epoch [1713], train_loss: 0.060678, val_loss: 0.057960, val_acc: 25.760588
+Epoch [1714], train_loss: 0.060740, val_loss: 0.057903, val_acc: 25.845757
+Epoch [1715], train_loss: 0.060774, val_loss: 0.057908, val_acc: 25.828362
+Epoch [1716], train_loss: 0.060780, val_loss: 0.058021, val_acc: 25.742907
+Epoch [1717], train_loss: 0.060640, val_loss: 0.058004, val_acc: 25.746326
+Epoch [1718], train_loss: 0.060841, val_loss: 0.058046, val_acc: 25.730019
+Epoch [1719], train_loss: 0.060717, val_loss: 0.058010, val_acc: 25.674610
+Epoch [1720], train_loss: 0.060811, val_loss: 0.057925, val_acc: 25.823341
+Epoch [1721], train_loss: 0.060680, val_loss: 0.057959, val_acc: 25.736914
+Epoch [1722], train_loss: 0.060867, val_loss: 0.058008, val_acc: 25.723757
+Epoch [1723], train_loss: 0.060541, val_loss: 0.057970, val_acc: 25.753401
+Epoch [1724], train_loss: 0.060828, val_loss: 0.057929, val_acc: 25.769171
+Epoch [1725], train_loss: 0.060612, val_loss: 0.058058, val_acc: 25.699520
+Epoch [1726], train_loss: 0.060848, val_loss: 0.057997, val_acc: 25.729761
+Epoch [1727], train_loss: 0.060737, val_loss: 0.057908, val_acc: 25.836489
+Epoch [1728], train_loss: 0.060727, val_loss: 0.057952, val_acc: 25.823942
+Epoch [1729], train_loss: 0.060697, val_loss: 0.058009, val_acc: 25.676561
+Epoch [1730], train_loss: 0.060753, val_loss: 0.057893, val_acc: 25.795750
+Epoch [1731], train_loss: 0.060931, val_loss: 0.058097, val_acc: 25.704277
+Epoch [1732], train_loss: 0.060840, val_loss: 0.058003, val_acc: 25.773569
+Epoch [1733], train_loss: 0.060808, val_loss: 0.058018, val_acc: 25.718328
+Epoch [1734], train_loss: 0.060719, val_loss: 0.057962, val_acc: 25.738993
+Epoch [1735], train_loss: 0.060894, val_loss: 0.057964, val_acc: 25.750551
+Epoch [1736], train_loss: 0.060804, val_loss: 0.057962, val_acc: 25.792843
+Epoch [1737], train_loss: 0.060826, val_loss: 0.057987, val_acc: 25.781467
+Epoch [1738], train_loss: 0.060623, val_loss: 0.058033, val_acc: 25.746006
+Epoch [1739], train_loss: 0.060860, val_loss: 0.057966, val_acc: 25.793476
+Epoch [1740], train_loss: 0.060852, val_loss: 0.058039, val_acc: 25.713144
+Epoch [1741], train_loss: 0.060848, val_loss: 0.058109, val_acc: 25.729658
+Epoch [1742], train_loss: 0.060581, val_loss: 0.057946, val_acc: 25.803957
+Epoch [1743], train_loss: 0.060551, val_loss: 0.058001, val_acc: 25.738901
+Epoch [1744], train_loss: 0.060760, val_loss: 0.057994, val_acc: 25.682489
+Epoch [1745], train_loss: 0.060687, val_loss: 0.057872, val_acc: 25.825262
+Epoch [1746], train_loss: 0.060775, val_loss: 0.057979, val_acc: 25.759447
+Epoch [1747], train_loss: 0.060867, val_loss: 0.057936, val_acc: 25.765329
+Epoch [1748], train_loss: 0.060883, val_loss: 0.057977, val_acc: 25.760813
+Epoch [1749], train_loss: 0.060995, val_loss: 0.058041, val_acc: 25.702206
+Epoch [1750], train_loss: 0.060909, val_loss: 0.057948, val_acc: 25.774010
+Epoch [1751], train_loss: 0.060935, val_loss: 0.058065, val_acc: 25.751108
+Epoch [1752], train_loss: 0.060643, val_loss: 0.057925, val_acc: 25.837589
+Epoch [1753], train_loss: 0.060693, val_loss: 0.058010, val_acc: 25.720188
+Epoch [1754], train_loss: 0.060809, val_loss: 0.057996, val_acc: 25.762814
+Epoch [1755], train_loss: 0.060737, val_loss: 0.057917, val_acc: 25.819420
+Epoch [1756], train_loss: 0.060786, val_loss: 0.057861, val_acc: 25.854784
+Epoch [1757], train_loss: 0.060807, val_loss: 0.058042, val_acc: 25.680479
+Epoch [1758], train_loss: 0.060706, val_loss: 0.057925, val_acc: 25.771564
+Epoch [1759], train_loss: 0.060714, val_loss: 0.057971, val_acc: 25.729862
+Epoch [1760], train_loss: 0.060674, val_loss: 0.057887, val_acc: 25.790045
+Epoch [1761], train_loss: 0.060765, val_loss: 0.057925, val_acc: 25.795109
+Epoch [1762], train_loss: 0.060755, val_loss: 0.057967, val_acc: 25.755154
+Epoch [1763], train_loss: 0.060751, val_loss: 0.058043, val_acc: 25.678886
+Epoch [1764], train_loss: 0.060767, val_loss: 0.057921, val_acc: 25.763771
+Epoch [1765], train_loss: 0.060857, val_loss: 0.058021, val_acc: 25.697184
+Epoch [1766], train_loss: 0.060705, val_loss: 0.057902, val_acc: 25.797447
+Epoch [1767], train_loss: 0.060852, val_loss: 0.058009, val_acc: 25.776447
+Epoch [1768], train_loss: 0.060712, val_loss: 0.058000, val_acc: 25.794024
+Epoch [1769], train_loss: 0.060748, val_loss: 0.058033, val_acc: 25.664078
+Epoch [1770], train_loss: 0.060790, val_loss: 0.057959, val_acc: 25.750807
+Epoch [1771], train_loss: 0.060840, val_loss: 0.057969, val_acc: 25.724598
+Epoch [1772], train_loss: 0.060899, val_loss: 0.057894, val_acc: 25.796059
+Epoch [1773], train_loss: 0.060778, val_loss: 0.058048, val_acc: 25.701445
+Epoch [1774], train_loss: 0.060618, val_loss: 0.058053, val_acc: 25.745180
+Epoch [1775], train_loss: 0.060739, val_loss: 0.057969, val_acc: 25.730431
+Epoch [1776], train_loss: 0.060747, val_loss: 0.057966, val_acc: 25.749186
+Epoch [1777], train_loss: 0.060738, val_loss: 0.057955, val_acc: 25.752010
+Epoch [1778], train_loss: 0.060628, val_loss: 0.058036, val_acc: 25.727404
+Epoch [1779], train_loss: 0.060664, val_loss: 0.057975, val_acc: 25.768892
+Epoch [1780], train_loss: 0.060756, val_loss: 0.057984, val_acc: 25.754269
+Epoch [1781], train_loss: 0.060690, val_loss: 0.057903, val_acc: 25.797783
+Epoch [1782], train_loss: 0.060918, val_loss: 0.057847, val_acc: 25.913383
+Epoch [1783], train_loss: 0.060686, val_loss: 0.057873, val_acc: 25.774664
+Epoch [1784], train_loss: 0.060887, val_loss: 0.057992, val_acc: 25.807623
+Epoch [1785], train_loss: 0.060737, val_loss: 0.057964, val_acc: 25.713732
+Epoch [1786], train_loss: 0.060725, val_loss: 0.057967, val_acc: 25.723658
+Epoch [1787], train_loss: 0.060658, val_loss: 0.057829, val_acc: 25.799625
+Epoch [1788], train_loss: 0.060724, val_loss: 0.058023, val_acc: 25.673975
+Epoch [1789], train_loss: 0.060607, val_loss: 0.057980, val_acc: 25.716057
+Epoch [1790], train_loss: 0.060859, val_loss: 0.057905, val_acc: 25.797989
+Epoch [1791], train_loss: 0.060607, val_loss: 0.058015, val_acc: 25.650721
+Epoch [1792], train_loss: 0.060800, val_loss: 0.057948, val_acc: 25.731272
+Epoch [1793], train_loss: 0.060773, val_loss: 0.057918, val_acc: 25.809359
+Epoch [1794], train_loss: 0.060722, val_loss: 0.057949, val_acc: 25.773134
+Epoch [1795], train_loss: 0.060761, val_loss: 0.057925, val_acc: 25.712889
+Epoch [1796], train_loss: 0.060723, val_loss: 0.057931, val_acc: 25.782812
+Epoch [1797], train_loss: 0.060759, val_loss: 0.057885, val_acc: 25.797796
+Epoch [1798], train_loss: 0.060772, val_loss: 0.057895, val_acc: 25.755630
+Epoch [1799], train_loss: 0.060841, val_loss: 0.057980, val_acc: 25.719231
+Epoch [1800], train_loss: 0.060764, val_loss: 0.057938, val_acc: 25.780684
+Epoch [1801], train_loss: 0.060643, val_loss: 0.057900, val_acc: 25.809599
+Epoch [1802], train_loss: 0.060728, val_loss: 0.057989, val_acc: 25.755165
+Epoch [1803], train_loss: 0.060775, val_loss: 0.057916, val_acc: 25.747219
+Epoch [1804], train_loss: 0.060536, val_loss: 0.057962, val_acc: 25.729986
+Epoch [1805], train_loss: 0.060776, val_loss: 0.057942, val_acc: 25.782986
+Epoch [1806], train_loss: 0.060758, val_loss: 0.058017, val_acc: 25.676508
+Epoch [1807], train_loss: 0.060611, val_loss: 0.057964, val_acc: 25.741636
+Epoch [1808], train_loss: 0.060768, val_loss: 0.058072, val_acc: 25.665688
+Epoch [1809], train_loss: 0.060837, val_loss: 0.057944, val_acc: 25.756321
+Epoch [1810], train_loss: 0.060869, val_loss: 0.057979, val_acc: 25.778654
+Epoch [1811], train_loss: 0.060698, val_loss: 0.058081, val_acc: 25.685104
+Epoch [1812], train_loss: 0.060799, val_loss: 0.057950, val_acc: 25.803598
+Epoch [1813], train_loss: 0.060548, val_loss: 0.057934, val_acc: 25.737364
+Epoch [1814], train_loss: 0.060756, val_loss: 0.057951, val_acc: 25.721693
+Epoch [1815], train_loss: 0.060661, val_loss: 0.057964, val_acc: 25.798365
+Epoch [1816], train_loss: 0.060641, val_loss: 0.057923, val_acc: 25.823389
+Epoch [1817], train_loss: 0.060788, val_loss: 0.057942, val_acc: 25.789852
+Epoch [1818], train_loss: 0.060689, val_loss: 0.057904, val_acc: 25.774065
+Epoch [1819], train_loss: 0.060671, val_loss: 0.057873, val_acc: 25.813715
+Epoch [1820], train_loss: 0.060598, val_loss: 0.058041, val_acc: 25.693224
+Epoch [1821], train_loss: 0.060752, val_loss: 0.057974, val_acc: 25.769152
+Epoch [1822], train_loss: 0.060674, val_loss: 0.057859, val_acc: 25.886528
+Epoch [1823], train_loss: 0.060904, val_loss: 0.057962, val_acc: 25.761316
+Epoch [1824], train_loss: 0.060522, val_loss: 0.057927, val_acc: 25.790104
+Epoch [1825], train_loss: 0.060948, val_loss: 0.057910, val_acc: 25.782021
+Epoch [1826], train_loss: 0.060832, val_loss: 0.057935, val_acc: 25.762875
+Epoch [1827], train_loss: 0.060795, val_loss: 0.057904, val_acc: 25.760124
+Epoch [1828], train_loss: 0.060724, val_loss: 0.057864, val_acc: 25.781021
+Epoch [1829], train_loss: 0.060730, val_loss: 0.057834, val_acc: 25.852810
+Epoch [1830], train_loss: 0.060761, val_loss: 0.057939, val_acc: 25.785921
+Epoch [1831], train_loss: 0.060907, val_loss: 0.058008, val_acc: 25.739626
+Epoch [1832], train_loss: 0.060667, val_loss: 0.057936, val_acc: 25.803637
+Epoch [1833], train_loss: 0.060767, val_loss: 0.057883, val_acc: 25.805473
+Epoch [1834], train_loss: 0.060648, val_loss: 0.057976, val_acc: 25.705816
+Epoch [1835], train_loss: 0.060792, val_loss: 0.057909, val_acc: 25.793724
+Epoch [1836], train_loss: 0.060633, val_loss: 0.058086, val_acc: 25.611082
+Epoch [1837], train_loss: 0.060954, val_loss: 0.057912, val_acc: 25.795948
+Epoch [1838], train_loss: 0.060761, val_loss: 0.057964, val_acc: 25.693501
+Epoch [1839], train_loss: 0.060856, val_loss: 0.057916, val_acc: 25.798191
+Epoch [1840], train_loss: 0.060567, val_loss: 0.057862, val_acc: 25.787992
+Epoch [1841], train_loss: 0.060819, val_loss: 0.057902, val_acc: 25.707548
+Epoch [1842], train_loss: 0.060708, val_loss: 0.057945, val_acc: 25.779621
+Epoch [1843], train_loss: 0.060650, val_loss: 0.057930, val_acc: 25.761625
+Epoch [1844], train_loss: 0.060755, val_loss: 0.057983, val_acc: 25.699379
+Epoch [1845], train_loss: 0.060665, val_loss: 0.057984, val_acc: 25.757393
+Epoch [1846], train_loss: 0.060703, val_loss: 0.057920, val_acc: 25.739765
+Epoch [1847], train_loss: 0.060715, val_loss: 0.057935, val_acc: 25.746599
+Epoch [1848], train_loss: 0.060830, val_loss: 0.058004, val_acc: 25.715296
+Epoch [1849], train_loss: 0.060608, val_loss: 0.057933, val_acc: 25.723639
+Epoch [1850], train_loss: 0.060615, val_loss: 0.057893, val_acc: 25.831894
+Epoch [1851], train_loss: 0.060752, val_loss: 0.057947, val_acc: 25.748476
+Epoch [1852], train_loss: 0.060539, val_loss: 0.057905, val_acc: 25.746052
+Epoch [1853], train_loss: 0.060655, val_loss: 0.057920, val_acc: 25.743010
+Epoch [1854], train_loss: 0.060572, val_loss: 0.057928, val_acc: 25.746927
+Epoch [1855], train_loss: 0.060684, val_loss: 0.057960, val_acc: 25.696844
+Epoch [1856], train_loss: 0.060725, val_loss: 0.057837, val_acc: 25.807787
+Epoch [1857], train_loss: 0.060708, val_loss: 0.057902, val_acc: 25.733131
+Epoch [1858], train_loss: 0.060642, val_loss: 0.057899, val_acc: 25.753983
+Epoch [1859], train_loss: 0.060598, val_loss: 0.057900, val_acc: 25.804600
+Epoch [1860], train_loss: 0.060535, val_loss: 0.057929, val_acc: 25.723263
+Epoch [1861], train_loss: 0.060545, val_loss: 0.057841, val_acc: 25.833530
+Epoch [1862], train_loss: 0.060749, val_loss: 0.057914, val_acc: 25.741976
+Epoch [1863], train_loss: 0.060740, val_loss: 0.057920, val_acc: 25.800594
+Epoch [1864], train_loss: 0.060769, val_loss: 0.057909, val_acc: 25.836952
+Epoch [1865], train_loss: 0.060652, val_loss: 0.057917, val_acc: 25.790627
+Epoch [1866], train_loss: 0.060699, val_loss: 0.057926, val_acc: 25.778461
+Epoch [1867], train_loss: 0.060584, val_loss: 0.057911, val_acc: 25.740452
+Epoch [1868], train_loss: 0.060622, val_loss: 0.057991, val_acc: 25.737249
+Epoch [1869], train_loss: 0.060694, val_loss: 0.057932, val_acc: 25.771021
+Epoch [1870], train_loss: 0.060628, val_loss: 0.057906, val_acc: 25.780048
+Epoch [1871], train_loss: 0.060768, val_loss: 0.057911, val_acc: 25.829294
+Epoch [1872], train_loss: 0.060600, val_loss: 0.057983, val_acc: 25.692488
+Epoch [1873], train_loss: 0.060548, val_loss: 0.057871, val_acc: 25.798937
+Epoch [1874], train_loss: 0.060685, val_loss: 0.057882, val_acc: 25.834833
+Epoch [1875], train_loss: 0.060809, val_loss: 0.057929, val_acc: 25.752377
+Epoch [1876], train_loss: 0.060632, val_loss: 0.057868, val_acc: 25.799753
+Epoch [1877], train_loss: 0.060550, val_loss: 0.057942, val_acc: 25.721830
+Epoch [1878], train_loss: 0.060618, val_loss: 0.057816, val_acc: 25.830421
+Epoch [1879], train_loss: 0.060589, val_loss: 0.057874, val_acc: 25.673811
+Epoch [1880], train_loss: 0.060733, val_loss: 0.057826, val_acc: 25.812580
+Epoch [1881], train_loss: 0.060659, val_loss: 0.057924, val_acc: 25.767244
+Epoch [1882], train_loss: 0.060566, val_loss: 0.058010, val_acc: 25.708366
+Epoch [1883], train_loss: 0.060515, val_loss: 0.057837, val_acc: 25.828516
+Epoch [1884], train_loss: 0.060513, val_loss: 0.057947, val_acc: 25.723452
+Epoch [1885], train_loss: 0.060732, val_loss: 0.057965, val_acc: 25.722376
+Epoch [1886], train_loss: 0.060832, val_loss: 0.057934, val_acc: 25.722139
+Epoch [1887], train_loss: 0.060402, val_loss: 0.057914, val_acc: 25.767002
+Epoch [1888], train_loss: 0.060659, val_loss: 0.057970, val_acc: 25.703316
+Epoch [1889], train_loss: 0.060675, val_loss: 0.058015, val_acc: 25.733515
+Epoch [1890], train_loss: 0.060693, val_loss: 0.057915, val_acc: 25.768660
+Epoch [1891], train_loss: 0.060673, val_loss: 0.057918, val_acc: 25.738277
+Epoch [1892], train_loss: 0.060910, val_loss: 0.057892, val_acc: 25.821163
+Epoch [1893], train_loss: 0.060773, val_loss: 0.057889, val_acc: 25.794155
+Epoch [1894], train_loss: 0.060738, val_loss: 0.057898, val_acc: 25.769724
+Epoch [1895], train_loss: 0.060689, val_loss: 0.057904, val_acc: 25.796364
+Epoch [1896], train_loss: 0.060712, val_loss: 0.057924, val_acc: 25.784874
+Epoch [1897], train_loss: 0.060752, val_loss: 0.057820, val_acc: 25.831614
+Epoch [1898], train_loss: 0.060711, val_loss: 0.057899, val_acc: 25.760307
+Epoch [1899], train_loss: 0.060474, val_loss: 0.057869, val_acc: 25.799334
+Epoch [1900], train_loss: 0.060675, val_loss: 0.057876, val_acc: 25.820356
+Epoch [1901], train_loss: 0.060597, val_loss: 0.057904, val_acc: 25.783762
+Epoch [1902], train_loss: 0.060700, val_loss: 0.057872, val_acc: 25.837326
+Epoch [1903], train_loss: 0.060443, val_loss: 0.057837, val_acc: 25.799793
+Epoch [1904], train_loss: 0.060662, val_loss: 0.057909, val_acc: 25.757931
+Epoch [1905], train_loss: 0.060786, val_loss: 0.057827, val_acc: 25.818464
+Epoch [1906], train_loss: 0.060667, val_loss: 0.057885, val_acc: 25.760298
+Epoch [1907], train_loss: 0.060667, val_loss: 0.057944, val_acc: 25.759293
+Epoch [1908], train_loss: 0.060585, val_loss: 0.058029, val_acc: 25.643011
+Epoch [1909], train_loss: 0.060631, val_loss: 0.057932, val_acc: 25.721968
+Epoch [1910], train_loss: 0.060715, val_loss: 0.057928, val_acc: 25.746902
+Epoch [1911], train_loss: 0.060720, val_loss: 0.057862, val_acc: 25.822859
+Epoch [1912], train_loss: 0.060546, val_loss: 0.057867, val_acc: 25.811989
+Epoch [1913], train_loss: 0.060594, val_loss: 0.057854, val_acc: 25.837069
+Epoch [1914], train_loss: 0.060683, val_loss: 0.057915, val_acc: 25.737291
+Epoch [1915], train_loss: 0.060674, val_loss: 0.057865, val_acc: 25.738636
+Epoch [1916], train_loss: 0.060631, val_loss: 0.057883, val_acc: 25.809362
+Epoch [1917], train_loss: 0.060678, val_loss: 0.057887, val_acc: 25.805357
+Epoch [1918], train_loss: 0.060610, val_loss: 0.057896, val_acc: 25.721329
+Epoch [1919], train_loss: 0.060749, val_loss: 0.057941, val_acc: 25.752644
+Epoch [1920], train_loss: 0.060538, val_loss: 0.057874, val_acc: 25.793119
+Epoch [1921], train_loss: 0.060448, val_loss: 0.057982, val_acc: 25.710600
+Epoch [1922], train_loss: 0.060553, val_loss: 0.057913, val_acc: 25.736761
+Epoch [1923], train_loss: 0.060764, val_loss: 0.057931, val_acc: 25.679451
+Epoch [1924], train_loss: 0.060593, val_loss: 0.057932, val_acc: 25.707609
+Epoch [1925], train_loss: 0.060611, val_loss: 0.057774, val_acc: 25.898415
+Epoch [1926], train_loss: 0.060624, val_loss: 0.057777, val_acc: 25.857332
+Epoch [1927], train_loss: 0.060636, val_loss: 0.057847, val_acc: 25.795517
+Epoch [1928], train_loss: 0.060779, val_loss: 0.057923, val_acc: 25.775682
+Epoch [1929], train_loss: 0.060753, val_loss: 0.057839, val_acc: 25.795897
+Epoch [1930], train_loss: 0.060648, val_loss: 0.057889, val_acc: 25.727488
+Epoch [1931], train_loss: 0.060728, val_loss: 0.057848, val_acc: 25.807543
+Epoch [1932], train_loss: 0.060568, val_loss: 0.057828, val_acc: 25.719988
+Epoch [1933], train_loss: 0.060666, val_loss: 0.057924, val_acc: 25.767790
+Epoch [1934], train_loss: 0.060669, val_loss: 0.057848, val_acc: 25.819548
+Epoch [1935], train_loss: 0.060696, val_loss: 0.057803, val_acc: 25.804693
+Epoch [1936], train_loss: 0.060666, val_loss: 0.057830, val_acc: 25.782816
+Epoch [1937], train_loss: 0.060507, val_loss: 0.057906, val_acc: 25.783607
+Epoch [1938], train_loss: 0.060742, val_loss: 0.057808, val_acc: 25.787928
+Epoch [1939], train_loss: 0.060735, val_loss: 0.057943, val_acc: 25.682013
+Epoch [1940], train_loss: 0.060580, val_loss: 0.057892, val_acc: 25.769203
+Epoch [1941], train_loss: 0.060552, val_loss: 0.057862, val_acc: 25.799351
+Epoch [1942], train_loss: 0.060637, val_loss: 0.057796, val_acc: 25.797050
+Epoch [1943], train_loss: 0.060770, val_loss: 0.057895, val_acc: 25.807899
+Epoch [1944], train_loss: 0.060732, val_loss: 0.057915, val_acc: 25.773771
+Epoch [1945], train_loss: 0.060617, val_loss: 0.057837, val_acc: 25.822304
+Epoch [1946], train_loss: 0.060661, val_loss: 0.057884, val_acc: 25.735014
+Epoch [1947], train_loss: 0.060657, val_loss: 0.057851, val_acc: 25.779036
+Epoch [1948], train_loss: 0.060788, val_loss: 0.057926, val_acc: 25.733019
+Epoch [1949], train_loss: 0.060594, val_loss: 0.057906, val_acc: 25.704819
+Epoch [1950], train_loss: 0.060797, val_loss: 0.057912, val_acc: 25.756950
+Epoch [1951], train_loss: 0.060527, val_loss: 0.057881, val_acc: 25.718782
+Epoch [1952], train_loss: 0.060652, val_loss: 0.057922, val_acc: 25.722452
+Epoch [1953], train_loss: 0.060559, val_loss: 0.057810, val_acc: 25.787907
+Epoch [1954], train_loss: 0.060664, val_loss: 0.057817, val_acc: 25.798555
+Epoch [1955], train_loss: 0.060615, val_loss: 0.057861, val_acc: 25.771284
+Epoch [1956], train_loss: 0.060672, val_loss: 0.057942, val_acc: 25.698994
+Epoch [1957], train_loss: 0.060602, val_loss: 0.057917, val_acc: 25.724073
+Epoch [1958], train_loss: 0.060667, val_loss: 0.057828, val_acc: 25.822142
+Epoch [1959], train_loss: 0.060671, val_loss: 0.057840, val_acc: 25.761124
+Epoch [1960], train_loss: 0.060784, val_loss: 0.057877, val_acc: 25.819279
+Epoch [1961], train_loss: 0.060631, val_loss: 0.058001, val_acc: 25.672041
+Epoch [1962], train_loss: 0.060622, val_loss: 0.057863, val_acc: 25.802242
+Epoch [1963], train_loss: 0.060642, val_loss: 0.057784, val_acc: 25.791975
+Epoch [1964], train_loss: 0.060743, val_loss: 0.057817, val_acc: 25.805307
+Epoch [1965], train_loss: 0.060599, val_loss: 0.057874, val_acc: 25.789913
+Epoch [1966], train_loss: 0.060571, val_loss: 0.057849, val_acc: 25.756187
+Epoch [1967], train_loss: 0.060622, val_loss: 0.057826, val_acc: 25.807642
+Epoch [1968], train_loss: 0.060540, val_loss: 0.057895, val_acc: 25.709614
+Epoch [1969], train_loss: 0.060764, val_loss: 0.057845, val_acc: 25.814013
+Epoch [1970], train_loss: 0.060707, val_loss: 0.057897, val_acc: 25.771297
+Epoch [1971], train_loss: 0.060666, val_loss: 0.057977, val_acc: 25.664454
+Epoch [1972], train_loss: 0.060654, val_loss: 0.057842, val_acc: 25.808867
+Epoch [1973], train_loss: 0.060567, val_loss: 0.057873, val_acc: 25.725061
+Epoch [1974], train_loss: 0.060512, val_loss: 0.057931, val_acc: 25.741837
+Epoch [1975], train_loss: 0.060594, val_loss: 0.057803, val_acc: 25.840113
+Epoch [1976], train_loss: 0.060375, val_loss: 0.057873, val_acc: 25.728426
+Epoch [1977], train_loss: 0.060687, val_loss: 0.057898, val_acc: 25.721708
+Epoch [1978], train_loss: 0.060480, val_loss: 0.057922, val_acc: 25.652033
+Epoch [1979], train_loss: 0.060641, val_loss: 0.057823, val_acc: 25.779364
+Epoch [1980], train_loss: 0.060432, val_loss: 0.057837, val_acc: 25.797827
+Epoch [1981], train_loss: 0.060455, val_loss: 0.057782, val_acc: 25.835230
+Epoch [1982], train_loss: 0.060527, val_loss: 0.057871, val_acc: 25.738367
+Epoch [1983], train_loss: 0.060450, val_loss: 0.057860, val_acc: 25.744108
+Epoch [1984], train_loss: 0.060603, val_loss: 0.057868, val_acc: 25.757212
+Epoch [1985], train_loss: 0.060679, val_loss: 0.057863, val_acc: 25.753080
+Epoch [1986], train_loss: 0.060475, val_loss: 0.057807, val_acc: 25.830122
+Epoch [1987], train_loss: 0.060704, val_loss: 0.058005, val_acc: 25.609501
+Epoch [1988], train_loss: 0.060499, val_loss: 0.057799, val_acc: 25.804949
+Epoch [1989], train_loss: 0.060478, val_loss: 0.057847, val_acc: 25.714729
+Epoch [1990], train_loss: 0.060539, val_loss: 0.057828, val_acc: 25.806562
+Epoch [1991], train_loss: 0.060610, val_loss: 0.057795, val_acc: 25.869062
+Epoch [1992], train_loss: 0.060647, val_loss: 0.057848, val_acc: 25.773588
+Epoch [1993], train_loss: 0.060618, val_loss: 0.057938, val_acc: 25.749388
+Epoch [1994], train_loss: 0.060591, val_loss: 0.057919, val_acc: 25.661064
+Epoch [1995], train_loss: 0.060666, val_loss: 0.057871, val_acc: 25.737471
+Epoch [1996], train_loss: 0.060615, val_loss: 0.057848, val_acc: 25.750870
+Epoch [1997], train_loss: 0.060603, val_loss: 0.057800, val_acc: 25.795664
+Epoch [1998], train_loss: 0.060570, val_loss: 0.057792, val_acc: 25.835005
+Epoch [1999], train_loss: 0.060591, val_loss: 0.057862, val_acc: 25.784523
+Epoch [2000], train_loss: 0.060467, val_loss: 0.057992, val_acc: 25.675552
+Epoch [2001], train_loss: 0.060720, val_loss: 0.057800, val_acc: 25.792900
+Epoch [2002], train_loss: 0.060532, val_loss: 0.057928, val_acc: 25.768307
+Epoch [2003], train_loss: 0.060554, val_loss: 0.057806, val_acc: 25.831648
+Epoch [2004], train_loss: 0.060539, val_loss: 0.057907, val_acc: 25.729118
+Epoch [2005], train_loss: 0.060553, val_loss: 0.057824, val_acc: 25.788866
+Epoch [2006], train_loss: 0.060715, val_loss: 0.057816, val_acc: 25.774521
+Epoch [2007], train_loss: 0.060629, val_loss: 0.057962, val_acc: 25.677082
+Epoch [2008], train_loss: 0.060616, val_loss: 0.057857, val_acc: 25.725122
+Epoch [2009], train_loss: 0.060535, val_loss: 0.057835, val_acc: 25.754622
+Epoch [2010], train_loss: 0.060644, val_loss: 0.057963, val_acc: 25.645311
+Epoch [2011], train_loss: 0.060733, val_loss: 0.057894, val_acc: 25.808266
+Epoch [2012], train_loss: 0.060668, val_loss: 0.057801, val_acc: 25.848276
+Epoch [2013], train_loss: 0.060431, val_loss: 0.057903, val_acc: 25.716825
+Epoch [2014], train_loss: 0.060555, val_loss: 0.057897, val_acc: 25.719440
+Epoch [2015], train_loss: 0.060427, val_loss: 0.057869, val_acc: 25.745663
+Epoch [2016], train_loss: 0.060563, val_loss: 0.057957, val_acc: 25.695698
+Epoch [2017], train_loss: 0.060661, val_loss: 0.057784, val_acc: 25.814585
+Epoch [2018], train_loss: 0.060529, val_loss: 0.057858, val_acc: 25.688087
+Epoch [2019], train_loss: 0.060634, val_loss: 0.057862, val_acc: 25.745232
+Epoch [2020], train_loss: 0.060634, val_loss: 0.057839, val_acc: 25.758522
+Epoch [2021], train_loss: 0.060588, val_loss: 0.057818, val_acc: 25.790602
+Epoch [2022], train_loss: 0.060569, val_loss: 0.057855, val_acc: 25.785721
+Epoch [2023], train_loss: 0.060692, val_loss: 0.057778, val_acc: 25.808039
+Epoch [2024], train_loss: 0.060369, val_loss: 0.057889, val_acc: 25.714603
+Epoch [2025], train_loss: 0.060470, val_loss: 0.057793, val_acc: 25.795155
+Epoch [2026], train_loss: 0.060494, val_loss: 0.057847, val_acc: 25.754574
+Epoch [2027], train_loss: 0.060713, val_loss: 0.057845, val_acc: 25.720240
+Epoch [2028], train_loss: 0.060649, val_loss: 0.058016, val_acc: 25.612751
+Epoch [2029], train_loss: 0.060573, val_loss: 0.057850, val_acc: 25.777836
+Epoch [2030], train_loss: 0.060587, val_loss: 0.057774, val_acc: 25.767967
+Epoch [2031], train_loss: 0.060548, val_loss: 0.057853, val_acc: 25.778936
+Epoch [2032], train_loss: 0.060645, val_loss: 0.057875, val_acc: 25.730820
+Epoch [2033], train_loss: 0.060556, val_loss: 0.057836, val_acc: 25.770809
+Epoch [2034], train_loss: 0.060466, val_loss: 0.057858, val_acc: 25.770109
+Epoch [2035], train_loss: 0.060490, val_loss: 0.057785, val_acc: 25.843552
+Epoch [2036], train_loss: 0.060599, val_loss: 0.057985, val_acc: 25.631340
+Epoch [2037], train_loss: 0.060596, val_loss: 0.057815, val_acc: 25.775209
+Epoch [2038], train_loss: 0.060599, val_loss: 0.057803, val_acc: 25.787928
+Epoch [2039], train_loss: 0.060619, val_loss: 0.057858, val_acc: 25.740221
+Epoch [2040], train_loss: 0.060417, val_loss: 0.057754, val_acc: 25.830666
+Epoch [2041], train_loss: 0.060625, val_loss: 0.057783, val_acc: 25.815393
+Epoch [2042], train_loss: 0.060461, val_loss: 0.057821, val_acc: 25.818781
+Epoch [2043], train_loss: 0.060717, val_loss: 0.057791, val_acc: 25.754246
+Epoch [2044], train_loss: 0.060799, val_loss: 0.057787, val_acc: 25.812380
+Epoch [2045], train_loss: 0.060649, val_loss: 0.057778, val_acc: 25.771223
+Epoch [2046], train_loss: 0.060530, val_loss: 0.057919, val_acc: 25.679893
+Epoch [2047], train_loss: 0.060842, val_loss: 0.057859, val_acc: 25.801821
+Epoch [2048], train_loss: 0.060615, val_loss: 0.057893, val_acc: 25.668665
+Epoch [2049], train_loss: 0.060483, val_loss: 0.057793, val_acc: 25.796824
+Epoch [2050], train_loss: 0.060512, val_loss: 0.057776, val_acc: 25.789299
+Epoch [2051], train_loss: 0.060622, val_loss: 0.057871, val_acc: 25.732231
+Epoch [2052], train_loss: 0.060643, val_loss: 0.057881, val_acc: 25.690388
+Epoch [2053], train_loss: 0.060583, val_loss: 0.057790, val_acc: 25.789383
+Epoch [2054], train_loss: 0.060546, val_loss: 0.057824, val_acc: 25.779085
+Epoch [2055], train_loss: 0.060528, val_loss: 0.057828, val_acc: 25.701191
+Epoch [2056], train_loss: 0.060528, val_loss: 0.057728, val_acc: 25.800003
+Epoch [2057], train_loss: 0.060341, val_loss: 0.057750, val_acc: 25.790480
+Epoch [2058], train_loss: 0.060611, val_loss: 0.057717, val_acc: 25.797857
+Epoch [2059], train_loss: 0.060478, val_loss: 0.057810, val_acc: 25.715593
+Epoch [2060], train_loss: 0.060500, val_loss: 0.057765, val_acc: 25.808947
+Epoch [2061], train_loss: 0.060617, val_loss: 0.057835, val_acc: 25.742184
+Epoch [2062], train_loss: 0.060500, val_loss: 0.057880, val_acc: 25.705341
+Epoch [2063], train_loss: 0.060587, val_loss: 0.057839, val_acc: 25.739052
+Epoch [2064], train_loss: 0.060449, val_loss: 0.057832, val_acc: 25.724596
+Epoch [2065], train_loss: 0.060552, val_loss: 0.057905, val_acc: 25.708536
+Epoch [2066], train_loss: 0.060481, val_loss: 0.057786, val_acc: 25.818674
+Epoch [2067], train_loss: 0.060661, val_loss: 0.057758, val_acc: 25.824492
+Epoch [2068], train_loss: 0.060378, val_loss: 0.057861, val_acc: 25.685776
+Epoch [2069], train_loss: 0.060555, val_loss: 0.057783, val_acc: 25.718573
+Epoch [2070], train_loss: 0.060407, val_loss: 0.057754, val_acc: 25.785448
+Epoch [2071], train_loss: 0.060702, val_loss: 0.057777, val_acc: 25.854490
+Epoch [2072], train_loss: 0.060659, val_loss: 0.057771, val_acc: 25.725286
+Epoch [2073], train_loss: 0.060655, val_loss: 0.057783, val_acc: 25.798092
+Epoch [2074], train_loss: 0.060791, val_loss: 0.057828, val_acc: 25.720751
+Epoch [2075], train_loss: 0.060509, val_loss: 0.057815, val_acc: 25.756018
+Epoch [2076], train_loss: 0.060509, val_loss: 0.057760, val_acc: 25.819523
+Epoch [2077], train_loss: 0.060675, val_loss: 0.057796, val_acc: 25.842150
+Epoch [2078], train_loss: 0.060397, val_loss: 0.057829, val_acc: 25.751654
+Epoch [2079], train_loss: 0.060506, val_loss: 0.057750, val_acc: 25.780537
+Epoch [2080], train_loss: 0.060621, val_loss: 0.057796, val_acc: 25.748690
+Epoch [2081], train_loss: 0.060489, val_loss: 0.057819, val_acc: 25.715643
+Epoch [2082], train_loss: 0.060729, val_loss: 0.057825, val_acc: 25.781223
+Epoch [2083], train_loss: 0.060559, val_loss: 0.057798, val_acc: 25.805647
+Epoch [2084], train_loss: 0.060593, val_loss: 0.057779, val_acc: 25.755501
+Epoch [2085], train_loss: 0.060645, val_loss: 0.057843, val_acc: 25.748102
+Epoch [2086], train_loss: 0.060589, val_loss: 0.057818, val_acc: 25.757679
+Epoch [2087], train_loss: 0.060652, val_loss: 0.057778, val_acc: 25.797817
+Epoch [2088], train_loss: 0.060395, val_loss: 0.057783, val_acc: 25.798803
+Epoch [2089], train_loss: 0.060461, val_loss: 0.057708, val_acc: 25.819643
+Epoch [2090], train_loss: 0.060568, val_loss: 0.057791, val_acc: 25.794361
+Epoch [2091], train_loss: 0.060392, val_loss: 0.057742, val_acc: 25.767414
+Epoch [2092], train_loss: 0.060659, val_loss: 0.057733, val_acc: 25.833851
+Epoch [2093], train_loss: 0.060609, val_loss: 0.057782, val_acc: 25.775709
+Epoch [2094], train_loss: 0.060725, val_loss: 0.057768, val_acc: 25.770979
+Epoch [2095], train_loss: 0.060681, val_loss: 0.057806, val_acc: 25.769707
+Epoch [2096], train_loss: 0.060543, val_loss: 0.057818, val_acc: 25.759251
+Epoch [2097], train_loss: 0.060419, val_loss: 0.057776, val_acc: 25.751493
+Epoch [2098], train_loss: 0.060550, val_loss: 0.057776, val_acc: 25.758497
+Epoch [2099], train_loss: 0.060476, val_loss: 0.057786, val_acc: 25.785734
+Epoch [2100], train_loss: 0.060457, val_loss: 0.057684, val_acc: 25.835432
+Epoch [2101], train_loss: 0.060508, val_loss: 0.057768, val_acc: 25.731443
+Epoch [2102], train_loss: 0.060526, val_loss: 0.057815, val_acc: 25.750660
+Epoch [2103], train_loss: 0.060512, val_loss: 0.057781, val_acc: 25.750648
+Epoch [2104], train_loss: 0.060414, val_loss: 0.057790, val_acc: 25.754282
+Epoch [2105], train_loss: 0.060611, val_loss: 0.057793, val_acc: 25.775105
+Epoch [2106], train_loss: 0.060265, val_loss: 0.057796, val_acc: 25.787712
+Epoch [2107], train_loss: 0.060357, val_loss: 0.057746, val_acc: 25.806059
+Epoch [2108], train_loss: 0.060614, val_loss: 0.057748, val_acc: 25.743071
+Epoch [2109], train_loss: 0.060553, val_loss: 0.057831, val_acc: 25.757973
+Epoch [2110], train_loss: 0.060563, val_loss: 0.057703, val_acc: 25.819155
+Epoch [2111], train_loss: 0.060527, val_loss: 0.057792, val_acc: 25.737038
+Epoch [2112], train_loss: 0.060571, val_loss: 0.057840, val_acc: 25.741673
+Epoch [2113], train_loss: 0.060426, val_loss: 0.057753, val_acc: 25.821943
+Epoch [2114], train_loss: 0.060441, val_loss: 0.057827, val_acc: 25.732670
+Epoch [2115], train_loss: 0.060661, val_loss: 0.057893, val_acc: 25.697752
+Epoch [2116], train_loss: 0.060579, val_loss: 0.057756, val_acc: 25.786448
+Epoch [2117], train_loss: 0.060582, val_loss: 0.057768, val_acc: 25.810890
+Epoch [2118], train_loss: 0.060365, val_loss: 0.057728, val_acc: 25.778387
+Epoch [2119], train_loss: 0.060483, val_loss: 0.057752, val_acc: 25.810337
+Epoch [2120], train_loss: 0.060632, val_loss: 0.057794, val_acc: 25.761789
+Epoch [2121], train_loss: 0.060530, val_loss: 0.057764, val_acc: 25.711740
+Epoch [2122], train_loss: 0.060418, val_loss: 0.057768, val_acc: 25.754143
+Epoch [2123], train_loss: 0.060514, val_loss: 0.057794, val_acc: 25.663702
+Epoch [2124], train_loss: 0.060472, val_loss: 0.057873, val_acc: 25.643713
+Epoch [2125], train_loss: 0.060629, val_loss: 0.057913, val_acc: 25.702360
+Epoch [2126], train_loss: 0.060549, val_loss: 0.057742, val_acc: 25.746275
+Epoch [2127], train_loss: 0.060638, val_loss: 0.057810, val_acc: 25.789139
+Epoch [2128], train_loss: 0.060488, val_loss: 0.057801, val_acc: 25.753475
+Epoch [2129], train_loss: 0.060739, val_loss: 0.057816, val_acc: 25.770485
+Epoch [2130], train_loss: 0.060589, val_loss: 0.057783, val_acc: 25.777655
+Epoch [2131], train_loss: 0.060288, val_loss: 0.057759, val_acc: 25.824373
+Epoch [2132], train_loss: 0.060399, val_loss: 0.057823, val_acc: 25.721811
+Epoch [2133], train_loss: 0.060287, val_loss: 0.057785, val_acc: 25.773781
+Epoch [2134], train_loss: 0.060509, val_loss: 0.057744, val_acc: 25.799679
+Epoch [2135], train_loss: 0.060589, val_loss: 0.057783, val_acc: 25.754112
+Epoch [2136], train_loss: 0.060551, val_loss: 0.057743, val_acc: 25.785891
+Epoch [2137], train_loss: 0.060605, val_loss: 0.057695, val_acc: 25.859438
+Epoch [2138], train_loss: 0.060537, val_loss: 0.057766, val_acc: 25.765072
+Epoch [2139], train_loss: 0.060686, val_loss: 0.057802, val_acc: 25.787819
+Epoch [2140], train_loss: 0.060446, val_loss: 0.057752, val_acc: 25.773863
+Epoch [2141], train_loss: 0.060507, val_loss: 0.057769, val_acc: 25.735466
+Epoch [2142], train_loss: 0.060430, val_loss: 0.057789, val_acc: 25.777903
+Epoch [2143], train_loss: 0.060346, val_loss: 0.057820, val_acc: 25.731836
+Epoch [2144], train_loss: 0.060544, val_loss: 0.057724, val_acc: 25.873320
+Epoch [2145], train_loss: 0.060503, val_loss: 0.057813, val_acc: 25.745455
+Epoch [2146], train_loss: 0.060438, val_loss: 0.057787, val_acc: 25.805660
+Epoch [2147], train_loss: 0.060358, val_loss: 0.057781, val_acc: 25.727564
+Epoch [2148], train_loss: 0.060575, val_loss: 0.057767, val_acc: 25.817884
+Epoch [2149], train_loss: 0.060650, val_loss: 0.057720, val_acc: 25.813257
+Epoch [2150], train_loss: 0.060276, val_loss: 0.057783, val_acc: 25.717924
+Epoch [2151], train_loss: 0.060663, val_loss: 0.057762, val_acc: 25.795622
+Epoch [2152], train_loss: 0.060515, val_loss: 0.057803, val_acc: 25.767160
+Epoch [2153], train_loss: 0.060478, val_loss: 0.057810, val_acc: 25.803493
+Epoch [2154], train_loss: 0.060359, val_loss: 0.057684, val_acc: 25.852522
+Epoch [2155], train_loss: 0.060508, val_loss: 0.057700, val_acc: 25.852602
+Epoch [2156], train_loss: 0.060510, val_loss: 0.057870, val_acc: 25.657120
+Epoch [2157], train_loss: 0.060359, val_loss: 0.057765, val_acc: 25.741968
+Epoch [2158], train_loss: 0.060626, val_loss: 0.057765, val_acc: 25.745171
+Epoch [2159], train_loss: 0.060544, val_loss: 0.057796, val_acc: 25.727818
+Epoch [2160], train_loss: 0.060439, val_loss: 0.057763, val_acc: 25.760374
+Epoch [2161], train_loss: 0.060601, val_loss: 0.057806, val_acc: 25.767580
+Epoch [2162], train_loss: 0.060579, val_loss: 0.057718, val_acc: 25.830309
+Epoch [2163], train_loss: 0.060184, val_loss: 0.057722, val_acc: 25.845072
+Epoch [2164], train_loss: 0.060766, val_loss: 0.057734, val_acc: 25.830000
+Epoch [2165], train_loss: 0.060416, val_loss: 0.057722, val_acc: 25.801022
+Epoch [2166], train_loss: 0.060542, val_loss: 0.057861, val_acc: 25.706369
+Epoch [2167], train_loss: 0.060560, val_loss: 0.057742, val_acc: 25.767771
+Epoch [2168], train_loss: 0.060379, val_loss: 0.057928, val_acc: 25.654495
+Epoch [2169], train_loss: 0.060486, val_loss: 0.057788, val_acc: 25.743372
+Epoch [2170], train_loss: 0.060463, val_loss: 0.057705, val_acc: 25.799894
+Epoch [2171], train_loss: 0.060448, val_loss: 0.057763, val_acc: 25.769062
+Epoch [2172], train_loss: 0.060548, val_loss: 0.057796, val_acc: 25.732431
+Epoch [2173], train_loss: 0.060332, val_loss: 0.057812, val_acc: 25.721066
+Epoch [2174], train_loss: 0.060685, val_loss: 0.057682, val_acc: 25.839436
+Epoch [2175], train_loss: 0.060615, val_loss: 0.057744, val_acc: 25.759651
diff --git a/UNet/Sim_logs/UNet_V10_25612277.log b/UNet/Sim_logs/UNet_V10_25612277.log
new file mode 100644
index 0000000000000000000000000000000000000000..ea9db2ebf08666c9cad5a6bff1e896e418a91467
--- /dev/null
+++ b/UNet/Sim_logs/UNet_V10_25612277.log
@@ -0,0 +1,251 @@
+(OK) Loading cuda 10.2.89
+(OK) Loading python 3.7.11
+(!!) The SciPy Stack is available: http://www.scipy.org/stackspec.html
+ Built with GCC compilers.
+9.1 k=7 lr=1e-06
+Collecting torch==1.10.1
+  Using cached torch-1.10.1-cp37-cp37m-manylinux1_x86_64.whl (881.9 MB)
+Collecting typing-extensions
+  Using cached typing_extensions-4.1.1-py3-none-any.whl (26 kB)
+Installing collected packages: typing-extensions, torch
+  WARNING: The scripts convert-caffe2-to-onnx, convert-onnx-to-caffe2 and torchrun are installed in '/home/yk138599/.local/bin' which is not on PATH.
+  Consider adding this directory to PATH or, if you prefer to suppress this warning, use --no-warn-script-location.
+Successfully installed torch-1.10.1 typing-extensions-4.1.1
+WARNING: You are using pip version 21.2.4; however, version 22.0.3 is available.
+You should consider upgrading via the '/usr/local_rwth/sw/python/3.7.11/x86_64/bin/python3.7 -m pip install --upgrade pip' command.
+number auf epochs: 230
+batchsize: 32
+learning rate: 3e-05
+kernel size is: 7
+ seed is: 2193910023
+Epoch [0], train_loss: 0.172159, val_loss: 0.134897, val_acc: 8.232150
+Epoch [1], train_loss: 0.154764, val_loss: 0.149516, val_acc: 9.604359
+Epoch [2], train_loss: 0.145394, val_loss: 0.141792, val_acc: 10.315170
+Epoch [3], train_loss: 0.137908, val_loss: 0.133222, val_acc: 11.566414
+Epoch [4], train_loss: 0.130699, val_loss: 0.126603, val_acc: 12.800211
+Epoch [5], train_loss: 0.124300, val_loss: 0.119395, val_acc: 14.006802
+Epoch [6], train_loss: 0.119279, val_loss: 0.117697, val_acc: 14.731509
+Epoch [7], train_loss: 0.115127, val_loss: 0.113816, val_acc: 15.213119
+Epoch [8], train_loss: 0.111531, val_loss: 0.108839, val_acc: 15.948540
+Epoch [9], train_loss: 0.108368, val_loss: 0.105441, val_acc: 16.304361
+Epoch [10], train_loss: 0.105599, val_loss: 0.105059, val_acc: 16.669409
+Epoch [11], train_loss: 0.103105, val_loss: 0.100976, val_acc: 16.997753
+Epoch [12], train_loss: 0.100788, val_loss: 0.099912, val_acc: 17.878908
+Epoch [13], train_loss: 0.098584, val_loss: 0.098851, val_acc: 17.732853
+Epoch [14], train_loss: 0.096507, val_loss: 0.098165, val_acc: 18.081085
+Epoch [15], train_loss: 0.094575, val_loss: 0.090383, val_acc: 18.973143
+Epoch [16], train_loss: 0.092825, val_loss: 0.091215, val_acc: 19.212927
+Epoch [17], train_loss: 0.091223, val_loss: 0.087831, val_acc: 19.317930
+Epoch [18], train_loss: 0.089696, val_loss: 0.090915, val_acc: 19.258047
+Epoch [19], train_loss: 0.088216, val_loss: 0.086321, val_acc: 20.215672
+Epoch [20], train_loss: 0.086918, val_loss: 0.086204, val_acc: 20.171139
+Epoch [21], train_loss: 0.085722, val_loss: 0.084409, val_acc: 20.590620
+Epoch [22], train_loss: 0.084520, val_loss: 0.084003, val_acc: 20.398914
+Epoch [23], train_loss: 0.083519, val_loss: 0.084401, val_acc: 20.686411
+Epoch [24], train_loss: 0.082327, val_loss: 0.082196, val_acc: 21.190142
+Epoch [25], train_loss: 0.081367, val_loss: 0.080058, val_acc: 21.363438
+Epoch [26], train_loss: 0.080530, val_loss: 0.077848, val_acc: 21.824215
+Epoch [27], train_loss: 0.079562, val_loss: 0.078423, val_acc: 21.799906
+Epoch [28], train_loss: 0.078792, val_loss: 0.077483, val_acc: 22.013546
+Epoch [29], train_loss: 0.077926, val_loss: 0.075273, val_acc: 22.397171
+Epoch [30], train_loss: 0.077265, val_loss: 0.075685, val_acc: 22.393076
+Epoch [31], train_loss: 0.076482, val_loss: 0.075852, val_acc: 22.358948
+Epoch [32], train_loss: 0.075862, val_loss: 0.076072, val_acc: 22.077982
+Epoch [33], train_loss: 0.075173, val_loss: 0.072920, val_acc: 22.887217
+Epoch [34], train_loss: 0.074599, val_loss: 0.072450, val_acc: 22.682665
+Epoch [35], train_loss: 0.073980, val_loss: 0.073773, val_acc: 22.988997
+Epoch [36], train_loss: 0.073328, val_loss: 0.072438, val_acc: 22.933538
+Epoch [37], train_loss: 0.072738, val_loss: 0.071225, val_acc: 23.384687
+Epoch [38], train_loss: 0.072217, val_loss: 0.070216, val_acc: 23.484192
+Epoch [39], train_loss: 0.071800, val_loss: 0.070990, val_acc: 23.032196
+Epoch [40], train_loss: 0.071185, val_loss: 0.071625, val_acc: 23.454826
+Epoch [41], train_loss: 0.070669, val_loss: 0.070073, val_acc: 23.252888
+Epoch [42], train_loss: 0.070109, val_loss: 0.070504, val_acc: 23.212296
+Epoch [43], train_loss: 0.069636, val_loss: 0.068912, val_acc: 23.789619
+Epoch [44], train_loss: 0.069329, val_loss: 0.069678, val_acc: 23.558134
+Epoch [45], train_loss: 0.068717, val_loss: 0.067627, val_acc: 23.818995
+Epoch [46], train_loss: 0.068469, val_loss: 0.067959, val_acc: 23.958155
+Epoch [47], train_loss: 0.068010, val_loss: 0.066766, val_acc: 24.074717
+Epoch [48], train_loss: 0.067513, val_loss: 0.067961, val_acc: 23.863684
+Epoch [49], train_loss: 0.067128, val_loss: 0.066838, val_acc: 24.060387
+Epoch [50], train_loss: 0.066870, val_loss: 0.066638, val_acc: 23.897293
+Epoch [51], train_loss: 0.066397, val_loss: 0.066282, val_acc: 24.089914
+Epoch [52], train_loss: 0.066092, val_loss: 0.066567, val_acc: 24.095610
+Epoch [53], train_loss: 0.065676, val_loss: 0.065494, val_acc: 24.244860
+Epoch [54], train_loss: 0.065444, val_loss: 0.066086, val_acc: 24.026777
+Epoch [55], train_loss: 0.065024, val_loss: 0.065012, val_acc: 24.286543
+Epoch [56], train_loss: 0.064683, val_loss: 0.065099, val_acc: 24.150110
+Epoch [57], train_loss: 0.064366, val_loss: 0.065600, val_acc: 24.080805
+Epoch [58], train_loss: 0.064120, val_loss: 0.064881, val_acc: 24.287113
+Epoch [59], train_loss: 0.063756, val_loss: 0.064497, val_acc: 24.255999
+Epoch [60], train_loss: 0.063500, val_loss: 0.064650, val_acc: 24.231817
+Epoch [61], train_loss: 0.063203, val_loss: 0.064819, val_acc: 24.282621
+Epoch [62], train_loss: 0.063003, val_loss: 0.064180, val_acc: 24.062078
+Epoch [63], train_loss: 0.062609, val_loss: 0.064087, val_acc: 24.224848
+Epoch [64], train_loss: 0.062379, val_loss: 0.065130, val_acc: 23.877653
+Epoch [65], train_loss: 0.062041, val_loss: 0.064029, val_acc: 24.273172
+Epoch [66], train_loss: 0.061703, val_loss: 0.062578, val_acc: 24.398014
+Epoch [67], train_loss: 0.061536, val_loss: 0.063704, val_acc: 24.092329
+Epoch [68], train_loss: 0.061319, val_loss: 0.063331, val_acc: 24.390806
+Epoch [69], train_loss: 0.061058, val_loss: 0.063721, val_acc: 24.198601
+Epoch [70], train_loss: 0.060738, val_loss: 0.063059, val_acc: 24.333126
+Epoch [71], train_loss: 0.060506, val_loss: 0.063049, val_acc: 24.440384
+Epoch [72], train_loss: 0.060214, val_loss: 0.062859, val_acc: 24.316059
+Epoch [73], train_loss: 0.060017, val_loss: 0.062895, val_acc: 24.254503
+Epoch [74], train_loss: 0.059867, val_loss: 0.063250, val_acc: 24.138424
+Epoch [75], train_loss: 0.059651, val_loss: 0.061885, val_acc: 24.508385
+Epoch [76], train_loss: 0.059416, val_loss: 0.062053, val_acc: 24.309017
+Epoch [77], train_loss: 0.059125, val_loss: 0.061551, val_acc: 24.361935
+Epoch [78], train_loss: 0.059023, val_loss: 0.061961, val_acc: 24.446722
+Epoch [79], train_loss: 0.058713, val_loss: 0.061251, val_acc: 24.280487
+Epoch [80], train_loss: 0.058500, val_loss: 0.062571, val_acc: 24.323124
+Epoch [81], train_loss: 0.058362, val_loss: 0.062421, val_acc: 24.249861
+Epoch [82], train_loss: 0.058205, val_loss: 0.062051, val_acc: 24.325773
+Epoch [83], train_loss: 0.058029, val_loss: 0.061250, val_acc: 24.275801
+Epoch [84], train_loss: 0.057766, val_loss: 0.061070, val_acc: 24.199694
+Epoch [85], train_loss: 0.057649, val_loss: 0.061570, val_acc: 24.325516
+Epoch [86], train_loss: 0.057445, val_loss: 0.062155, val_acc: 24.266544
+Epoch [87], train_loss: 0.057256, val_loss: 0.060859, val_acc: 24.455339
+Epoch [88], train_loss: 0.057114, val_loss: 0.060820, val_acc: 24.254255
+Epoch [89], train_loss: 0.057001, val_loss: 0.061154, val_acc: 24.557724
+Epoch [90], train_loss: 0.056861, val_loss: 0.060831, val_acc: 24.378281
+Epoch [91], train_loss: 0.056660, val_loss: 0.060268, val_acc: 24.438402
+Epoch [92], train_loss: 0.056568, val_loss: 0.060897, val_acc: 24.290401
+Epoch [93], train_loss: 0.056425, val_loss: 0.060776, val_acc: 24.258410
+Epoch [94], train_loss: 0.056314, val_loss: 0.060799, val_acc: 24.340508
+Epoch [95], train_loss: 0.056134, val_loss: 0.060945, val_acc: 24.232895
+Epoch [96], train_loss: 0.056113, val_loss: 0.060882, val_acc: 23.777275
+Epoch [97], train_loss: 0.056289, val_loss: 0.060267, val_acc: 24.278284
+Epoch [98], train_loss: 0.055945, val_loss: 0.060256, val_acc: 24.209894
+Epoch [99], train_loss: 0.055786, val_loss: 0.060310, val_acc: 24.180080
+Epoch [100], train_loss: 0.055632, val_loss: 0.060529, val_acc: 24.099735
+Epoch [101], train_loss: 0.055694, val_loss: 0.061185, val_acc: 23.389952
+Epoch [102], train_loss: 0.055926, val_loss: 0.060353, val_acc: 24.192337
+Epoch [103], train_loss: 0.055472, val_loss: 0.060154, val_acc: 24.165007
+Epoch [104], train_loss: 0.055439, val_loss: 0.060070, val_acc: 23.972866
+Epoch [105], train_loss: 0.055293, val_loss: 0.060097, val_acc: 24.157364
+Epoch [106], train_loss: 0.055193, val_loss: 0.060955, val_acc: 24.023041
+Epoch [107], train_loss: 0.055109, val_loss: 0.060630, val_acc: 24.161814
+Epoch [108], train_loss: 0.055057, val_loss: 0.060489, val_acc: 23.957731
+Epoch [109], train_loss: 0.054979, val_loss: 0.060167, val_acc: 24.176863
+Epoch [110], train_loss: 0.054931, val_loss: 0.059829, val_acc: 23.956688
+Epoch [111], train_loss: 0.055027, val_loss: 0.060497, val_acc: 23.829912
+Epoch [112], train_loss: 0.054838, val_loss: 0.060153, val_acc: 23.995926
+Epoch [113], train_loss: 0.054784, val_loss: 0.059802, val_acc: 24.149084
+Epoch [114], train_loss: 0.054705, val_loss: 0.060275, val_acc: 23.911692
+Epoch [115], train_loss: 0.055079, val_loss: 0.060241, val_acc: 23.964331
+Epoch [116], train_loss: 0.054876, val_loss: 0.060179, val_acc: 23.978962
+Epoch [117], train_loss: 0.054685, val_loss: 0.060202, val_acc: 23.811707
+Epoch [118], train_loss: 0.054609, val_loss: 0.060163, val_acc: 23.988604
+Epoch [119], train_loss: 0.054491, val_loss: 0.060386, val_acc: 23.773090
+Epoch [120], train_loss: 0.054471, val_loss: 0.060152, val_acc: 23.892086
+Epoch [121], train_loss: 0.054437, val_loss: 0.060284, val_acc: 23.877115
+Epoch [122], train_loss: 0.054334, val_loss: 0.060145, val_acc: 23.740318
+Epoch [123], train_loss: 0.054292, val_loss: 0.060177, val_acc: 23.745134
+Epoch [124], train_loss: 0.054205, val_loss: 0.060238, val_acc: 23.732887
+Epoch [125], train_loss: 0.054200, val_loss: 0.060215, val_acc: 24.049982
+Epoch [126], train_loss: 0.054244, val_loss: 0.060076, val_acc: 23.699244
+Epoch [127], train_loss: 0.054138, val_loss: 0.059991, val_acc: 23.864050
+Epoch [128], train_loss: 0.054141, val_loss: 0.060241, val_acc: 23.903837
+Epoch [129], train_loss: 0.054151, val_loss: 0.060281, val_acc: 23.560783
+Epoch [130], train_loss: 0.054029, val_loss: 0.060238, val_acc: 23.911936
+Epoch [131], train_loss: 0.054044, val_loss: 0.060180, val_acc: 23.787781
+Epoch [132], train_loss: 0.054013, val_loss: 0.060039, val_acc: 23.625818
+Epoch [133], train_loss: 0.053930, val_loss: 0.060410, val_acc: 23.763611
+Epoch [134], train_loss: 0.053942, val_loss: 0.060252, val_acc: 23.723066
+Epoch [135], train_loss: 0.053861, val_loss: 0.060532, val_acc: 23.409136
+Epoch [136], train_loss: 0.053899, val_loss: 0.059902, val_acc: 23.788069
+Epoch [137], train_loss: 0.053863, val_loss: 0.060050, val_acc: 23.572224
+Epoch [138], train_loss: 0.053788, val_loss: 0.060165, val_acc: 23.586567
+Epoch [139], train_loss: 0.053757, val_loss: 0.060219, val_acc: 23.589643
+Epoch [140], train_loss: 0.053733, val_loss: 0.060481, val_acc: 23.728474
+Epoch [141], train_loss: 0.053643, val_loss: 0.060674, val_acc: 23.406296
+Epoch [142], train_loss: 0.053663, val_loss: 0.060293, val_acc: 23.437836
+Epoch [143], train_loss: 0.053664, val_loss: 0.060190, val_acc: 23.753626
+Epoch [144], train_loss: 0.053624, val_loss: 0.060696, val_acc: 23.474693
+Epoch [145], train_loss: 0.053589, val_loss: 0.060527, val_acc: 23.435268
+Epoch [146], train_loss: 0.053518, val_loss: 0.060734, val_acc: 23.370728
+Epoch [147], train_loss: 0.053471, val_loss: 0.060297, val_acc: 23.710260
+Epoch [148], train_loss: 0.053493, val_loss: 0.060698, val_acc: 23.308548
+Epoch [149], train_loss: 0.053488, val_loss: 0.060436, val_acc: 23.432035
+Epoch [150], train_loss: 0.053476, val_loss: 0.060203, val_acc: 23.481852
+Epoch [151], train_loss: 0.053509, val_loss: 0.060281, val_acc: 23.655087
+Epoch [152], train_loss: 0.053340, val_loss: 0.060345, val_acc: 23.572468
+Epoch [153], train_loss: 0.053342, val_loss: 0.060569, val_acc: 23.384251
+Epoch [154], train_loss: 0.053312, val_loss: 0.060304, val_acc: 23.557726
+Epoch [155], train_loss: 0.053360, val_loss: 0.060526, val_acc: 23.549656
+Epoch [156], train_loss: 0.053343, val_loss: 0.060306, val_acc: 23.492746
+Epoch [157], train_loss: 0.053305, val_loss: 0.060262, val_acc: 23.589207
+Epoch [158], train_loss: 0.053291, val_loss: 0.060595, val_acc: 23.471169
+Epoch [159], train_loss: 0.053241, val_loss: 0.060537, val_acc: 23.457382
+Epoch [160], train_loss: 0.053227, val_loss: 0.060434, val_acc: 23.477564
+Epoch [161], train_loss: 0.053294, val_loss: 0.060241, val_acc: 23.508228
+Epoch [162], train_loss: 0.053212, val_loss: 0.060455, val_acc: 23.365995
+Epoch [163], train_loss: 0.053139, val_loss: 0.060506, val_acc: 23.337477
+Epoch [164], train_loss: 0.053097, val_loss: 0.060382, val_acc: 23.412491
+Epoch [165], train_loss: 0.053078, val_loss: 0.060694, val_acc: 23.422421
+Epoch [166], train_loss: 0.053050, val_loss: 0.060495, val_acc: 23.351999
+Epoch [167], train_loss: 0.053065, val_loss: 0.060436, val_acc: 23.359755
+Epoch [168], train_loss: 0.052945, val_loss: 0.060826, val_acc: 23.351957
+Epoch [169], train_loss: 0.052947, val_loss: 0.060521, val_acc: 23.386696
+Epoch [170], train_loss: 0.052955, val_loss: 0.060605, val_acc: 23.356211
+Epoch [171], train_loss: 0.052973, val_loss: 0.060659, val_acc: 23.118883
+Epoch [172], train_loss: 0.052890, val_loss: 0.060813, val_acc: 23.110664
+Epoch [173], train_loss: 0.052895, val_loss: 0.060651, val_acc: 23.097389
+Epoch [174], train_loss: 0.052838, val_loss: 0.060652, val_acc: 23.441105
+Epoch [175], train_loss: 0.052819, val_loss: 0.060550, val_acc: 23.350451
+Epoch [176], train_loss: 0.052831, val_loss: 0.060747, val_acc: 23.232273
+Epoch [177], train_loss: 0.052797, val_loss: 0.060623, val_acc: 23.047886
+Epoch [178], train_loss: 0.052731, val_loss: 0.060686, val_acc: 23.325443
+Epoch [179], train_loss: 0.052780, val_loss: 0.060437, val_acc: 23.221397
+Epoch [180], train_loss: 0.052774, val_loss: 0.060859, val_acc: 23.149523
+Epoch [181], train_loss: 0.052705, val_loss: 0.060648, val_acc: 23.438019
+Epoch [182], train_loss: 0.052706, val_loss: 0.061055, val_acc: 23.198784
+Epoch [183], train_loss: 0.052639, val_loss: 0.060664, val_acc: 23.085558
+Epoch [184], train_loss: 0.052612, val_loss: 0.060640, val_acc: 23.230736
+Epoch [185], train_loss: 0.052606, val_loss: 0.061146, val_acc: 23.229620
+Epoch [186], train_loss: 0.052564, val_loss: 0.060964, val_acc: 23.203112
+Epoch [187], train_loss: 0.052561, val_loss: 0.061020, val_acc: 22.961943
+Epoch [188], train_loss: 0.052595, val_loss: 0.060720, val_acc: 23.079414
+Epoch [189], train_loss: 0.052645, val_loss: 0.060697, val_acc: 23.262266
+Epoch [190], train_loss: 0.052493, val_loss: 0.061061, val_acc: 23.339027
+Epoch [191], train_loss: 0.052501, val_loss: 0.060688, val_acc: 23.206562
+Epoch [192], train_loss: 0.052480, val_loss: 0.060922, val_acc: 23.330856
+Epoch [193], train_loss: 0.052404, val_loss: 0.060872, val_acc: 23.047688
+Epoch [194], train_loss: 0.052458, val_loss: 0.060749, val_acc: 23.113152
+Epoch [195], train_loss: 0.052585, val_loss: 0.061227, val_acc: 23.220179
+Epoch [196], train_loss: 0.052396, val_loss: 0.061003, val_acc: 23.182514
+Epoch [197], train_loss: 0.052372, val_loss: 0.060952, val_acc: 22.946548
+Epoch [198], train_loss: 0.052380, val_loss: 0.061128, val_acc: 22.987700
+Epoch [199], train_loss: 0.052300, val_loss: 0.060594, val_acc: 23.095743
+Epoch [200], train_loss: 0.052291, val_loss: 0.060978, val_acc: 22.815882
+Epoch [201], train_loss: 0.052304, val_loss: 0.060610, val_acc: 23.178596
+Epoch [202], train_loss: 0.052227, val_loss: 0.060631, val_acc: 23.097298
+Epoch [203], train_loss: 0.052170, val_loss: 0.061387, val_acc: 22.992418
+Epoch [204], train_loss: 0.052247, val_loss: 0.060712, val_acc: 23.201307
+Epoch [205], train_loss: 0.052191, val_loss: 0.061065, val_acc: 22.982407
+Epoch [206], train_loss: 0.052159, val_loss: 0.061087, val_acc: 22.831919
+Epoch [207], train_loss: 0.052104, val_loss: 0.060824, val_acc: 23.127977
+Epoch [208], train_loss: 0.052097, val_loss: 0.061191, val_acc: 22.871489
+Epoch [209], train_loss: 0.052083, val_loss: 0.061110, val_acc: 22.917845
+Epoch [210], train_loss: 0.052055, val_loss: 0.061059, val_acc: 22.979807
+Epoch [211], train_loss: 0.052102, val_loss: 0.061664, val_acc: 22.933458
+Epoch [212], train_loss: 0.052074, val_loss: 0.061027, val_acc: 23.026991
+Epoch [213], train_loss: 0.052006, val_loss: 0.060921, val_acc: 23.098963
+Epoch [214], train_loss: 0.052017, val_loss: 0.061084, val_acc: 23.007317
+Epoch [215], train_loss: 0.051949, val_loss: 0.060901, val_acc: 22.937109
+Epoch [216], train_loss: 0.051969, val_loss: 0.061064, val_acc: 23.070229
+Epoch [217], train_loss: 0.051991, val_loss: 0.061036, val_acc: 22.973877
+Epoch [218], train_loss: 0.051925, val_loss: 0.061145, val_acc: 22.991600
+Epoch [219], train_loss: 0.051897, val_loss: 0.061255, val_acc: 22.972713
+Epoch [220], train_loss: 0.051878, val_loss: 0.061245, val_acc: 22.834990
+Epoch [221], train_loss: 0.051870, val_loss: 0.061007, val_acc: 22.964268
+Epoch [222], train_loss: 0.051876, val_loss: 0.061137, val_acc: 22.931534
+Epoch [223], train_loss: 0.051881, val_loss: 0.061633, val_acc: 22.742224
+Epoch [224], train_loss: 0.051870, val_loss: 0.061135, val_acc: 22.845118
+Epoch [225], train_loss: 0.051751, val_loss: 0.061083, val_acc: 23.100956
+Epoch [226], train_loss: 0.051763, val_loss: 0.061297, val_acc: 23.037960
+Epoch [227], train_loss: 0.051742, val_loss: 0.061086, val_acc: 22.866312
+Epoch [228], train_loss: 0.051751, val_loss: 0.061252, val_acc: 23.103235
+Epoch [229], train_loss: 0.051696, val_loss: 0.061179, val_acc: 22.944912
+python3 ./UNet_V10.py  3181.56s user 3237.04s system 99% cpu 1:47:49.21 total
diff --git a/UNet/Sim_logs/UNet_V10_25613891.log b/UNet/Sim_logs/UNet_V10_25613891.log
new file mode 100644
index 0000000000000000000000000000000000000000..ae4b0c2adfa1c59a0a94a2fc037ecfe3acdfc273
--- /dev/null
+++ b/UNet/Sim_logs/UNet_V10_25613891.log
@@ -0,0 +1,150 @@
+(OK) Loading cuda 10.2.89
+(OK) Loading python 3.7.11
+(!!) The SciPy Stack is available: http://www.scipy.org/stackspec.html
+ Built with GCC compilers.
+9.1 k=7 lr=1e-06
+Collecting torch==1.10.1
+  Using cached torch-1.10.1-cp37-cp37m-manylinux1_x86_64.whl (881.9 MB)
+Collecting typing-extensions
+  Using cached typing_extensions-4.1.1-py3-none-any.whl (26 kB)
+Installing collected packages: typing-extensions, torch
+  WARNING: The scripts convert-caffe2-to-onnx, convert-onnx-to-caffe2 and torchrun are installed in '/home/yk138599/.local/bin' which is not on PATH.
+  Consider adding this directory to PATH or, if you prefer to suppress this warning, use --no-warn-script-location.
+Successfully installed torch-1.10.1 typing-extensions-4.1.1
+WARNING: You are using pip version 21.2.4; however, version 22.0.3 is available.
+You should consider upgrading via the '/usr/local_rwth/sw/python/3.7.11/x86_64/bin/python3.7 -m pip install --upgrade pip' command.
+number auf epochs: 230
+batchsize: 32
+learning rate: 3e-05
+kernel size is: 7
+ seed is: 2193910023
+Epoch [0], train_loss: 0.172159, val_loss: 0.134897, val_acc: 8.232150
+Epoch [1], train_loss: 0.154764, val_loss: 0.149513, val_acc: 9.604362
+Epoch [2], train_loss: 0.145393, val_loss: 0.141783, val_acc: 10.315677
+Epoch [3], train_loss: 0.137907, val_loss: 0.133238, val_acc: 11.565667
+Epoch [4], train_loss: 0.130700, val_loss: 0.126618, val_acc: 12.799486
+Epoch [5], train_loss: 0.124301, val_loss: 0.119391, val_acc: 14.005971
+Epoch [6], train_loss: 0.119281, val_loss: 0.117707, val_acc: 14.730141
+Epoch [7], train_loss: 0.115129, val_loss: 0.113810, val_acc: 15.213734
+Epoch [8], train_loss: 0.111532, val_loss: 0.108844, val_acc: 15.947191
+Epoch [9], train_loss: 0.108369, val_loss: 0.105434, val_acc: 16.305136
+Epoch [10], train_loss: 0.105600, val_loss: 0.105060, val_acc: 16.668842
+Epoch [11], train_loss: 0.103106, val_loss: 0.100977, val_acc: 16.996891
+Epoch [12], train_loss: 0.100789, val_loss: 0.099918, val_acc: 17.879194
+Epoch [13], train_loss: 0.098587, val_loss: 0.098855, val_acc: 17.733910
+Epoch [14], train_loss: 0.096511, val_loss: 0.098181, val_acc: 18.080399
+Epoch [15], train_loss: 0.094577, val_loss: 0.090388, val_acc: 18.970169
+Epoch [16], train_loss: 0.092829, val_loss: 0.091234, val_acc: 19.204573
+Epoch [17], train_loss: 0.091226, val_loss: 0.087852, val_acc: 19.315931
+Epoch [18], train_loss: 0.089698, val_loss: 0.090928, val_acc: 19.257055
+Epoch [19], train_loss: 0.088219, val_loss: 0.086333, val_acc: 20.211460
+Epoch [20], train_loss: 0.086921, val_loss: 0.086304, val_acc: 20.159164
+Epoch [21], train_loss: 0.085725, val_loss: 0.084462, val_acc: 20.588184
+Epoch [22], train_loss: 0.084522, val_loss: 0.083909, val_acc: 20.408340
+Epoch [23], train_loss: 0.083521, val_loss: 0.084287, val_acc: 20.691048
+Epoch [24], train_loss: 0.082329, val_loss: 0.082153, val_acc: 21.196333
+Epoch [25], train_loss: 0.081370, val_loss: 0.079981, val_acc: 21.367714
+Epoch [26], train_loss: 0.080531, val_loss: 0.077731, val_acc: 21.832436
+Epoch [27], train_loss: 0.079563, val_loss: 0.078639, val_acc: 21.780674
+Epoch [28], train_loss: 0.078792, val_loss: 0.077568, val_acc: 22.011267
+Epoch [29], train_loss: 0.077926, val_loss: 0.075187, val_acc: 22.399889
+Epoch [30], train_loss: 0.077268, val_loss: 0.075598, val_acc: 22.404566
+Epoch [31], train_loss: 0.076484, val_loss: 0.075851, val_acc: 22.357248
+Epoch [32], train_loss: 0.075862, val_loss: 0.076010, val_acc: 22.082264
+Epoch [33], train_loss: 0.075176, val_loss: 0.073063, val_acc: 22.871202
+Epoch [34], train_loss: 0.074600, val_loss: 0.072424, val_acc: 22.691294
+Epoch [35], train_loss: 0.073978, val_loss: 0.073854, val_acc: 22.983658
+Epoch [36], train_loss: 0.073329, val_loss: 0.072467, val_acc: 22.935041
+Epoch [37], train_loss: 0.072738, val_loss: 0.071249, val_acc: 23.380610
+Epoch [38], train_loss: 0.072212, val_loss: 0.070255, val_acc: 23.480034
+Epoch [39], train_loss: 0.071806, val_loss: 0.070987, val_acc: 23.009283
+Epoch [40], train_loss: 0.071186, val_loss: 0.071874, val_acc: 23.429399
+Epoch [41], train_loss: 0.070669, val_loss: 0.069939, val_acc: 23.303867
+Epoch [42], train_loss: 0.070107, val_loss: 0.070615, val_acc: 23.203262
+Epoch [43], train_loss: 0.069639, val_loss: 0.068891, val_acc: 23.788996
+Epoch [44], train_loss: 0.069327, val_loss: 0.069616, val_acc: 23.544291
+Epoch [45], train_loss: 0.068721, val_loss: 0.067706, val_acc: 23.798956
+Epoch [46], train_loss: 0.068469, val_loss: 0.067857, val_acc: 23.981165
+Epoch [47], train_loss: 0.068017, val_loss: 0.066973, val_acc: 24.036139
+Epoch [48], train_loss: 0.067512, val_loss: 0.067707, val_acc: 23.934046
+Epoch [49], train_loss: 0.067128, val_loss: 0.067037, val_acc: 24.049589
+Epoch [50], train_loss: 0.066871, val_loss: 0.066642, val_acc: 23.908134
+Epoch [51], train_loss: 0.066393, val_loss: 0.066199, val_acc: 24.143085
+Epoch [52], train_loss: 0.066087, val_loss: 0.066597, val_acc: 24.089371
+Epoch [53], train_loss: 0.065676, val_loss: 0.065594, val_acc: 24.231956
+Epoch [54], train_loss: 0.065438, val_loss: 0.066157, val_acc: 24.028706
+Epoch [55], train_loss: 0.065025, val_loss: 0.064976, val_acc: 24.295607
+Epoch [56], train_loss: 0.064680, val_loss: 0.065133, val_acc: 24.169670
+Epoch [57], train_loss: 0.064373, val_loss: 0.065394, val_acc: 24.158226
+Epoch [58], train_loss: 0.064136, val_loss: 0.064854, val_acc: 24.283140
+Epoch [59], train_loss: 0.063758, val_loss: 0.064388, val_acc: 24.285776
+Epoch [60], train_loss: 0.063500, val_loss: 0.064462, val_acc: 24.282780
+Epoch [61], train_loss: 0.063202, val_loss: 0.064755, val_acc: 24.260828
+Epoch [62], train_loss: 0.062999, val_loss: 0.064171, val_acc: 24.064079
+Epoch [63], train_loss: 0.062612, val_loss: 0.063785, val_acc: 24.316320
+Epoch [64], train_loss: 0.062364, val_loss: 0.065014, val_acc: 23.879269
+Epoch [65], train_loss: 0.062031, val_loss: 0.064160, val_acc: 24.264816
+Epoch [66], train_loss: 0.061702, val_loss: 0.062580, val_acc: 24.390438
+Epoch [67], train_loss: 0.061543, val_loss: 0.063562, val_acc: 24.139484
+Epoch [68], train_loss: 0.061312, val_loss: 0.063350, val_acc: 24.383400
+Epoch [69], train_loss: 0.061051, val_loss: 0.063542, val_acc: 24.254921
+Epoch [70], train_loss: 0.060737, val_loss: 0.063298, val_acc: 24.286329
+Epoch [71], train_loss: 0.060505, val_loss: 0.063022, val_acc: 24.449074
+Epoch [72], train_loss: 0.060215, val_loss: 0.062785, val_acc: 24.295307
+Epoch [73], train_loss: 0.060015, val_loss: 0.062736, val_acc: 24.266800
+Epoch [74], train_loss: 0.059863, val_loss: 0.063201, val_acc: 24.179512
+Epoch [75], train_loss: 0.059640, val_loss: 0.061952, val_acc: 24.476267
+Epoch [76], train_loss: 0.059409, val_loss: 0.062113, val_acc: 24.239363
+Epoch [77], train_loss: 0.059127, val_loss: 0.061637, val_acc: 24.367235
+Epoch [78], train_loss: 0.059035, val_loss: 0.061945, val_acc: 24.438744
+Epoch [79], train_loss: 0.058708, val_loss: 0.061262, val_acc: 24.321024
+Epoch [80], train_loss: 0.058513, val_loss: 0.062731, val_acc: 24.271156
+Epoch [81], train_loss: 0.058360, val_loss: 0.062234, val_acc: 24.304838
+Epoch [82], train_loss: 0.058198, val_loss: 0.062076, val_acc: 24.321804
+Epoch [83], train_loss: 0.058039, val_loss: 0.061176, val_acc: 24.197659
+Epoch [84], train_loss: 0.057764, val_loss: 0.061019, val_acc: 24.051081
+Epoch [85], train_loss: 0.057638, val_loss: 0.061459, val_acc: 24.361351
+Epoch [86], train_loss: 0.057442, val_loss: 0.062194, val_acc: 24.275339
+Epoch [87], train_loss: 0.057255, val_loss: 0.060897, val_acc: 24.429157
+Epoch [88], train_loss: 0.057108, val_loss: 0.060761, val_acc: 24.160164
+Epoch [89], train_loss: 0.056999, val_loss: 0.061203, val_acc: 24.515099
+Epoch [90], train_loss: 0.056865, val_loss: 0.060768, val_acc: 24.382809
+Epoch [91], train_loss: 0.056664, val_loss: 0.060413, val_acc: 24.461168
+Epoch [92], train_loss: 0.056576, val_loss: 0.060958, val_acc: 24.297220
+Epoch [93], train_loss: 0.056440, val_loss: 0.060730, val_acc: 24.298838
+Epoch [94], train_loss: 0.056320, val_loss: 0.060735, val_acc: 24.367786
+Epoch [95], train_loss: 0.056127, val_loss: 0.060964, val_acc: 24.244566
+Epoch [96], train_loss: 0.056081, val_loss: 0.060804, val_acc: 23.738966
+Epoch [97], train_loss: 0.056158, val_loss: 0.060442, val_acc: 24.291683
+Epoch [98], train_loss: 0.055901, val_loss: 0.060288, val_acc: 24.195393
+Epoch [99], train_loss: 0.055746, val_loss: 0.060228, val_acc: 24.155149
+Epoch [100], train_loss: 0.055599, val_loss: 0.060531, val_acc: 24.092314
+Epoch [101], train_loss: 0.055697, val_loss: 0.061861, val_acc: 23.017687
+Epoch [102], train_loss: 0.056154, val_loss: 0.060368, val_acc: 24.190067
+Epoch [103], train_loss: 0.055577, val_loss: 0.060029, val_acc: 24.183594
+Epoch [104], train_loss: 0.055478, val_loss: 0.060107, val_acc: 24.027838
+Epoch [105], train_loss: 0.055320, val_loss: 0.059970, val_acc: 24.185102
+Epoch [106], train_loss: 0.055209, val_loss: 0.060843, val_acc: 24.038437
+Epoch [107], train_loss: 0.055137, val_loss: 0.060474, val_acc: 24.235643
+Epoch [108], train_loss: 0.055071, val_loss: 0.060558, val_acc: 23.983761
+Epoch [109], train_loss: 0.055062, val_loss: 0.059950, val_acc: 24.179392
+Epoch [110], train_loss: 0.054991, val_loss: 0.059790, val_acc: 23.953821
+Epoch [111], train_loss: 0.054931, val_loss: 0.060107, val_acc: 23.820332
+Epoch [112], train_loss: 0.054804, val_loss: 0.059997, val_acc: 24.063360
+Epoch [113], train_loss: 0.054792, val_loss: 0.059835, val_acc: 24.158604
+Epoch [114], train_loss: 0.054743, val_loss: 0.060376, val_acc: 23.906847
+Epoch [115], train_loss: 0.054635, val_loss: 0.060312, val_acc: 24.036772
+Epoch [116], train_loss: 0.054594, val_loss: 0.060109, val_acc: 24.035889
+Epoch [117], train_loss: 0.054520, val_loss: 0.060240, val_acc: 23.802015
+Epoch [118], train_loss: 0.054514, val_loss: 0.060085, val_acc: 23.984306
+Epoch [119], train_loss: 0.054441, val_loss: 0.060467, val_acc: 23.706032
+Epoch [120], train_loss: 0.054434, val_loss: 0.060116, val_acc: 23.896467
+Epoch [121], train_loss: 0.054380, val_loss: 0.060271, val_acc: 23.816908
+Epoch [122], train_loss: 0.054289, val_loss: 0.060246, val_acc: 23.649164
+Epoch [123], train_loss: 0.054335, val_loss: 0.060152, val_acc: 23.814463
+Epoch [124], train_loss: 0.054230, val_loss: 0.060325, val_acc: 23.708891
+Epoch [125], train_loss: 0.054205, val_loss: 0.060346, val_acc: 24.028004
+Epoch [126], train_loss: 0.054326, val_loss: 0.060018, val_acc: 23.773373
+Epoch [127], train_loss: 0.054209, val_loss: 0.059985, val_acc: 23.883633
+Epoch [128], train_loss: 0.054153, val_loss: 0.060210, val_acc: 23.929909
+slurmstepd: error: *** JOB 25613891 ON ncg09 CANCELLED AT 2022-02-18T16:40:12 ***
diff --git a/UNet/Sim_logs/UNet_V10_25613902.log b/UNet/Sim_logs/UNet_V10_25613902.log
new file mode 100644
index 0000000000000000000000000000000000000000..4a2c857712f1116381ed923cf506aabfcf7eb9dc
--- /dev/null
+++ b/UNet/Sim_logs/UNet_V10_25613902.log
@@ -0,0 +1,40 @@
+(OK) Loading cuda 10.2.89
+(OK) Loading python 3.7.11
+(!!) The SciPy Stack is available: http://www.scipy.org/stackspec.html
+ Built with GCC compilers.
+9.1 k=7 lr=1e-06
+Collecting torch==1.10.1
+  Using cached torch-1.10.1-cp37-cp37m-manylinux1_x86_64.whl (881.9 MB)
+Collecting typing-extensions
+  Using cached typing_extensions-4.1.1-py3-none-any.whl (26 kB)
+Installing collected packages: typing-extensions, torch
+  WARNING: The scripts convert-caffe2-to-onnx, convert-onnx-to-caffe2 and torchrun are installed in '/home/yk138599/.local/bin' which is not on PATH.
+  Consider adding this directory to PATH or, if you prefer to suppress this warning, use --no-warn-script-location.
+Successfully installed torch-1.10.1 typing-extensions-4.1.1
+WARNING: You are using pip version 21.2.4; however, version 22.0.3 is available.
+You should consider upgrading via the '/usr/local_rwth/sw/python/3.7.11/x86_64/bin/python3.7 -m pip install --upgrade pip' command.
+number auf epochs: 230
+batchsize: 32
+learning rate: 3e-05
+kernel size is: 7
+ seed is: 2193910023
+Epoch [0], train_loss: 0.172159, val_loss: 0.134897, val_acc: 8.232150
+Epoch [1], train_loss: 0.154764, val_loss: 0.149515, val_acc: 9.604716
+Epoch [2], train_loss: 0.145394, val_loss: 0.141791, val_acc: 10.314930
+Epoch [3], train_loss: 0.137908, val_loss: 0.133227, val_acc: 11.566616
+Epoch [4], train_loss: 0.130700, val_loss: 0.126600, val_acc: 12.800251
+Epoch [5], train_loss: 0.124301, val_loss: 0.119409, val_acc: 14.005206
+Epoch [6], train_loss: 0.119281, val_loss: 0.117702, val_acc: 14.731109
+Epoch [7], train_loss: 0.115129, val_loss: 0.113808, val_acc: 15.213907
+Epoch [8], train_loss: 0.111533, val_loss: 0.108849, val_acc: 15.948510
+Epoch [9], train_loss: 0.108369, val_loss: 0.105453, val_acc: 16.303570
+Epoch [10], train_loss: 0.105600, val_loss: 0.105038, val_acc: 16.671921
+Epoch [11], train_loss: 0.103106, val_loss: 0.100967, val_acc: 16.997890
+Epoch [12], train_loss: 0.100788, val_loss: 0.099916, val_acc: 17.879736
+Epoch [13], train_loss: 0.098585, val_loss: 0.098820, val_acc: 17.735533
+Epoch [14], train_loss: 0.096508, val_loss: 0.098303, val_acc: 18.074396
+Epoch [15], train_loss: 0.094576, val_loss: 0.090374, val_acc: 18.972033
+Epoch [16], train_loss: 0.092827, val_loss: 0.091200, val_acc: 19.210970
+Epoch [17], train_loss: 0.091224, val_loss: 0.087842, val_acc: 19.315222
+Epoch [18], train_loss: 0.089697, val_loss: 0.090892, val_acc: 19.257893
+slurmstepd: error: *** JOB 25613902 ON ncg08 CANCELLED AT 2022-02-18T15:53:18 ***
diff --git a/UNet/Sim_logs/UNet_V9_1_25611141.log b/UNet/Sim_logs/UNet_V9_1_25611141.log
index 275764a54bf4d79ac276ab894a95cf9b0318604d..6c50606e72c6d54f9960a158d454b415af4b87e0 100644
--- a/UNet/Sim_logs/UNet_V9_1_25611141.log
+++ b/UNet/Sim_logs/UNet_V9_1_25611141.log
@@ -50,3 +50,972 @@ Epoch [28], train_loss: 0.188027, val_loss: 0.189067, val_acc: 6.282068
 Epoch [29], train_loss: 0.187412, val_loss: 0.188952, val_acc: 6.267137
 Epoch [30], train_loss: 0.186806, val_loss: 0.188327, val_acc: 6.304575
 Epoch [31], train_loss: 0.186197, val_loss: 0.187869, val_acc: 6.352044
+Epoch [32], train_loss: 0.185586, val_loss: 0.186002, val_acc: 6.434209
+Epoch [33], train_loss: 0.184955, val_loss: 0.184668, val_acc: 6.538694
+Epoch [34], train_loss: 0.184296, val_loss: 0.184656, val_acc: 6.553225
+Epoch [35], train_loss: 0.183652, val_loss: 0.183366, val_acc: 6.648615
+Epoch [36], train_loss: 0.183005, val_loss: 0.182568, val_acc: 6.686866
+Epoch [37], train_loss: 0.182412, val_loss: 0.182207, val_acc: 6.762219
+Epoch [38], train_loss: 0.181761, val_loss: 0.181803, val_acc: 6.783301
+Epoch [39], train_loss: 0.181158, val_loss: 0.182120, val_acc: 6.810902
+Epoch [40], train_loss: 0.180514, val_loss: 0.180265, val_acc: 6.926140
+Epoch [41], train_loss: 0.179910, val_loss: 0.181119, val_acc: 6.950724
+Epoch [42], train_loss: 0.179287, val_loss: 0.181330, val_acc: 7.009320
+Epoch [43], train_loss: 0.178682, val_loss: 0.181129, val_acc: 7.013269
+Epoch [44], train_loss: 0.178051, val_loss: 0.180185, val_acc: 7.153759
+Epoch [45], train_loss: 0.177413, val_loss: 0.177864, val_acc: 7.325569
+Epoch [46], train_loss: 0.176811, val_loss: 0.179582, val_acc: 7.280849
+Epoch [47], train_loss: 0.176193, val_loss: 0.177398, val_acc: 7.435537
+Epoch [48], train_loss: 0.175576, val_loss: 0.174322, val_acc: 7.617465
+Epoch [49], train_loss: 0.174978, val_loss: 0.175285, val_acc: 7.643335
+Epoch [50], train_loss: 0.174353, val_loss: 0.174529, val_acc: 7.731801
+Epoch [51], train_loss: 0.173793, val_loss: 0.174405, val_acc: 7.788347
+Epoch [52], train_loss: 0.173150, val_loss: 0.173778, val_acc: 7.897002
+Epoch [53], train_loss: 0.172536, val_loss: 0.173915, val_acc: 7.931596
+Epoch [54], train_loss: 0.171946, val_loss: 0.170892, val_acc: 8.157052
+Epoch [55], train_loss: 0.171383, val_loss: 0.173791, val_acc: 8.083368
+Epoch [56], train_loss: 0.170819, val_loss: 0.173955, val_acc: 8.088478
+Epoch [57], train_loss: 0.170193, val_loss: 0.171605, val_acc: 8.299802
+Epoch [58], train_loss: 0.169655, val_loss: 0.172419, val_acc: 8.263888
+Epoch [59], train_loss: 0.169093, val_loss: 0.174126, val_acc: 8.270204
+Epoch [60], train_loss: 0.168525, val_loss: 0.170397, val_acc: 8.491250
+Epoch [61], train_loss: 0.168004, val_loss: 0.171498, val_acc: 8.519955
+Epoch [62], train_loss: 0.167464, val_loss: 0.168845, val_acc: 8.654284
+Epoch [63], train_loss: 0.166891, val_loss: 0.168406, val_acc: 8.774763
+Epoch [64], train_loss: 0.166374, val_loss: 0.169097, val_acc: 8.753822
+Epoch [65], train_loss: 0.165830, val_loss: 0.166489, val_acc: 8.969545
+Epoch [66], train_loss: 0.165362, val_loss: 0.165986, val_acc: 9.110494
+Epoch [67], train_loss: 0.164870, val_loss: 0.166089, val_acc: 9.073074
+Epoch [68], train_loss: 0.164359, val_loss: 0.166062, val_acc: 9.097000
+Epoch [69], train_loss: 0.163878, val_loss: 0.164030, val_acc: 9.256886
+Epoch [70], train_loss: 0.163385, val_loss: 0.164800, val_acc: 9.248062
+Epoch [71], train_loss: 0.162930, val_loss: 0.165091, val_acc: 9.359792
+Epoch [72], train_loss: 0.162412, val_loss: 0.162088, val_acc: 9.449850
+Epoch [73], train_loss: 0.161957, val_loss: 0.164978, val_acc: 9.381195
+Epoch [74], train_loss: 0.161490, val_loss: 0.164685, val_acc: 9.467165
+Epoch [75], train_loss: 0.161044, val_loss: 0.163421, val_acc: 9.494834
+Epoch [76], train_loss: 0.160613, val_loss: 0.163475, val_acc: 9.572630
+Epoch [77], train_loss: 0.160089, val_loss: 0.161263, val_acc: 9.760539
+Epoch [78], train_loss: 0.159718, val_loss: 0.160143, val_acc: 9.887158
+Epoch [79], train_loss: 0.159306, val_loss: 0.161229, val_acc: 9.819016
+Epoch [80], train_loss: 0.158844, val_loss: 0.161864, val_acc: 9.756536
+Epoch [81], train_loss: 0.158461, val_loss: 0.160362, val_acc: 9.916168
+Epoch [82], train_loss: 0.158091, val_loss: 0.161436, val_acc: 9.851745
+Epoch [83], train_loss: 0.157709, val_loss: 0.162159, val_acc: 9.912709
+Epoch [84], train_loss: 0.157267, val_loss: 0.161053, val_acc: 9.970392
+Epoch [85], train_loss: 0.156923, val_loss: 0.160564, val_acc: 10.047922
+Epoch [86], train_loss: 0.156483, val_loss: 0.157377, val_acc: 10.284967
+Epoch [87], train_loss: 0.156120, val_loss: 0.160920, val_acc: 10.133070
+Epoch [88], train_loss: 0.155748, val_loss: 0.158000, val_acc: 10.243478
+Epoch [89], train_loss: 0.155329, val_loss: 0.156014, val_acc: 10.448779
+Epoch [90], train_loss: 0.155008, val_loss: 0.155890, val_acc: 10.467603
+Epoch [91], train_loss: 0.154603, val_loss: 0.156862, val_acc: 10.383559
+Epoch [92], train_loss: 0.154287, val_loss: 0.155945, val_acc: 10.500990
+Epoch [93], train_loss: 0.153914, val_loss: 0.154736, val_acc: 10.594455
+Epoch [94], train_loss: 0.153520, val_loss: 0.156850, val_acc: 10.500782
+Epoch [95], train_loss: 0.153229, val_loss: 0.156637, val_acc: 10.593576
+Epoch [96], train_loss: 0.152866, val_loss: 0.154054, val_acc: 10.725142
+Epoch [97], train_loss: 0.152506, val_loss: 0.154033, val_acc: 10.840881
+Epoch [98], train_loss: 0.152233, val_loss: 0.153583, val_acc: 10.825500
+Epoch [99], train_loss: 0.151903, val_loss: 0.154843, val_acc: 10.759509
+Epoch [100], train_loss: 0.151614, val_loss: 0.154275, val_acc: 10.800281
+Epoch [101], train_loss: 0.151332, val_loss: 0.152841, val_acc: 10.993850
+Epoch [102], train_loss: 0.151096, val_loss: 0.151795, val_acc: 10.951658
+Epoch [103], train_loss: 0.150761, val_loss: 0.154513, val_acc: 10.915642
+Epoch [104], train_loss: 0.150497, val_loss: 0.154929, val_acc: 10.898633
+Epoch [105], train_loss: 0.150219, val_loss: 0.149582, val_acc: 11.217516
+Epoch [106], train_loss: 0.149988, val_loss: 0.152332, val_acc: 11.051826
+Epoch [107], train_loss: 0.149685, val_loss: 0.152799, val_acc: 11.148404
+Epoch [108], train_loss: 0.149418, val_loss: 0.150836, val_acc: 11.245765
+Epoch [109], train_loss: 0.149212, val_loss: 0.153223, val_acc: 11.143644
+Epoch [110], train_loss: 0.148897, val_loss: 0.150268, val_acc: 11.296826
+Epoch [111], train_loss: 0.148648, val_loss: 0.152774, val_acc: 11.202285
+Epoch [112], train_loss: 0.148402, val_loss: 0.151597, val_acc: 11.381148
+Epoch [113], train_loss: 0.148197, val_loss: 0.150797, val_acc: 11.406116
+Epoch [114], train_loss: 0.147894, val_loss: 0.149660, val_acc: 11.391711
+Epoch [115], train_loss: 0.147701, val_loss: 0.150474, val_acc: 11.460968
+Epoch [116], train_loss: 0.147480, val_loss: 0.150328, val_acc: 11.417285
+Epoch [117], train_loss: 0.147158, val_loss: 0.150008, val_acc: 11.397562
+Epoch [118], train_loss: 0.146930, val_loss: 0.150270, val_acc: 11.502626
+Epoch [119], train_loss: 0.146706, val_loss: 0.150108, val_acc: 11.536699
+Epoch [120], train_loss: 0.146434, val_loss: 0.152073, val_acc: 11.420612
+Epoch [121], train_loss: 0.146267, val_loss: 0.148355, val_acc: 11.636759
+Epoch [122], train_loss: 0.146012, val_loss: 0.146260, val_acc: 11.746386
+Epoch [123], train_loss: 0.145766, val_loss: 0.149195, val_acc: 11.605203
+Epoch [124], train_loss: 0.145535, val_loss: 0.147516, val_acc: 11.705865
+Epoch [125], train_loss: 0.145331, val_loss: 0.148731, val_acc: 11.785217
+Epoch [126], train_loss: 0.145109, val_loss: 0.148667, val_acc: 11.783245
+Epoch [127], train_loss: 0.144891, val_loss: 0.146448, val_acc: 11.796944
+Epoch [128], train_loss: 0.144630, val_loss: 0.146367, val_acc: 11.990458
+Epoch [129], train_loss: 0.144447, val_loss: 0.147201, val_acc: 11.815796
+Epoch [130], train_loss: 0.144205, val_loss: 0.147195, val_acc: 11.908739
+Epoch [131], train_loss: 0.144045, val_loss: 0.143775, val_acc: 12.007455
+Epoch [132], train_loss: 0.143793, val_loss: 0.148133, val_acc: 11.891901
+Epoch [133], train_loss: 0.143587, val_loss: 0.147146, val_acc: 11.961936
+Epoch [134], train_loss: 0.143300, val_loss: 0.146476, val_acc: 11.960666
+Epoch [135], train_loss: 0.143143, val_loss: 0.146780, val_acc: 11.811768
+Epoch [136], train_loss: 0.142884, val_loss: 0.147636, val_acc: 11.909829
+Epoch [137], train_loss: 0.142656, val_loss: 0.145365, val_acc: 12.064577
+Epoch [138], train_loss: 0.142447, val_loss: 0.144190, val_acc: 12.228879
+Epoch [139], train_loss: 0.142208, val_loss: 0.144072, val_acc: 12.356130
+Epoch [140], train_loss: 0.141973, val_loss: 0.144755, val_acc: 12.190444
+Epoch [141], train_loss: 0.141822, val_loss: 0.145827, val_acc: 12.178550
+Epoch [142], train_loss: 0.141616, val_loss: 0.143384, val_acc: 12.284534
+Epoch [143], train_loss: 0.141389, val_loss: 0.145760, val_acc: 12.179168
+Epoch [144], train_loss: 0.141178, val_loss: 0.143712, val_acc: 12.316963
+Epoch [145], train_loss: 0.140883, val_loss: 0.143497, val_acc: 12.441803
+Epoch [146], train_loss: 0.140722, val_loss: 0.141808, val_acc: 12.412549
+Epoch [147], train_loss: 0.140509, val_loss: 0.144298, val_acc: 12.324183
+Epoch [148], train_loss: 0.140246, val_loss: 0.141410, val_acc: 12.524118
+Epoch [149], train_loss: 0.140041, val_loss: 0.143001, val_acc: 12.365356
+Epoch [150], train_loss: 0.139860, val_loss: 0.141320, val_acc: 12.593962
+Epoch [151], train_loss: 0.139591, val_loss: 0.145401, val_acc: 12.379488
+Epoch [152], train_loss: 0.139439, val_loss: 0.142312, val_acc: 12.378268
+Epoch [153], train_loss: 0.139167, val_loss: 0.141654, val_acc: 12.582480
+Epoch [154], train_loss: 0.138849, val_loss: 0.140548, val_acc: 12.689817
+Epoch [155], train_loss: 0.138587, val_loss: 0.142132, val_acc: 12.652493
+Epoch [156], train_loss: 0.138333, val_loss: 0.141063, val_acc: 12.657022
+Epoch [157], train_loss: 0.138071, val_loss: 0.141121, val_acc: 12.597986
+Epoch [158], train_loss: 0.137774, val_loss: 0.140334, val_acc: 12.838399
+Epoch [159], train_loss: 0.137586, val_loss: 0.140178, val_acc: 12.750547
+Epoch [160], train_loss: 0.137335, val_loss: 0.139141, val_acc: 12.933786
+Epoch [161], train_loss: 0.137097, val_loss: 0.139074, val_acc: 12.901073
+Epoch [162], train_loss: 0.136900, val_loss: 0.139993, val_acc: 12.841092
+Epoch [163], train_loss: 0.136645, val_loss: 0.137947, val_acc: 12.909835
+Epoch [164], train_loss: 0.136461, val_loss: 0.136850, val_acc: 13.195046
+Epoch [165], train_loss: 0.136350, val_loss: 0.139273, val_acc: 12.960593
+Epoch [166], train_loss: 0.136152, val_loss: 0.137975, val_acc: 13.131146
+Epoch [167], train_loss: 0.136002, val_loss: 0.139226, val_acc: 13.100100
+Epoch [168], train_loss: 0.135763, val_loss: 0.139082, val_acc: 13.066933
+Epoch [169], train_loss: 0.135587, val_loss: 0.138362, val_acc: 13.159636
+Epoch [170], train_loss: 0.135460, val_loss: 0.136024, val_acc: 13.181515
+Epoch [171], train_loss: 0.135228, val_loss: 0.136687, val_acc: 13.281129
+Epoch [172], train_loss: 0.135021, val_loss: 0.138616, val_acc: 12.995362
+Epoch [173], train_loss: 0.134904, val_loss: 0.136118, val_acc: 13.201427
+Epoch [174], train_loss: 0.134756, val_loss: 0.139727, val_acc: 13.125984
+Epoch [175], train_loss: 0.134546, val_loss: 0.137040, val_acc: 13.371977
+Epoch [176], train_loss: 0.134397, val_loss: 0.134968, val_acc: 13.431436
+Epoch [177], train_loss: 0.134192, val_loss: 0.138925, val_acc: 13.208304
+Epoch [178], train_loss: 0.134036, val_loss: 0.137861, val_acc: 13.207913
+Epoch [179], train_loss: 0.133958, val_loss: 0.138126, val_acc: 13.362432
+Epoch [180], train_loss: 0.133689, val_loss: 0.134258, val_acc: 13.483045
+Epoch [181], train_loss: 0.133586, val_loss: 0.135111, val_acc: 13.632307
+Epoch [182], train_loss: 0.133425, val_loss: 0.133756, val_acc: 13.490182
+Epoch [183], train_loss: 0.133178, val_loss: 0.137394, val_acc: 13.441247
+Epoch [184], train_loss: 0.133071, val_loss: 0.139785, val_acc: 13.180146
+Epoch [185], train_loss: 0.132910, val_loss: 0.132387, val_acc: 13.572789
+Epoch [186], train_loss: 0.132768, val_loss: 0.137227, val_acc: 13.342103
+Epoch [187], train_loss: 0.132580, val_loss: 0.134621, val_acc: 13.600669
+Epoch [188], train_loss: 0.132530, val_loss: 0.136737, val_acc: 13.471825
+Epoch [189], train_loss: 0.132382, val_loss: 0.136311, val_acc: 13.441480
+Epoch [190], train_loss: 0.132187, val_loss: 0.137608, val_acc: 13.453459
+Epoch [191], train_loss: 0.131963, val_loss: 0.133601, val_acc: 13.724373
+Epoch [192], train_loss: 0.131945, val_loss: 0.134285, val_acc: 13.752950
+Epoch [193], train_loss: 0.131760, val_loss: 0.133385, val_acc: 13.698997
+Epoch [194], train_loss: 0.131547, val_loss: 0.132635, val_acc: 13.795275
+Epoch [195], train_loss: 0.131454, val_loss: 0.133000, val_acc: 13.838666
+Epoch [196], train_loss: 0.131235, val_loss: 0.136586, val_acc: 13.645609
+Epoch [197], train_loss: 0.131173, val_loss: 0.135369, val_acc: 13.612986
+Epoch [198], train_loss: 0.130982, val_loss: 0.133728, val_acc: 13.812575
+Epoch [199], train_loss: 0.130865, val_loss: 0.132832, val_acc: 13.909591
+Epoch [200], train_loss: 0.130664, val_loss: 0.134238, val_acc: 13.717673
+Epoch [201], train_loss: 0.130569, val_loss: 0.133528, val_acc: 13.874163
+Epoch [202], train_loss: 0.130405, val_loss: 0.132298, val_acc: 13.925676
+Epoch [203], train_loss: 0.130282, val_loss: 0.131595, val_acc: 13.975410
+Epoch [204], train_loss: 0.130208, val_loss: 0.133676, val_acc: 13.782387
+Epoch [205], train_loss: 0.130049, val_loss: 0.131856, val_acc: 13.958401
+Epoch [206], train_loss: 0.129826, val_loss: 0.131815, val_acc: 13.980597
+Epoch [207], train_loss: 0.129707, val_loss: 0.132557, val_acc: 13.941780
+Epoch [208], train_loss: 0.129569, val_loss: 0.135364, val_acc: 13.737324
+Epoch [209], train_loss: 0.129402, val_loss: 0.130556, val_acc: 14.059841
+Epoch [210], train_loss: 0.129319, val_loss: 0.135471, val_acc: 13.727314
+Epoch [211], train_loss: 0.129194, val_loss: 0.133345, val_acc: 13.847918
+Epoch [212], train_loss: 0.129030, val_loss: 0.130129, val_acc: 14.242491
+Epoch [213], train_loss: 0.128855, val_loss: 0.131517, val_acc: 13.944407
+Epoch [214], train_loss: 0.128739, val_loss: 0.131876, val_acc: 14.113317
+Epoch [215], train_loss: 0.128551, val_loss: 0.130842, val_acc: 14.205584
+Epoch [216], train_loss: 0.128427, val_loss: 0.127983, val_acc: 14.316573
+Epoch [217], train_loss: 0.128366, val_loss: 0.132667, val_acc: 14.127317
+Epoch [218], train_loss: 0.128234, val_loss: 0.131116, val_acc: 14.184644
+Epoch [219], train_loss: 0.128100, val_loss: 0.131974, val_acc: 13.987443
+Epoch [220], train_loss: 0.127899, val_loss: 0.129356, val_acc: 14.126008
+Epoch [221], train_loss: 0.127755, val_loss: 0.128887, val_acc: 14.280963
+Epoch [222], train_loss: 0.127600, val_loss: 0.132909, val_acc: 14.118318
+Epoch [223], train_loss: 0.127566, val_loss: 0.129592, val_acc: 14.324776
+Epoch [224], train_loss: 0.127336, val_loss: 0.129028, val_acc: 14.357968
+Epoch [225], train_loss: 0.127331, val_loss: 0.131747, val_acc: 14.056700
+Epoch [226], train_loss: 0.127081, val_loss: 0.131376, val_acc: 14.313796
+Epoch [227], train_loss: 0.126985, val_loss: 0.132055, val_acc: 14.248090
+Epoch [228], train_loss: 0.126878, val_loss: 0.129242, val_acc: 14.414266
+Epoch [229], train_loss: 0.126753, val_loss: 0.133320, val_acc: 14.065124
+Epoch [230], train_loss: 0.126608, val_loss: 0.129886, val_acc: 14.394722
+Epoch [231], train_loss: 0.126436, val_loss: 0.126537, val_acc: 14.575456
+Epoch [232], train_loss: 0.126278, val_loss: 0.127490, val_acc: 14.615437
+Epoch [233], train_loss: 0.126219, val_loss: 0.129974, val_acc: 14.476854
+Epoch [234], train_loss: 0.126043, val_loss: 0.126702, val_acc: 14.706308
+Epoch [235], train_loss: 0.125966, val_loss: 0.129115, val_acc: 14.542982
+Epoch [236], train_loss: 0.125823, val_loss: 0.128849, val_acc: 14.505553
+Epoch [237], train_loss: 0.125651, val_loss: 0.128258, val_acc: 14.512290
+Epoch [238], train_loss: 0.125554, val_loss: 0.130289, val_acc: 14.352880
+Epoch [239], train_loss: 0.125428, val_loss: 0.125473, val_acc: 14.812263
+Epoch [240], train_loss: 0.125252, val_loss: 0.126943, val_acc: 14.707499
+Epoch [241], train_loss: 0.125116, val_loss: 0.129586, val_acc: 14.555185
+Epoch [242], train_loss: 0.125004, val_loss: 0.127233, val_acc: 14.755655
+Epoch [243], train_loss: 0.124864, val_loss: 0.124770, val_acc: 14.900861
+Epoch [244], train_loss: 0.124767, val_loss: 0.127851, val_acc: 14.591082
+Epoch [245], train_loss: 0.124586, val_loss: 0.129015, val_acc: 14.525776
+Epoch [246], train_loss: 0.124523, val_loss: 0.124632, val_acc: 14.918354
+Epoch [247], train_loss: 0.124369, val_loss: 0.129108, val_acc: 14.610168
+Epoch [248], train_loss: 0.124185, val_loss: 0.127449, val_acc: 14.705079
+Epoch [249], train_loss: 0.124080, val_loss: 0.127252, val_acc: 14.695400
+Epoch [250], train_loss: 0.123908, val_loss: 0.125093, val_acc: 14.920932
+Epoch [251], train_loss: 0.123818, val_loss: 0.125386, val_acc: 15.115317
+Epoch [252], train_loss: 0.123716, val_loss: 0.125858, val_acc: 14.960703
+Epoch [253], train_loss: 0.123559, val_loss: 0.125251, val_acc: 14.887910
+Epoch [254], train_loss: 0.123416, val_loss: 0.128754, val_acc: 14.749982
+Epoch [255], train_loss: 0.123328, val_loss: 0.129141, val_acc: 14.699121
+Epoch [256], train_loss: 0.123176, val_loss: 0.124283, val_acc: 15.046897
+Epoch [257], train_loss: 0.123060, val_loss: 0.124946, val_acc: 15.060324
+Epoch [258], train_loss: 0.123024, val_loss: 0.125397, val_acc: 14.951602
+Epoch [259], train_loss: 0.122745, val_loss: 0.124627, val_acc: 14.901258
+Epoch [260], train_loss: 0.122614, val_loss: 0.125625, val_acc: 14.963218
+Epoch [261], train_loss: 0.122586, val_loss: 0.127373, val_acc: 14.777391
+Epoch [262], train_loss: 0.122450, val_loss: 0.126081, val_acc: 15.066932
+Epoch [263], train_loss: 0.122261, val_loss: 0.124467, val_acc: 15.159121
+Epoch [264], train_loss: 0.122124, val_loss: 0.124425, val_acc: 15.204859
+Epoch [265], train_loss: 0.122025, val_loss: 0.123473, val_acc: 15.209599
+Epoch [266], train_loss: 0.121914, val_loss: 0.123994, val_acc: 15.280214
+Epoch [267], train_loss: 0.121817, val_loss: 0.123274, val_acc: 15.262285
+Epoch [268], train_loss: 0.121651, val_loss: 0.123845, val_acc: 15.239892
+Epoch [269], train_loss: 0.121507, val_loss: 0.122916, val_acc: 15.348881
+Epoch [270], train_loss: 0.121347, val_loss: 0.121319, val_acc: 15.480497
+Epoch [271], train_loss: 0.121214, val_loss: 0.123375, val_acc: 15.336892
+Epoch [272], train_loss: 0.121127, val_loss: 0.126105, val_acc: 15.114371
+Epoch [273], train_loss: 0.120973, val_loss: 0.122137, val_acc: 15.390536
+Epoch [274], train_loss: 0.120911, val_loss: 0.125171, val_acc: 15.175784
+Epoch [275], train_loss: 0.120755, val_loss: 0.124542, val_acc: 15.283087
+Epoch [276], train_loss: 0.120602, val_loss: 0.122259, val_acc: 15.483066
+Epoch [277], train_loss: 0.120479, val_loss: 0.122193, val_acc: 15.386752
+Epoch [278], train_loss: 0.120408, val_loss: 0.123206, val_acc: 15.356086
+Epoch [279], train_loss: 0.120230, val_loss: 0.120226, val_acc: 15.593708
+Epoch [280], train_loss: 0.120130, val_loss: 0.119399, val_acc: 15.673744
+Epoch [281], train_loss: 0.119999, val_loss: 0.122750, val_acc: 15.512918
+Epoch [282], train_loss: 0.119933, val_loss: 0.124132, val_acc: 15.316158
+Epoch [283], train_loss: 0.119773, val_loss: 0.120162, val_acc: 15.612412
+Epoch [284], train_loss: 0.119612, val_loss: 0.123961, val_acc: 15.342393
+Epoch [285], train_loss: 0.119434, val_loss: 0.120716, val_acc: 15.577994
+Epoch [286], train_loss: 0.119351, val_loss: 0.122636, val_acc: 15.633738
+Epoch [287], train_loss: 0.119251, val_loss: 0.122642, val_acc: 15.510924
+Epoch [288], train_loss: 0.119127, val_loss: 0.120415, val_acc: 15.725314
+Epoch [289], train_loss: 0.118958, val_loss: 0.121659, val_acc: 15.655169
+Epoch [290], train_loss: 0.118788, val_loss: 0.120727, val_acc: 15.614434
+Epoch [291], train_loss: 0.118711, val_loss: 0.119770, val_acc: 15.778038
+Epoch [292], train_loss: 0.118594, val_loss: 0.120802, val_acc: 15.677132
+Epoch [293], train_loss: 0.118507, val_loss: 0.121020, val_acc: 15.662647
+Epoch [294], train_loss: 0.118350, val_loss: 0.123029, val_acc: 15.548571
+Epoch [295], train_loss: 0.118239, val_loss: 0.121913, val_acc: 15.769586
+Epoch [296], train_loss: 0.118103, val_loss: 0.123317, val_acc: 15.595695
+Epoch [297], train_loss: 0.117949, val_loss: 0.118407, val_acc: 16.019354
+Epoch [298], train_loss: 0.117874, val_loss: 0.120246, val_acc: 15.765043
+Epoch [299], train_loss: 0.117681, val_loss: 0.118213, val_acc: 15.988753
+Epoch [300], train_loss: 0.117559, val_loss: 0.118167, val_acc: 16.033304
+Epoch [301], train_loss: 0.117417, val_loss: 0.123548, val_acc: 15.578711
+Epoch [302], train_loss: 0.117274, val_loss: 0.120260, val_acc: 15.954419
+Epoch [303], train_loss: 0.117152, val_loss: 0.118744, val_acc: 15.888710
+Epoch [304], train_loss: 0.116982, val_loss: 0.118063, val_acc: 15.947111
+Epoch [305], train_loss: 0.116891, val_loss: 0.118670, val_acc: 16.020470
+Epoch [306], train_loss: 0.116774, val_loss: 0.117968, val_acc: 16.104467
+Epoch [307], train_loss: 0.116596, val_loss: 0.119472, val_acc: 15.961663
+Epoch [308], train_loss: 0.116532, val_loss: 0.119985, val_acc: 15.888636
+Epoch [309], train_loss: 0.116342, val_loss: 0.118214, val_acc: 16.165606
+Epoch [310], train_loss: 0.116220, val_loss: 0.120815, val_acc: 15.881144
+Epoch [311], train_loss: 0.116079, val_loss: 0.119059, val_acc: 15.937569
+Epoch [312], train_loss: 0.115923, val_loss: 0.117046, val_acc: 16.210159
+Epoch [313], train_loss: 0.115834, val_loss: 0.118440, val_acc: 16.074120
+Epoch [314], train_loss: 0.115653, val_loss: 0.117749, val_acc: 16.111130
+Epoch [315], train_loss: 0.115605, val_loss: 0.118445, val_acc: 16.160204
+Epoch [316], train_loss: 0.115403, val_loss: 0.119289, val_acc: 16.112858
+Epoch [317], train_loss: 0.115335, val_loss: 0.117048, val_acc: 16.150122
+Epoch [318], train_loss: 0.115226, val_loss: 0.117355, val_acc: 16.171757
+Epoch [319], train_loss: 0.115075, val_loss: 0.117542, val_acc: 16.292984
+Epoch [320], train_loss: 0.114960, val_loss: 0.116637, val_acc: 16.365660
+Epoch [321], train_loss: 0.114762, val_loss: 0.120223, val_acc: 16.029234
+Epoch [322], train_loss: 0.114686, val_loss: 0.116151, val_acc: 16.473454
+Epoch [323], train_loss: 0.114530, val_loss: 0.115603, val_acc: 16.431009
+Epoch [324], train_loss: 0.114402, val_loss: 0.115908, val_acc: 16.447367
+Epoch [325], train_loss: 0.114319, val_loss: 0.116620, val_acc: 16.287378
+Epoch [326], train_loss: 0.114252, val_loss: 0.120748, val_acc: 15.976707
+Epoch [327], train_loss: 0.114085, val_loss: 0.114947, val_acc: 16.582331
+Epoch [328], train_loss: 0.113936, val_loss: 0.117167, val_acc: 16.373837
+Epoch [329], train_loss: 0.113770, val_loss: 0.116053, val_acc: 16.492300
+Epoch [330], train_loss: 0.113731, val_loss: 0.116964, val_acc: 16.425577
+Epoch [331], train_loss: 0.113598, val_loss: 0.118593, val_acc: 16.233835
+Epoch [332], train_loss: 0.113474, val_loss: 0.115161, val_acc: 16.641903
+Epoch [333], train_loss: 0.113396, val_loss: 0.114288, val_acc: 16.683092
+Epoch [334], train_loss: 0.113333, val_loss: 0.117320, val_acc: 16.363867
+Epoch [335], train_loss: 0.113174, val_loss: 0.113567, val_acc: 16.729383
+Epoch [336], train_loss: 0.113066, val_loss: 0.118343, val_acc: 16.355415
+Epoch [337], train_loss: 0.112955, val_loss: 0.112630, val_acc: 16.858757
+Epoch [338], train_loss: 0.112875, val_loss: 0.113505, val_acc: 16.735388
+Epoch [339], train_loss: 0.112731, val_loss: 0.112763, val_acc: 16.797060
+Epoch [340], train_loss: 0.112641, val_loss: 0.115257, val_acc: 16.681458
+Epoch [341], train_loss: 0.112565, val_loss: 0.116461, val_acc: 16.475054
+Epoch [342], train_loss: 0.112411, val_loss: 0.117938, val_acc: 16.430513
+Epoch [343], train_loss: 0.112295, val_loss: 0.113677, val_acc: 16.822475
+Epoch [344], train_loss: 0.112137, val_loss: 0.114325, val_acc: 16.775200
+Epoch [345], train_loss: 0.112106, val_loss: 0.112660, val_acc: 16.963591
+Epoch [346], train_loss: 0.111962, val_loss: 0.115973, val_acc: 16.646297
+Epoch [347], train_loss: 0.111903, val_loss: 0.113115, val_acc: 16.838570
+Epoch [348], train_loss: 0.111801, val_loss: 0.115100, val_acc: 16.676846
+Epoch [349], train_loss: 0.111655, val_loss: 0.114561, val_acc: 16.749832
+Epoch [350], train_loss: 0.111586, val_loss: 0.112575, val_acc: 16.875004
+Epoch [351], train_loss: 0.111369, val_loss: 0.114587, val_acc: 16.784159
+Epoch [352], train_loss: 0.111311, val_loss: 0.114797, val_acc: 16.702164
+Epoch [353], train_loss: 0.111250, val_loss: 0.113538, val_acc: 16.840071
+Epoch [354], train_loss: 0.111088, val_loss: 0.112023, val_acc: 16.978054
+Epoch [355], train_loss: 0.111019, val_loss: 0.114639, val_acc: 16.815289
+Epoch [356], train_loss: 0.110881, val_loss: 0.113723, val_acc: 16.873829
+Epoch [357], train_loss: 0.110790, val_loss: 0.113884, val_acc: 16.881136
+Epoch [358], train_loss: 0.110673, val_loss: 0.113818, val_acc: 16.877499
+Epoch [359], train_loss: 0.110633, val_loss: 0.112154, val_acc: 17.073366
+Epoch [360], train_loss: 0.110450, val_loss: 0.111628, val_acc: 17.067892
+Epoch [361], train_loss: 0.110406, val_loss: 0.114066, val_acc: 16.914759
+Epoch [362], train_loss: 0.110250, val_loss: 0.112874, val_acc: 16.969004
+Epoch [363], train_loss: 0.110118, val_loss: 0.111930, val_acc: 17.092609
+Epoch [364], train_loss: 0.110106, val_loss: 0.110595, val_acc: 17.197668
+Epoch [365], train_loss: 0.109997, val_loss: 0.113794, val_acc: 17.011053
+Epoch [366], train_loss: 0.109883, val_loss: 0.111963, val_acc: 17.151030
+Epoch [367], train_loss: 0.109797, val_loss: 0.108572, val_acc: 17.500118
+Epoch [368], train_loss: 0.109715, val_loss: 0.109222, val_acc: 17.300657
+Epoch [369], train_loss: 0.109547, val_loss: 0.111085, val_acc: 17.173277
+Epoch [370], train_loss: 0.109540, val_loss: 0.110658, val_acc: 17.157583
+Epoch [371], train_loss: 0.109438, val_loss: 0.112914, val_acc: 17.036690
+Epoch [372], train_loss: 0.109289, val_loss: 0.110246, val_acc: 17.268183
+Epoch [373], train_loss: 0.109287, val_loss: 0.109067, val_acc: 17.443678
+Epoch [374], train_loss: 0.109101, val_loss: 0.111160, val_acc: 17.161758
+Epoch [375], train_loss: 0.108994, val_loss: 0.108837, val_acc: 17.514812
+Epoch [376], train_loss: 0.108934, val_loss: 0.111943, val_acc: 17.181400
+Epoch [377], train_loss: 0.108779, val_loss: 0.110676, val_acc: 17.323490
+Epoch [378], train_loss: 0.108703, val_loss: 0.109530, val_acc: 17.416479
+Epoch [379], train_loss: 0.108582, val_loss: 0.109437, val_acc: 17.445721
+Epoch [380], train_loss: 0.108416, val_loss: 0.110817, val_acc: 17.256895
+Epoch [381], train_loss: 0.108447, val_loss: 0.109498, val_acc: 17.467020
+Epoch [382], train_loss: 0.108311, val_loss: 0.108901, val_acc: 17.500929
+Epoch [383], train_loss: 0.108244, val_loss: 0.110714, val_acc: 17.228844
+Epoch [384], train_loss: 0.108049, val_loss: 0.107659, val_acc: 17.672838
+Epoch [385], train_loss: 0.108036, val_loss: 0.111640, val_acc: 17.273067
+Epoch [386], train_loss: 0.107938, val_loss: 0.109284, val_acc: 17.476173
+Epoch [387], train_loss: 0.107857, val_loss: 0.111423, val_acc: 17.289972
+Epoch [388], train_loss: 0.107661, val_loss: 0.112344, val_acc: 17.182631
+Epoch [389], train_loss: 0.107623, val_loss: 0.108465, val_acc: 17.571621
+Epoch [390], train_loss: 0.107535, val_loss: 0.108805, val_acc: 17.538748
+Epoch [391], train_loss: 0.107453, val_loss: 0.107524, val_acc: 17.714157
+Epoch [392], train_loss: 0.107334, val_loss: 0.107688, val_acc: 17.680262
+Epoch [393], train_loss: 0.107296, val_loss: 0.110605, val_acc: 17.383085
+Epoch [394], train_loss: 0.107133, val_loss: 0.107274, val_acc: 17.736277
+Epoch [395], train_loss: 0.107000, val_loss: 0.107015, val_acc: 17.668737
+Epoch [396], train_loss: 0.106959, val_loss: 0.108912, val_acc: 17.553415
+Epoch [397], train_loss: 0.106872, val_loss: 0.109535, val_acc: 17.470846
+Epoch [398], train_loss: 0.106800, val_loss: 0.107799, val_acc: 17.810665
+Epoch [399], train_loss: 0.106719, val_loss: 0.108416, val_acc: 17.724138
+Epoch [400], train_loss: 0.106629, val_loss: 0.108617, val_acc: 17.619055
+Epoch [401], train_loss: 0.106500, val_loss: 0.108860, val_acc: 17.666027
+Epoch [402], train_loss: 0.106388, val_loss: 0.108572, val_acc: 17.784554
+Epoch [403], train_loss: 0.106383, val_loss: 0.108926, val_acc: 17.552023
+Epoch [404], train_loss: 0.106334, val_loss: 0.109442, val_acc: 17.641600
+Epoch [405], train_loss: 0.106062, val_loss: 0.106122, val_acc: 17.930521
+Epoch [406], train_loss: 0.106006, val_loss: 0.107595, val_acc: 17.904902
+Epoch [407], train_loss: 0.106025, val_loss: 0.108549, val_acc: 17.777655
+Epoch [408], train_loss: 0.105970, val_loss: 0.104972, val_acc: 18.105518
+Epoch [409], train_loss: 0.105719, val_loss: 0.106849, val_acc: 17.899031
+Epoch [410], train_loss: 0.105683, val_loss: 0.107447, val_acc: 17.897840
+Epoch [411], train_loss: 0.105544, val_loss: 0.108551, val_acc: 17.839737
+Epoch [412], train_loss: 0.105444, val_loss: 0.107388, val_acc: 17.808527
+Epoch [413], train_loss: 0.105383, val_loss: 0.108061, val_acc: 17.914251
+Epoch [414], train_loss: 0.105285, val_loss: 0.106741, val_acc: 18.009373
+Epoch [415], train_loss: 0.105330, val_loss: 0.107826, val_acc: 17.839813
+Epoch [416], train_loss: 0.105181, val_loss: 0.109479, val_acc: 17.618395
+Epoch [417], train_loss: 0.105070, val_loss: 0.108312, val_acc: 17.806528
+Epoch [418], train_loss: 0.104998, val_loss: 0.105582, val_acc: 18.046448
+Epoch [419], train_loss: 0.104830, val_loss: 0.104241, val_acc: 18.226316
+Epoch [420], train_loss: 0.104757, val_loss: 0.105750, val_acc: 18.082863
+Epoch [421], train_loss: 0.104629, val_loss: 0.105715, val_acc: 18.050205
+Epoch [422], train_loss: 0.104661, val_loss: 0.106324, val_acc: 18.116940
+Epoch [423], train_loss: 0.104558, val_loss: 0.109954, val_acc: 17.542599
+Epoch [424], train_loss: 0.104488, val_loss: 0.103084, val_acc: 18.343155
+Epoch [425], train_loss: 0.104401, val_loss: 0.104942, val_acc: 18.084913
+Epoch [426], train_loss: 0.104294, val_loss: 0.104140, val_acc: 18.329794
+Epoch [427], train_loss: 0.104214, val_loss: 0.105616, val_acc: 18.018351
+Epoch [428], train_loss: 0.104060, val_loss: 0.104929, val_acc: 18.268318
+Epoch [429], train_loss: 0.103986, val_loss: 0.105335, val_acc: 18.205544
+Epoch [430], train_loss: 0.104026, val_loss: 0.103717, val_acc: 18.247515
+Epoch [431], train_loss: 0.103832, val_loss: 0.101884, val_acc: 18.616203
+Epoch [432], train_loss: 0.103793, val_loss: 0.103926, val_acc: 18.242447
+Epoch [433], train_loss: 0.103687, val_loss: 0.106286, val_acc: 18.104414
+Epoch [434], train_loss: 0.103687, val_loss: 0.106073, val_acc: 18.115831
+Epoch [435], train_loss: 0.103457, val_loss: 0.104349, val_acc: 18.153749
+Epoch [436], train_loss: 0.103432, val_loss: 0.100956, val_acc: 18.720921
+Epoch [437], train_loss: 0.103407, val_loss: 0.104168, val_acc: 18.411396
+Epoch [438], train_loss: 0.103233, val_loss: 0.103922, val_acc: 18.394857
+Epoch [439], train_loss: 0.103200, val_loss: 0.107121, val_acc: 18.159035
+Epoch [440], train_loss: 0.103055, val_loss: 0.105986, val_acc: 18.159904
+Epoch [441], train_loss: 0.103019, val_loss: 0.103055, val_acc: 18.391333
+Epoch [442], train_loss: 0.102846, val_loss: 0.104296, val_acc: 18.304739
+Epoch [443], train_loss: 0.102864, val_loss: 0.105904, val_acc: 18.150242
+Epoch [444], train_loss: 0.102732, val_loss: 0.102637, val_acc: 18.470610
+Epoch [445], train_loss: 0.102634, val_loss: 0.105166, val_acc: 18.382082
+Epoch [446], train_loss: 0.102614, val_loss: 0.102510, val_acc: 18.621456
+Epoch [447], train_loss: 0.102461, val_loss: 0.102945, val_acc: 18.558796
+Epoch [448], train_loss: 0.102398, val_loss: 0.104475, val_acc: 18.339560
+Epoch [449], train_loss: 0.102421, val_loss: 0.103763, val_acc: 18.400221
+Epoch [450], train_loss: 0.102144, val_loss: 0.106648, val_acc: 18.233231
+Epoch [451], train_loss: 0.102174, val_loss: 0.102884, val_acc: 18.647009
+Epoch [452], train_loss: 0.102061, val_loss: 0.102996, val_acc: 18.500828
+Epoch [453], train_loss: 0.102096, val_loss: 0.105517, val_acc: 18.324863
+Epoch [454], train_loss: 0.101938, val_loss: 0.098736, val_acc: 18.995548
+Epoch [455], train_loss: 0.101832, val_loss: 0.104062, val_acc: 18.422617
+Epoch [456], train_loss: 0.101790, val_loss: 0.105590, val_acc: 18.347359
+Epoch [457], train_loss: 0.101680, val_loss: 0.105687, val_acc: 18.201773
+Epoch [458], train_loss: 0.101596, val_loss: 0.103734, val_acc: 18.605169
+Epoch [459], train_loss: 0.101582, val_loss: 0.102685, val_acc: 18.622820
+Epoch [460], train_loss: 0.101394, val_loss: 0.101820, val_acc: 18.588343
+Epoch [461], train_loss: 0.101339, val_loss: 0.100247, val_acc: 18.792517
+Epoch [462], train_loss: 0.101191, val_loss: 0.100502, val_acc: 18.870325
+Epoch [463], train_loss: 0.101228, val_loss: 0.104550, val_acc: 18.489994
+Epoch [464], train_loss: 0.101092, val_loss: 0.101854, val_acc: 18.688105
+Epoch [465], train_loss: 0.101017, val_loss: 0.102876, val_acc: 18.649126
+Epoch [466], train_loss: 0.100910, val_loss: 0.100912, val_acc: 18.747419
+Epoch [467], train_loss: 0.100812, val_loss: 0.100326, val_acc: 18.929949
+Epoch [468], train_loss: 0.100740, val_loss: 0.103340, val_acc: 18.562305
+Epoch [469], train_loss: 0.100574, val_loss: 0.101266, val_acc: 18.741272
+Epoch [470], train_loss: 0.100678, val_loss: 0.101746, val_acc: 18.691818
+Epoch [471], train_loss: 0.100513, val_loss: 0.102613, val_acc: 18.692825
+Epoch [472], train_loss: 0.100541, val_loss: 0.103631, val_acc: 18.544275
+Epoch [473], train_loss: 0.100388, val_loss: 0.102749, val_acc: 18.619240
+Epoch [474], train_loss: 0.100283, val_loss: 0.103286, val_acc: 18.599524
+Epoch [475], train_loss: 0.100245, val_loss: 0.104329, val_acc: 18.474571
+Epoch [476], train_loss: 0.100140, val_loss: 0.101738, val_acc: 18.749023
+Epoch [477], train_loss: 0.100072, val_loss: 0.103267, val_acc: 18.727798
+Epoch [478], train_loss: 0.099903, val_loss: 0.101880, val_acc: 18.794373
+Epoch [479], train_loss: 0.099971, val_loss: 0.099518, val_acc: 19.007299
+Epoch [480], train_loss: 0.099816, val_loss: 0.101424, val_acc: 18.949148
+Epoch [481], train_loss: 0.099734, val_loss: 0.099611, val_acc: 18.929499
+Epoch [482], train_loss: 0.099655, val_loss: 0.101008, val_acc: 19.040588
+Epoch [483], train_loss: 0.099617, val_loss: 0.104378, val_acc: 18.502756
+Epoch [484], train_loss: 0.099576, val_loss: 0.103488, val_acc: 18.738199
+Epoch [485], train_loss: 0.099416, val_loss: 0.103091, val_acc: 18.795074
+Epoch [486], train_loss: 0.099345, val_loss: 0.098569, val_acc: 19.227566
+Epoch [487], train_loss: 0.099305, val_loss: 0.100329, val_acc: 19.047094
+Epoch [488], train_loss: 0.099220, val_loss: 0.097631, val_acc: 19.290846
+Epoch [489], train_loss: 0.099154, val_loss: 0.099808, val_acc: 19.108971
+Epoch [490], train_loss: 0.099034, val_loss: 0.101772, val_acc: 18.761549
+Epoch [491], train_loss: 0.099077, val_loss: 0.099908, val_acc: 19.144701
+Epoch [492], train_loss: 0.098920, val_loss: 0.100246, val_acc: 19.020054
+Epoch [493], train_loss: 0.098856, val_loss: 0.099144, val_acc: 19.036806
+Epoch [494], train_loss: 0.098736, val_loss: 0.100712, val_acc: 18.995453
+Epoch [495], train_loss: 0.098726, val_loss: 0.102087, val_acc: 18.854692
+Epoch [496], train_loss: 0.098646, val_loss: 0.098499, val_acc: 19.134729
+Epoch [497], train_loss: 0.098554, val_loss: 0.103481, val_acc: 18.784019
+Epoch [498], train_loss: 0.098542, val_loss: 0.102562, val_acc: 18.980322
+Epoch [499], train_loss: 0.098377, val_loss: 0.097164, val_acc: 19.341881
+Epoch [500], train_loss: 0.098344, val_loss: 0.098127, val_acc: 19.306961
+Epoch [501], train_loss: 0.098251, val_loss: 0.097968, val_acc: 19.390247
+Epoch [502], train_loss: 0.098214, val_loss: 0.096678, val_acc: 19.396753
+Epoch [503], train_loss: 0.098080, val_loss: 0.097981, val_acc: 19.373343
+Epoch [504], train_loss: 0.097986, val_loss: 0.099246, val_acc: 19.134979
+Epoch [505], train_loss: 0.097906, val_loss: 0.100477, val_acc: 19.070032
+Epoch [506], train_loss: 0.097948, val_loss: 0.099067, val_acc: 19.304859
+Epoch [507], train_loss: 0.097880, val_loss: 0.101249, val_acc: 19.006306
+Epoch [508], train_loss: 0.098018, val_loss: 0.098483, val_acc: 19.417234
+Epoch [509], train_loss: 0.097702, val_loss: 0.096408, val_acc: 19.522942
+Epoch [510], train_loss: 0.097611, val_loss: 0.097514, val_acc: 19.402531
+Epoch [511], train_loss: 0.097500, val_loss: 0.100877, val_acc: 19.050451
+Epoch [512], train_loss: 0.097445, val_loss: 0.097124, val_acc: 19.437204
+Epoch [513], train_loss: 0.097406, val_loss: 0.097879, val_acc: 19.214533
+Epoch [514], train_loss: 0.097371, val_loss: 0.101982, val_acc: 19.023703
+Epoch [515], train_loss: 0.097225, val_loss: 0.099214, val_acc: 19.240685
+Epoch [516], train_loss: 0.097251, val_loss: 0.098673, val_acc: 19.304663
+Epoch [517], train_loss: 0.097146, val_loss: 0.097271, val_acc: 19.444223
+Epoch [518], train_loss: 0.097056, val_loss: 0.098844, val_acc: 19.338835
+Epoch [519], train_loss: 0.096944, val_loss: 0.101596, val_acc: 19.144156
+Epoch [520], train_loss: 0.096881, val_loss: 0.100464, val_acc: 19.140142
+Epoch [521], train_loss: 0.096818, val_loss: 0.098946, val_acc: 19.286007
+Epoch [522], train_loss: 0.096780, val_loss: 0.097714, val_acc: 19.348759
+Epoch [523], train_loss: 0.096689, val_loss: 0.097096, val_acc: 19.529264
+Epoch [524], train_loss: 0.096689, val_loss: 0.099017, val_acc: 19.288006
+Epoch [525], train_loss: 0.096541, val_loss: 0.098991, val_acc: 19.256943
+Epoch [526], train_loss: 0.096448, val_loss: 0.097338, val_acc: 19.459652
+Epoch [527], train_loss: 0.096471, val_loss: 0.096960, val_acc: 19.542582
+Epoch [528], train_loss: 0.096313, val_loss: 0.097066, val_acc: 19.494858
+Epoch [529], train_loss: 0.096308, val_loss: 0.095315, val_acc: 19.618372
+Epoch [530], train_loss: 0.096184, val_loss: 0.095343, val_acc: 19.680508
+Epoch [531], train_loss: 0.096153, val_loss: 0.097890, val_acc: 19.483358
+Epoch [532], train_loss: 0.096100, val_loss: 0.096458, val_acc: 19.718662
+Epoch [533], train_loss: 0.096006, val_loss: 0.097393, val_acc: 19.394409
+Epoch [534], train_loss: 0.095989, val_loss: 0.097426, val_acc: 19.470715
+Epoch [535], train_loss: 0.095866, val_loss: 0.098107, val_acc: 19.506659
+Epoch [536], train_loss: 0.095852, val_loss: 0.096101, val_acc: 19.679035
+Epoch [537], train_loss: 0.095915, val_loss: 0.099941, val_acc: 19.302277
+Epoch [538], train_loss: 0.095734, val_loss: 0.098685, val_acc: 19.434217
+Epoch [539], train_loss: 0.095581, val_loss: 0.097844, val_acc: 19.581594
+Epoch [540], train_loss: 0.095661, val_loss: 0.095930, val_acc: 19.616903
+Epoch [541], train_loss: 0.095661, val_loss: 0.094082, val_acc: 19.855150
+Epoch [542], train_loss: 0.095443, val_loss: 0.097773, val_acc: 19.507118
+Epoch [543], train_loss: 0.095353, val_loss: 0.099477, val_acc: 19.460361
+Epoch [544], train_loss: 0.095290, val_loss: 0.094986, val_acc: 19.855202
+Epoch [545], train_loss: 0.095286, val_loss: 0.099276, val_acc: 19.435516
+Epoch [546], train_loss: 0.095163, val_loss: 0.095124, val_acc: 19.778002
+Epoch [547], train_loss: 0.095025, val_loss: 0.093488, val_acc: 19.875025
+Epoch [548], train_loss: 0.095038, val_loss: 0.096998, val_acc: 19.659960
+Epoch [549], train_loss: 0.094945, val_loss: 0.097877, val_acc: 19.669119
+Epoch [550], train_loss: 0.094838, val_loss: 0.099419, val_acc: 19.371670
+Epoch [551], train_loss: 0.094816, val_loss: 0.096185, val_acc: 19.724106
+Epoch [552], train_loss: 0.094731, val_loss: 0.096468, val_acc: 19.589128
+Epoch [553], train_loss: 0.094826, val_loss: 0.096942, val_acc: 19.714413
+Epoch [554], train_loss: 0.094744, val_loss: 0.094181, val_acc: 19.876837
+Epoch [555], train_loss: 0.094649, val_loss: 0.093675, val_acc: 19.960035
+Epoch [556], train_loss: 0.094546, val_loss: 0.095059, val_acc: 19.923018
+Epoch [557], train_loss: 0.094488, val_loss: 0.094390, val_acc: 19.828436
+Epoch [558], train_loss: 0.094474, val_loss: 0.096793, val_acc: 19.646650
+Epoch [559], train_loss: 0.094342, val_loss: 0.094066, val_acc: 19.961910
+Epoch [560], train_loss: 0.094418, val_loss: 0.095114, val_acc: 19.902689
+Epoch [561], train_loss: 0.094165, val_loss: 0.093505, val_acc: 20.111982
+Epoch [562], train_loss: 0.094209, val_loss: 0.098556, val_acc: 19.506144
+Epoch [563], train_loss: 0.094138, val_loss: 0.096705, val_acc: 19.791523
+Epoch [564], train_loss: 0.094051, val_loss: 0.095546, val_acc: 19.872301
+Epoch [565], train_loss: 0.094039, val_loss: 0.094788, val_acc: 19.968637
+Epoch [566], train_loss: 0.093930, val_loss: 0.097764, val_acc: 19.656324
+Epoch [567], train_loss: 0.093808, val_loss: 0.092935, val_acc: 20.159796
+Epoch [568], train_loss: 0.093707, val_loss: 0.094018, val_acc: 19.904001
+Epoch [569], train_loss: 0.093633, val_loss: 0.093594, val_acc: 19.981968
+Epoch [570], train_loss: 0.093736, val_loss: 0.093112, val_acc: 20.057089
+Epoch [571], train_loss: 0.093625, val_loss: 0.095167, val_acc: 19.946859
+Epoch [572], train_loss: 0.093609, val_loss: 0.096453, val_acc: 19.709097
+Epoch [573], train_loss: 0.093454, val_loss: 0.093062, val_acc: 19.992554
+Epoch [574], train_loss: 0.093404, val_loss: 0.097921, val_acc: 19.621693
+Epoch [575], train_loss: 0.093315, val_loss: 0.094627, val_acc: 19.941959
+Epoch [576], train_loss: 0.093371, val_loss: 0.096060, val_acc: 19.830698
+Epoch [577], train_loss: 0.093172, val_loss: 0.094976, val_acc: 20.043858
+Epoch [578], train_loss: 0.093312, val_loss: 0.096368, val_acc: 19.618544
+Epoch [579], train_loss: 0.093184, val_loss: 0.091805, val_acc: 20.225149
+Epoch [580], train_loss: 0.093002, val_loss: 0.095350, val_acc: 19.922451
+Epoch [581], train_loss: 0.092953, val_loss: 0.092978, val_acc: 20.168665
+Epoch [582], train_loss: 0.093024, val_loss: 0.095511, val_acc: 19.992287
+Epoch [583], train_loss: 0.092823, val_loss: 0.091939, val_acc: 20.214651
+Epoch [584], train_loss: 0.092725, val_loss: 0.093043, val_acc: 20.132322
+Epoch [585], train_loss: 0.092739, val_loss: 0.093326, val_acc: 20.251625
+Epoch [586], train_loss: 0.092749, val_loss: 0.091350, val_acc: 20.413998
+Epoch [587], train_loss: 0.092728, val_loss: 0.092084, val_acc: 20.451616
+Epoch [588], train_loss: 0.092564, val_loss: 0.095713, val_acc: 19.993599
+Epoch [589], train_loss: 0.092663, val_loss: 0.093485, val_acc: 20.139784
+Epoch [590], train_loss: 0.092426, val_loss: 0.096660, val_acc: 19.769794
+Epoch [591], train_loss: 0.092428, val_loss: 0.091771, val_acc: 20.368338
+Epoch [592], train_loss: 0.092426, val_loss: 0.094311, val_acc: 20.058313
+Epoch [593], train_loss: 0.092224, val_loss: 0.094141, val_acc: 20.041294
+Epoch [594], train_loss: 0.092185, val_loss: 0.095093, val_acc: 19.990774
+Epoch [595], train_loss: 0.092184, val_loss: 0.090546, val_acc: 20.462206
+Epoch [596], train_loss: 0.092103, val_loss: 0.091654, val_acc: 20.366249
+Epoch [597], train_loss: 0.092163, val_loss: 0.096475, val_acc: 19.742081
+Epoch [598], train_loss: 0.091927, val_loss: 0.097956, val_acc: 19.872156
+Epoch [599], train_loss: 0.091951, val_loss: 0.091899, val_acc: 20.327440
+Epoch [600], train_loss: 0.091888, val_loss: 0.094591, val_acc: 20.094938
+Epoch [601], train_loss: 0.091818, val_loss: 0.093594, val_acc: 20.160650
+Epoch [602], train_loss: 0.091792, val_loss: 0.090217, val_acc: 20.356544
+Epoch [603], train_loss: 0.091623, val_loss: 0.090423, val_acc: 20.565416
+Epoch [604], train_loss: 0.091657, val_loss: 0.094648, val_acc: 20.159622
+Epoch [605], train_loss: 0.091469, val_loss: 0.093027, val_acc: 20.256680
+Epoch [606], train_loss: 0.091671, val_loss: 0.091513, val_acc: 20.323618
+Epoch [607], train_loss: 0.091510, val_loss: 0.090592, val_acc: 20.515621
+Epoch [608], train_loss: 0.091462, val_loss: 0.091646, val_acc: 20.413439
+Epoch [609], train_loss: 0.091337, val_loss: 0.089829, val_acc: 20.669495
+Epoch [610], train_loss: 0.091333, val_loss: 0.091314, val_acc: 20.398310
+Epoch [611], train_loss: 0.091211, val_loss: 0.093174, val_acc: 20.280354
+Epoch [612], train_loss: 0.091234, val_loss: 0.089609, val_acc: 20.579903
+Epoch [613], train_loss: 0.091146, val_loss: 0.094652, val_acc: 20.057316
+Epoch [614], train_loss: 0.091149, val_loss: 0.094026, val_acc: 20.149532
+Epoch [615], train_loss: 0.091070, val_loss: 0.092416, val_acc: 20.300636
+Epoch [616], train_loss: 0.091097, val_loss: 0.093613, val_acc: 20.175383
+Epoch [617], train_loss: 0.091014, val_loss: 0.089716, val_acc: 20.590155
+Epoch [618], train_loss: 0.090926, val_loss: 0.092276, val_acc: 20.507156
+Epoch [619], train_loss: 0.090840, val_loss: 0.093382, val_acc: 20.444971
+Epoch [620], train_loss: 0.090840, val_loss: 0.092079, val_acc: 20.455069
+Epoch [621], train_loss: 0.090796, val_loss: 0.091943, val_acc: 20.595671
+Epoch [622], train_loss: 0.090617, val_loss: 0.091147, val_acc: 20.576960
+Epoch [623], train_loss: 0.090516, val_loss: 0.092077, val_acc: 20.305510
+Epoch [624], train_loss: 0.090602, val_loss: 0.090970, val_acc: 20.539284
+Epoch [625], train_loss: 0.090491, val_loss: 0.090470, val_acc: 20.731520
+Epoch [626], train_loss: 0.090451, val_loss: 0.092173, val_acc: 20.414900
+Epoch [627], train_loss: 0.090296, val_loss: 0.093062, val_acc: 20.271902
+Epoch [628], train_loss: 0.090227, val_loss: 0.091700, val_acc: 20.512886
+Epoch [629], train_loss: 0.090257, val_loss: 0.092597, val_acc: 20.432550
+Epoch [630], train_loss: 0.090285, val_loss: 0.091722, val_acc: 20.469671
+Epoch [631], train_loss: 0.090185, val_loss: 0.089836, val_acc: 20.641794
+Epoch [632], train_loss: 0.090057, val_loss: 0.090623, val_acc: 20.653349
+Epoch [633], train_loss: 0.089943, val_loss: 0.091272, val_acc: 20.495754
+Epoch [634], train_loss: 0.089944, val_loss: 0.094600, val_acc: 20.196131
+Epoch [635], train_loss: 0.089921, val_loss: 0.091124, val_acc: 20.528603
+Epoch [636], train_loss: 0.089902, val_loss: 0.090556, val_acc: 20.602238
+Epoch [637], train_loss: 0.089909, val_loss: 0.091998, val_acc: 20.462626
+Epoch [638], train_loss: 0.089843, val_loss: 0.090735, val_acc: 20.599741
+Epoch [639], train_loss: 0.089840, val_loss: 0.090093, val_acc: 20.806845
+Epoch [640], train_loss: 0.089691, val_loss: 0.091620, val_acc: 20.665321
+Epoch [641], train_loss: 0.089654, val_loss: 0.091558, val_acc: 20.556753
+Epoch [642], train_loss: 0.089485, val_loss: 0.092343, val_acc: 20.467682
+Epoch [643], train_loss: 0.089681, val_loss: 0.089587, val_acc: 20.739481
+Epoch [644], train_loss: 0.089544, val_loss: 0.086906, val_acc: 21.038496
+Epoch [645], train_loss: 0.089573, val_loss: 0.092886, val_acc: 20.257010
+Epoch [646], train_loss: 0.089321, val_loss: 0.090255, val_acc: 20.693146
+Epoch [647], train_loss: 0.089290, val_loss: 0.091638, val_acc: 20.622940
+Epoch [648], train_loss: 0.089313, val_loss: 0.088933, val_acc: 20.689533
+Epoch [649], train_loss: 0.089180, val_loss: 0.092441, val_acc: 20.483444
+Epoch [650], train_loss: 0.089170, val_loss: 0.092383, val_acc: 20.488943
+Epoch [651], train_loss: 0.089186, val_loss: 0.088631, val_acc: 20.827337
+Epoch [652], train_loss: 0.089040, val_loss: 0.091035, val_acc: 20.467127
+Epoch [653], train_loss: 0.088977, val_loss: 0.089829, val_acc: 20.746084
+Epoch [654], train_loss: 0.088850, val_loss: 0.091289, val_acc: 20.601744
+Epoch [655], train_loss: 0.089022, val_loss: 0.091939, val_acc: 20.495438
+Epoch [656], train_loss: 0.088843, val_loss: 0.088324, val_acc: 20.972136
+Epoch [657], train_loss: 0.088904, val_loss: 0.090254, val_acc: 20.703583
+Epoch [658], train_loss: 0.088833, val_loss: 0.090077, val_acc: 20.863749
+Epoch [659], train_loss: 0.088724, val_loss: 0.088802, val_acc: 20.771929
+Epoch [660], train_loss: 0.088625, val_loss: 0.087175, val_acc: 20.909407
+Epoch [661], train_loss: 0.088705, val_loss: 0.091580, val_acc: 20.706135
+Epoch [662], train_loss: 0.088637, val_loss: 0.086300, val_acc: 21.068970
+Epoch [663], train_loss: 0.088558, val_loss: 0.090963, val_acc: 20.642817
+Epoch [664], train_loss: 0.088488, val_loss: 0.090407, val_acc: 20.766886
+Epoch [665], train_loss: 0.088374, val_loss: 0.090007, val_acc: 20.810093
+Epoch [666], train_loss: 0.088327, val_loss: 0.089108, val_acc: 20.839972
+Epoch [667], train_loss: 0.088236, val_loss: 0.087606, val_acc: 21.066820
+Epoch [668], train_loss: 0.088238, val_loss: 0.091323, val_acc: 20.710089
+Epoch [669], train_loss: 0.088172, val_loss: 0.088539, val_acc: 20.864134
+Epoch [670], train_loss: 0.088181, val_loss: 0.086873, val_acc: 21.128557
+Epoch [671], train_loss: 0.088121, val_loss: 0.086467, val_acc: 21.114948
+Epoch [672], train_loss: 0.088063, val_loss: 0.087278, val_acc: 21.047909
+Epoch [673], train_loss: 0.088028, val_loss: 0.087952, val_acc: 20.970779
+Epoch [674], train_loss: 0.087934, val_loss: 0.090444, val_acc: 20.795456
+Epoch [675], train_loss: 0.087976, val_loss: 0.090834, val_acc: 20.869888
+Epoch [676], train_loss: 0.087984, val_loss: 0.085577, val_acc: 21.324163
+Epoch [677], train_loss: 0.087761, val_loss: 0.090807, val_acc: 20.743801
+Epoch [678], train_loss: 0.087643, val_loss: 0.088319, val_acc: 20.960657
+Epoch [679], train_loss: 0.087691, val_loss: 0.089191, val_acc: 20.825397
+Epoch [680], train_loss: 0.087741, val_loss: 0.087897, val_acc: 20.947199
+Epoch [681], train_loss: 0.087857, val_loss: 0.087801, val_acc: 21.053244
+Epoch [682], train_loss: 0.087670, val_loss: 0.087732, val_acc: 21.109669
+Epoch [683], train_loss: 0.087532, val_loss: 0.088054, val_acc: 21.082537
+Epoch [684], train_loss: 0.087444, val_loss: 0.084919, val_acc: 21.471165
+Epoch [685], train_loss: 0.087434, val_loss: 0.087146, val_acc: 21.067682
+Epoch [686], train_loss: 0.087424, val_loss: 0.087886, val_acc: 21.113493
+Epoch [687], train_loss: 0.087508, val_loss: 0.086572, val_acc: 21.135183
+Epoch [688], train_loss: 0.087294, val_loss: 0.087392, val_acc: 21.102345
+Epoch [689], train_loss: 0.087186, val_loss: 0.086558, val_acc: 21.236383
+Epoch [690], train_loss: 0.087130, val_loss: 0.086544, val_acc: 21.227709
+Epoch [691], train_loss: 0.087106, val_loss: 0.089485, val_acc: 21.008493
+Epoch [692], train_loss: 0.087307, val_loss: 0.088054, val_acc: 21.105581
+Epoch [693], train_loss: 0.087056, val_loss: 0.086865, val_acc: 21.291828
+Epoch [694], train_loss: 0.087039, val_loss: 0.088641, val_acc: 21.007141
+Epoch [695], train_loss: 0.086971, val_loss: 0.090857, val_acc: 20.663460
+Epoch [696], train_loss: 0.087044, val_loss: 0.089347, val_acc: 20.935743
+Epoch [697], train_loss: 0.086931, val_loss: 0.087892, val_acc: 21.009180
+Epoch [698], train_loss: 0.086782, val_loss: 0.085758, val_acc: 21.242529
+Epoch [699], train_loss: 0.086828, val_loss: 0.085323, val_acc: 21.335232
+Epoch [700], train_loss: 0.086759, val_loss: 0.084169, val_acc: 21.493330
+Epoch [701], train_loss: 0.086688, val_loss: 0.087627, val_acc: 21.064209
+Epoch [702], train_loss: 0.086675, val_loss: 0.086279, val_acc: 21.326115
+Epoch [703], train_loss: 0.086547, val_loss: 0.085947, val_acc: 21.395931
+Epoch [704], train_loss: 0.086434, val_loss: 0.085413, val_acc: 21.238132
+Epoch [705], train_loss: 0.086567, val_loss: 0.088396, val_acc: 20.897749
+Epoch [706], train_loss: 0.086423, val_loss: 0.085422, val_acc: 21.316605
+Epoch [707], train_loss: 0.086645, val_loss: 0.091775, val_acc: 20.523760
+Epoch [708], train_loss: 0.086286, val_loss: 0.086707, val_acc: 21.277384
+Epoch [709], train_loss: 0.086363, val_loss: 0.088108, val_acc: 21.087206
+Epoch [710], train_loss: 0.086183, val_loss: 0.084319, val_acc: 21.462318
+Epoch [711], train_loss: 0.086087, val_loss: 0.086501, val_acc: 21.212666
+Epoch [712], train_loss: 0.086235, val_loss: 0.085399, val_acc: 21.381233
+Epoch [713], train_loss: 0.086127, val_loss: 0.085314, val_acc: 21.425547
+Epoch [714], train_loss: 0.086009, val_loss: 0.085501, val_acc: 21.359749
+Epoch [715], train_loss: 0.086089, val_loss: 0.087311, val_acc: 21.260563
+Epoch [716], train_loss: 0.086011, val_loss: 0.087470, val_acc: 21.243757
+Epoch [717], train_loss: 0.086053, val_loss: 0.088079, val_acc: 21.218149
+Epoch [718], train_loss: 0.085798, val_loss: 0.086284, val_acc: 21.311266
+Epoch [719], train_loss: 0.085852, val_loss: 0.086638, val_acc: 21.279953
+Epoch [720], train_loss: 0.085777, val_loss: 0.085016, val_acc: 21.405302
+Epoch [721], train_loss: 0.085818, val_loss: 0.089755, val_acc: 21.031260
+Epoch [722], train_loss: 0.085668, val_loss: 0.085396, val_acc: 21.351213
+Epoch [723], train_loss: 0.085677, val_loss: 0.085269, val_acc: 21.470911
+Epoch [724], train_loss: 0.085704, val_loss: 0.089542, val_acc: 20.918007
+Epoch [725], train_loss: 0.085541, val_loss: 0.086733, val_acc: 21.254734
+Epoch [726], train_loss: 0.085607, val_loss: 0.088357, val_acc: 21.242386
+Epoch [727], train_loss: 0.085413, val_loss: 0.084734, val_acc: 21.435287
+Epoch [728], train_loss: 0.085388, val_loss: 0.082994, val_acc: 21.720652
+Epoch [729], train_loss: 0.085485, val_loss: 0.084538, val_acc: 21.499363
+Epoch [730], train_loss: 0.085420, val_loss: 0.086265, val_acc: 21.360359
+Epoch [731], train_loss: 0.085340, val_loss: 0.085069, val_acc: 21.529999
+Epoch [732], train_loss: 0.085324, val_loss: 0.086624, val_acc: 21.254551
+Epoch [733], train_loss: 0.085299, val_loss: 0.087622, val_acc: 21.278021
+Epoch [734], train_loss: 0.085216, val_loss: 0.083826, val_acc: 21.581350
+Epoch [735], train_loss: 0.085078, val_loss: 0.085273, val_acc: 21.418535
+Epoch [736], train_loss: 0.085103, val_loss: 0.083661, val_acc: 21.562204
+Epoch [737], train_loss: 0.085085, val_loss: 0.084488, val_acc: 21.500809
+Epoch [738], train_loss: 0.085000, val_loss: 0.083046, val_acc: 21.749498
+Epoch [739], train_loss: 0.084812, val_loss: 0.083687, val_acc: 21.667557
+Epoch [740], train_loss: 0.084853, val_loss: 0.085425, val_acc: 21.317572
+Epoch [741], train_loss: 0.085011, val_loss: 0.083369, val_acc: 21.644753
+Epoch [742], train_loss: 0.084802, val_loss: 0.086664, val_acc: 21.330587
+Epoch [743], train_loss: 0.084835, val_loss: 0.085925, val_acc: 21.431906
+Epoch [744], train_loss: 0.084657, val_loss: 0.083404, val_acc: 21.599945
+Epoch [745], train_loss: 0.084719, val_loss: 0.085241, val_acc: 21.369019
+Epoch [746], train_loss: 0.084707, val_loss: 0.086599, val_acc: 21.261702
+Epoch [747], train_loss: 0.084637, val_loss: 0.088163, val_acc: 21.134501
+Epoch [748], train_loss: 0.084674, val_loss: 0.084982, val_acc: 21.476965
+Epoch [749], train_loss: 0.084475, val_loss: 0.082560, val_acc: 21.734074
+Epoch [750], train_loss: 0.084444, val_loss: 0.087575, val_acc: 21.104136
+Epoch [751], train_loss: 0.084419, val_loss: 0.085158, val_acc: 21.521156
+Epoch [752], train_loss: 0.084581, val_loss: 0.083303, val_acc: 21.647074
+Epoch [753], train_loss: 0.084391, val_loss: 0.085310, val_acc: 21.486702
+Epoch [754], train_loss: 0.084263, val_loss: 0.088980, val_acc: 21.076738
+Epoch [755], train_loss: 0.084285, val_loss: 0.084445, val_acc: 21.575731
+Epoch [756], train_loss: 0.084349, val_loss: 0.086181, val_acc: 21.381342
+Epoch [757], train_loss: 0.084169, val_loss: 0.083724, val_acc: 21.688454
+Epoch [758], train_loss: 0.084132, val_loss: 0.084827, val_acc: 21.442957
+Epoch [759], train_loss: 0.084177, val_loss: 0.081680, val_acc: 21.810495
+Epoch [760], train_loss: 0.084033, val_loss: 0.087318, val_acc: 21.364668
+Epoch [761], train_loss: 0.084020, val_loss: 0.082381, val_acc: 21.729193
+Epoch [762], train_loss: 0.083992, val_loss: 0.085970, val_acc: 21.310217
+Epoch [763], train_loss: 0.083957, val_loss: 0.083635, val_acc: 21.659918
+Epoch [764], train_loss: 0.083840, val_loss: 0.084830, val_acc: 21.634018
+Epoch [765], train_loss: 0.083861, val_loss: 0.085882, val_acc: 21.460901
+Epoch [766], train_loss: 0.083773, val_loss: 0.082781, val_acc: 21.773241
+Epoch [767], train_loss: 0.083849, val_loss: 0.083963, val_acc: 21.652430
+Epoch [768], train_loss: 0.083582, val_loss: 0.082556, val_acc: 21.867868
+Epoch [769], train_loss: 0.083675, val_loss: 0.082754, val_acc: 21.762871
+Epoch [770], train_loss: 0.083673, val_loss: 0.082086, val_acc: 21.883165
+Epoch [771], train_loss: 0.083679, val_loss: 0.083353, val_acc: 21.710995
+Epoch [772], train_loss: 0.083568, val_loss: 0.084342, val_acc: 21.529955
+Epoch [773], train_loss: 0.083688, val_loss: 0.082519, val_acc: 21.825768
+Epoch [774], train_loss: 0.083614, val_loss: 0.085456, val_acc: 21.440390
+Epoch [775], train_loss: 0.083441, val_loss: 0.083903, val_acc: 21.681726
+Epoch [776], train_loss: 0.083537, val_loss: 0.084350, val_acc: 21.579576
+Epoch [777], train_loss: 0.083364, val_loss: 0.085615, val_acc: 21.446217
+Epoch [778], train_loss: 0.083325, val_loss: 0.087544, val_acc: 21.372976
+Epoch [779], train_loss: 0.083307, val_loss: 0.081753, val_acc: 21.954659
+Epoch [780], train_loss: 0.083188, val_loss: 0.083965, val_acc: 21.773054
+Epoch [781], train_loss: 0.083087, val_loss: 0.081658, val_acc: 21.895203
+Epoch [782], train_loss: 0.083125, val_loss: 0.080939, val_acc: 21.948252
+Epoch [783], train_loss: 0.083111, val_loss: 0.081433, val_acc: 21.806129
+Epoch [784], train_loss: 0.082996, val_loss: 0.086043, val_acc: 21.511145
+Epoch [785], train_loss: 0.082950, val_loss: 0.084689, val_acc: 21.509264
+Epoch [786], train_loss: 0.083072, val_loss: 0.082952, val_acc: 21.636360
+Epoch [787], train_loss: 0.083038, val_loss: 0.083074, val_acc: 21.759047
+Epoch [788], train_loss: 0.082807, val_loss: 0.083327, val_acc: 21.644602
+Epoch [789], train_loss: 0.082899, val_loss: 0.083403, val_acc: 21.638954
+Epoch [790], train_loss: 0.082874, val_loss: 0.086640, val_acc: 21.403547
+Epoch [791], train_loss: 0.082858, val_loss: 0.079257, val_acc: 22.201477
+Epoch [792], train_loss: 0.082810, val_loss: 0.082007, val_acc: 21.890913
+Epoch [793], train_loss: 0.082814, val_loss: 0.081190, val_acc: 21.993046
+Epoch [794], train_loss: 0.082710, val_loss: 0.081223, val_acc: 21.978949
+Epoch [795], train_loss: 0.082504, val_loss: 0.083111, val_acc: 21.908890
+Epoch [796], train_loss: 0.082706, val_loss: 0.083864, val_acc: 21.556700
+Epoch [797], train_loss: 0.082731, val_loss: 0.081690, val_acc: 21.858198
+Epoch [798], train_loss: 0.082611, val_loss: 0.082397, val_acc: 21.840958
+Epoch [799], train_loss: 0.082561, val_loss: 0.081138, val_acc: 21.972548
+Epoch [800], train_loss: 0.082540, val_loss: 0.080814, val_acc: 21.970301
+Epoch [801], train_loss: 0.082467, val_loss: 0.079324, val_acc: 22.238138
+Epoch [802], train_loss: 0.082356, val_loss: 0.081682, val_acc: 21.907101
+Epoch [803], train_loss: 0.082211, val_loss: 0.082724, val_acc: 21.822502
+Epoch [804], train_loss: 0.082348, val_loss: 0.081978, val_acc: 21.890930
+Epoch [805], train_loss: 0.082328, val_loss: 0.082908, val_acc: 21.746260
+Epoch [806], train_loss: 0.082168, val_loss: 0.081359, val_acc: 22.008184
+Epoch [807], train_loss: 0.082179, val_loss: 0.081298, val_acc: 21.935318
+Epoch [808], train_loss: 0.081992, val_loss: 0.081409, val_acc: 22.035486
+Epoch [809], train_loss: 0.082079, val_loss: 0.082134, val_acc: 21.929186
+Epoch [810], train_loss: 0.082082, val_loss: 0.079084, val_acc: 22.228878
+Epoch [811], train_loss: 0.081993, val_loss: 0.082158, val_acc: 21.944122
+Epoch [812], train_loss: 0.082048, val_loss: 0.080478, val_acc: 22.125200
+Epoch [813], train_loss: 0.081979, val_loss: 0.079475, val_acc: 22.083454
+Epoch [814], train_loss: 0.082049, val_loss: 0.082308, val_acc: 21.673790
+Epoch [815], train_loss: 0.081863, val_loss: 0.083673, val_acc: 21.775955
+Epoch [816], train_loss: 0.081825, val_loss: 0.080248, val_acc: 22.177689
+Epoch [817], train_loss: 0.081760, val_loss: 0.083939, val_acc: 21.743095
+Epoch [818], train_loss: 0.081723, val_loss: 0.082162, val_acc: 21.992100
+Epoch [819], train_loss: 0.081907, val_loss: 0.079812, val_acc: 22.000603
+Epoch [820], train_loss: 0.081726, val_loss: 0.082910, val_acc: 21.811174
+Epoch [821], train_loss: 0.081638, val_loss: 0.079377, val_acc: 22.213440
+Epoch [822], train_loss: 0.081595, val_loss: 0.081086, val_acc: 21.998554
+Epoch [823], train_loss: 0.081691, val_loss: 0.082525, val_acc: 21.888626
+Epoch [824], train_loss: 0.081550, val_loss: 0.083390, val_acc: 21.749189
+Epoch [825], train_loss: 0.081403, val_loss: 0.079330, val_acc: 22.212202
+Epoch [826], train_loss: 0.081498, val_loss: 0.083608, val_acc: 21.759090
+Epoch [827], train_loss: 0.081406, val_loss: 0.078290, val_acc: 22.408125
+Epoch [828], train_loss: 0.081319, val_loss: 0.082889, val_acc: 21.788984
+Epoch [829], train_loss: 0.081352, val_loss: 0.079548, val_acc: 22.017208
+Epoch [830], train_loss: 0.081366, val_loss: 0.079509, val_acc: 22.181055
+Epoch [831], train_loss: 0.081184, val_loss: 0.083528, val_acc: 21.719898
+Epoch [832], train_loss: 0.081299, val_loss: 0.080507, val_acc: 22.147020
+Epoch [833], train_loss: 0.081179, val_loss: 0.081503, val_acc: 22.017792
+Epoch [834], train_loss: 0.081120, val_loss: 0.080379, val_acc: 21.947628
+Epoch [835], train_loss: 0.081005, val_loss: 0.081435, val_acc: 21.988878
+Epoch [836], train_loss: 0.081133, val_loss: 0.079686, val_acc: 22.094521
+Epoch [837], train_loss: 0.080971, val_loss: 0.080357, val_acc: 22.221617
+Epoch [838], train_loss: 0.080940, val_loss: 0.080549, val_acc: 22.132029
+Epoch [839], train_loss: 0.080970, val_loss: 0.080785, val_acc: 22.090584
+Epoch [840], train_loss: 0.081053, val_loss: 0.081910, val_acc: 22.137903
+Epoch [841], train_loss: 0.080824, val_loss: 0.078909, val_acc: 22.300512
+Epoch [842], train_loss: 0.080854, val_loss: 0.079555, val_acc: 22.138212
+Epoch [843], train_loss: 0.080693, val_loss: 0.081002, val_acc: 22.131710
+Epoch [844], train_loss: 0.080652, val_loss: 0.080115, val_acc: 22.150377
+Epoch [845], train_loss: 0.080730, val_loss: 0.084492, val_acc: 21.802214
+Epoch [846], train_loss: 0.080654, val_loss: 0.081282, val_acc: 22.200954
+Epoch [847], train_loss: 0.080592, val_loss: 0.079520, val_acc: 22.225138
+Epoch [848], train_loss: 0.080706, val_loss: 0.080894, val_acc: 22.144773
+Epoch [849], train_loss: 0.080600, val_loss: 0.078567, val_acc: 22.307543
+Epoch [850], train_loss: 0.080509, val_loss: 0.080663, val_acc: 22.120930
+Epoch [851], train_loss: 0.080434, val_loss: 0.080469, val_acc: 22.209629
+Epoch [852], train_loss: 0.080447, val_loss: 0.078943, val_acc: 22.300531
+Epoch [853], train_loss: 0.080456, val_loss: 0.083024, val_acc: 21.892548
+Epoch [854], train_loss: 0.080440, val_loss: 0.081038, val_acc: 22.154161
+Epoch [855], train_loss: 0.080243, val_loss: 0.078924, val_acc: 22.359043
+Epoch [856], train_loss: 0.080223, val_loss: 0.080617, val_acc: 22.095530
+Epoch [857], train_loss: 0.080324, val_loss: 0.082319, val_acc: 22.031548
+Epoch [858], train_loss: 0.080235, val_loss: 0.081138, val_acc: 22.062458
+Epoch [859], train_loss: 0.080371, val_loss: 0.080455, val_acc: 22.263344
+Epoch [860], train_loss: 0.080294, val_loss: 0.079103, val_acc: 22.354404
+Epoch [861], train_loss: 0.080276, val_loss: 0.079549, val_acc: 22.343287
+Epoch [862], train_loss: 0.080133, val_loss: 0.082605, val_acc: 21.962111
+Epoch [863], train_loss: 0.080112, val_loss: 0.082325, val_acc: 22.022306
+Epoch [864], train_loss: 0.080035, val_loss: 0.078335, val_acc: 22.262192
+Epoch [865], train_loss: 0.079976, val_loss: 0.079023, val_acc: 22.342522
+Epoch [866], train_loss: 0.079953, val_loss: 0.080518, val_acc: 22.296797
+Epoch [867], train_loss: 0.079911, val_loss: 0.079942, val_acc: 22.190430
+Epoch [868], train_loss: 0.079894, val_loss: 0.083222, val_acc: 21.980671
+Epoch [869], train_loss: 0.079893, val_loss: 0.079657, val_acc: 22.222704
+Epoch [870], train_loss: 0.079782, val_loss: 0.081262, val_acc: 22.194120
+Epoch [871], train_loss: 0.079790, val_loss: 0.079433, val_acc: 22.339094
+Epoch [872], train_loss: 0.079706, val_loss: 0.077778, val_acc: 22.488876
+Epoch [873], train_loss: 0.079616, val_loss: 0.078294, val_acc: 22.410322
+Epoch [874], train_loss: 0.079793, val_loss: 0.079109, val_acc: 22.318226
+Epoch [875], train_loss: 0.079635, val_loss: 0.082513, val_acc: 22.013279
+Epoch [876], train_loss: 0.079647, val_loss: 0.081278, val_acc: 22.124659
+Epoch [877], train_loss: 0.079550, val_loss: 0.082452, val_acc: 22.121393
+Epoch [878], train_loss: 0.079628, val_loss: 0.079111, val_acc: 22.347805
+Epoch [879], train_loss: 0.079557, val_loss: 0.077611, val_acc: 22.577549
+Epoch [880], train_loss: 0.079485, val_loss: 0.079311, val_acc: 22.372314
+Epoch [881], train_loss: 0.079421, val_loss: 0.079383, val_acc: 22.305712
+Epoch [882], train_loss: 0.079606, val_loss: 0.079702, val_acc: 22.192036
+Epoch [883], train_loss: 0.079479, val_loss: 0.080300, val_acc: 22.180752
+Epoch [884], train_loss: 0.079283, val_loss: 0.078581, val_acc: 22.410004
+Epoch [885], train_loss: 0.079459, val_loss: 0.076778, val_acc: 22.662949
+Epoch [886], train_loss: 0.079237, val_loss: 0.077908, val_acc: 22.455723
+Epoch [887], train_loss: 0.079238, val_loss: 0.079274, val_acc: 22.362087
+Epoch [888], train_loss: 0.079100, val_loss: 0.080484, val_acc: 22.222727
+Epoch [889], train_loss: 0.079112, val_loss: 0.078620, val_acc: 22.285040
+Epoch [890], train_loss: 0.079106, val_loss: 0.081295, val_acc: 22.192448
+Epoch [891], train_loss: 0.079121, val_loss: 0.079393, val_acc: 22.263433
+Epoch [892], train_loss: 0.079003, val_loss: 0.078020, val_acc: 22.489775
+Epoch [893], train_loss: 0.079030, val_loss: 0.080460, val_acc: 22.280684
+Epoch [894], train_loss: 0.078996, val_loss: 0.077450, val_acc: 22.518827
+Epoch [895], train_loss: 0.078973, val_loss: 0.077278, val_acc: 22.540302
+Epoch [896], train_loss: 0.078925, val_loss: 0.078719, val_acc: 22.451042
+Epoch [897], train_loss: 0.078970, val_loss: 0.078045, val_acc: 22.408976
+Epoch [898], train_loss: 0.078841, val_loss: 0.078456, val_acc: 22.486809
+Epoch [899], train_loss: 0.078845, val_loss: 0.077442, val_acc: 22.569315
+Epoch [900], train_loss: 0.078804, val_loss: 0.078731, val_acc: 22.520378
+Epoch [901], train_loss: 0.078671, val_loss: 0.081728, val_acc: 22.191216
+Epoch [902], train_loss: 0.078609, val_loss: 0.077000, val_acc: 22.572493
+Epoch [903], train_loss: 0.078784, val_loss: 0.077367, val_acc: 22.541904
+Epoch [904], train_loss: 0.078650, val_loss: 0.078686, val_acc: 22.417467
+Epoch [905], train_loss: 0.078821, val_loss: 0.077982, val_acc: 22.522579
+Epoch [906], train_loss: 0.078533, val_loss: 0.079140, val_acc: 22.412580
+Epoch [907], train_loss: 0.078461, val_loss: 0.076581, val_acc: 22.674587
+Epoch [908], train_loss: 0.078469, val_loss: 0.081299, val_acc: 22.223415
+Epoch [909], train_loss: 0.078514, val_loss: 0.077828, val_acc: 22.604506
+Epoch [910], train_loss: 0.078513, val_loss: 0.077482, val_acc: 22.520199
+Epoch [911], train_loss: 0.078476, val_loss: 0.077608, val_acc: 22.524570
+Epoch [912], train_loss: 0.078461, val_loss: 0.078657, val_acc: 22.425072
+Epoch [913], train_loss: 0.078388, val_loss: 0.076591, val_acc: 22.712986
+Epoch [914], train_loss: 0.078164, val_loss: 0.076681, val_acc: 22.619888
+Epoch [915], train_loss: 0.078300, val_loss: 0.076461, val_acc: 22.742723
+Epoch [916], train_loss: 0.078149, val_loss: 0.079100, val_acc: 22.443201
+Epoch [917], train_loss: 0.078205, val_loss: 0.079109, val_acc: 22.451839
+Epoch [918], train_loss: 0.078225, val_loss: 0.078282, val_acc: 22.462244
+Epoch [919], train_loss: 0.078197, val_loss: 0.079448, val_acc: 22.350584
+Epoch [920], train_loss: 0.078098, val_loss: 0.077243, val_acc: 22.629509
+Epoch [921], train_loss: 0.078195, val_loss: 0.077729, val_acc: 22.655729
+Epoch [922], train_loss: 0.078186, val_loss: 0.074670, val_acc: 22.894983
+Epoch [923], train_loss: 0.078039, val_loss: 0.076755, val_acc: 22.685179
+Epoch [924], train_loss: 0.077947, val_loss: 0.078508, val_acc: 22.540279
+Epoch [925], train_loss: 0.078020, val_loss: 0.079961, val_acc: 22.305689
+Epoch [926], train_loss: 0.077985, val_loss: 0.075710, val_acc: 22.679688
+Epoch [927], train_loss: 0.077775, val_loss: 0.079272, val_acc: 22.389362
+Epoch [928], train_loss: 0.077815, val_loss: 0.075882, val_acc: 22.739826
+Epoch [929], train_loss: 0.077802, val_loss: 0.076942, val_acc: 22.540545
+Epoch [930], train_loss: 0.077741, val_loss: 0.077008, val_acc: 22.686834
+Epoch [931], train_loss: 0.077820, val_loss: 0.076932, val_acc: 22.570705
+Epoch [932], train_loss: 0.077716, val_loss: 0.077763, val_acc: 22.565802
+Epoch [933], train_loss: 0.077852, val_loss: 0.078761, val_acc: 22.531895
+Epoch [934], train_loss: 0.077654, val_loss: 0.075775, val_acc: 22.795181
+Epoch [935], train_loss: 0.077639, val_loss: 0.077172, val_acc: 22.688290
+Epoch [936], train_loss: 0.077643, val_loss: 0.077225, val_acc: 22.704432
+Epoch [937], train_loss: 0.077623, val_loss: 0.077638, val_acc: 22.655617
+Epoch [938], train_loss: 0.077521, val_loss: 0.077409, val_acc: 22.709793
+Epoch [939], train_loss: 0.077552, val_loss: 0.077635, val_acc: 22.597967
+Epoch [940], train_loss: 0.077462, val_loss: 0.075915, val_acc: 22.712812
+Epoch [941], train_loss: 0.077500, val_loss: 0.079188, val_acc: 22.399292
+Epoch [942], train_loss: 0.077444, val_loss: 0.078458, val_acc: 22.494795
+Epoch [943], train_loss: 0.077384, val_loss: 0.076700, val_acc: 22.644789
+Epoch [944], train_loss: 0.077362, val_loss: 0.078342, val_acc: 22.497507
+Epoch [945], train_loss: 0.077407, val_loss: 0.079591, val_acc: 22.390526
+Epoch [946], train_loss: 0.077180, val_loss: 0.077249, val_acc: 22.604759
+Epoch [947], train_loss: 0.077317, val_loss: 0.078612, val_acc: 22.419691
+Epoch [948], train_loss: 0.077193, val_loss: 0.078157, val_acc: 22.579712
+Epoch [949], train_loss: 0.077275, val_loss: 0.076650, val_acc: 22.706032
+Epoch [950], train_loss: 0.077276, val_loss: 0.076443, val_acc: 22.731899
+Epoch [951], train_loss: 0.077175, val_loss: 0.076777, val_acc: 22.651251
+Epoch [952], train_loss: 0.077096, val_loss: 0.076759, val_acc: 22.720177
+Epoch [953], train_loss: 0.077146, val_loss: 0.076533, val_acc: 22.714039
+Epoch [954], train_loss: 0.077083, val_loss: 0.077289, val_acc: 22.684349
+Epoch [955], train_loss: 0.076976, val_loss: 0.078600, val_acc: 22.612921
+Epoch [956], train_loss: 0.076966, val_loss: 0.077843, val_acc: 22.605375
+Epoch [957], train_loss: 0.076875, val_loss: 0.075234, val_acc: 22.886093
+Epoch [958], train_loss: 0.076868, val_loss: 0.076782, val_acc: 22.682844
+Epoch [959], train_loss: 0.076802, val_loss: 0.077537, val_acc: 22.653984
+Epoch [960], train_loss: 0.076824, val_loss: 0.075469, val_acc: 22.759050
+Epoch [961], train_loss: 0.076842, val_loss: 0.076791, val_acc: 22.737593
+Epoch [962], train_loss: 0.076783, val_loss: 0.076393, val_acc: 22.810308
+Epoch [963], train_loss: 0.076756, val_loss: 0.077097, val_acc: 22.678101
+Epoch [964], train_loss: 0.076758, val_loss: 0.075911, val_acc: 22.828835
+Epoch [965], train_loss: 0.076716, val_loss: 0.075597, val_acc: 22.878311
+Epoch [966], train_loss: 0.076731, val_loss: 0.078315, val_acc: 22.624813
+Epoch [967], train_loss: 0.076725, val_loss: 0.075668, val_acc: 22.923777
+Epoch [968], train_loss: 0.076705, val_loss: 0.077966, val_acc: 22.640991
+Epoch [969], train_loss: 0.076643, val_loss: 0.075955, val_acc: 22.896738
+Epoch [970], train_loss: 0.076533, val_loss: 0.075184, val_acc: 22.900373
+Epoch [971], train_loss: 0.076482, val_loss: 0.074431, val_acc: 22.970127
+Epoch [972], train_loss: 0.076386, val_loss: 0.077311, val_acc: 22.727228
+Epoch [973], train_loss: 0.076397, val_loss: 0.077961, val_acc: 22.742868
+Epoch [974], train_loss: 0.076502, val_loss: 0.075773, val_acc: 22.743832
+Epoch [975], train_loss: 0.076473, val_loss: 0.077876, val_acc: 22.599415
+Epoch [976], train_loss: 0.076245, val_loss: 0.074307, val_acc: 23.015121
+Epoch [977], train_loss: 0.076284, val_loss: 0.073168, val_acc: 23.018652
+Epoch [978], train_loss: 0.076235, val_loss: 0.074213, val_acc: 22.970844
+Epoch [979], train_loss: 0.076152, val_loss: 0.077108, val_acc: 22.683550
+Epoch [980], train_loss: 0.076182, val_loss: 0.076428, val_acc: 22.731085
+Epoch [981], train_loss: 0.076234, val_loss: 0.075828, val_acc: 22.888508
+Epoch [982], train_loss: 0.076083, val_loss: 0.077906, val_acc: 22.627167
+Epoch [983], train_loss: 0.076099, val_loss: 0.077607, val_acc: 22.641041
+Epoch [984], train_loss: 0.076277, val_loss: 0.078938, val_acc: 22.500114
+Epoch [985], train_loss: 0.076101, val_loss: 0.076954, val_acc: 22.880836
+Epoch [986], train_loss: 0.076067, val_loss: 0.074812, val_acc: 22.952839
+Epoch [987], train_loss: 0.075942, val_loss: 0.078147, val_acc: 22.733261
+Epoch [988], train_loss: 0.076076, val_loss: 0.075591, val_acc: 22.846451
+Epoch [989], train_loss: 0.076035, val_loss: 0.076118, val_acc: 22.878208
+Epoch [990], train_loss: 0.076001, val_loss: 0.073679, val_acc: 23.062403
+Epoch [991], train_loss: 0.076040, val_loss: 0.074624, val_acc: 22.976173
+Epoch [992], train_loss: 0.075915, val_loss: 0.075120, val_acc: 22.965166
+Epoch [993], train_loss: 0.075918, val_loss: 0.074991, val_acc: 22.925604
+Epoch [994], train_loss: 0.075855, val_loss: 0.073124, val_acc: 23.210239
+Epoch [995], train_loss: 0.075882, val_loss: 0.074131, val_acc: 22.986351
+Epoch [996], train_loss: 0.075701, val_loss: 0.076325, val_acc: 22.835835
+Epoch [997], train_loss: 0.075737, val_loss: 0.076386, val_acc: 22.930254
+Epoch [998], train_loss: 0.075710, val_loss: 0.077822, val_acc: 22.755323
+Epoch [999], train_loss: 0.075787, val_loss: 0.072759, val_acc: 23.129541
+python3 ./UNet_V9_1.py  22986.71s user 21373.45s system 99% cpu 12:19:42.33 total
diff --git a/UNet/Train_model.sh b/UNet/Train_model.sh
index c9969a09599aa13ae9ab0c32bfa74966a80beb7b..c609f15161b36418fd589aa0d4b368f191fb9641 100644
--- a/UNet/Train_model.sh
+++ b/UNet/Train_model.sh
@@ -6,19 +6,18 @@
 #SBATCH --partition=c18g
 
 #SBATCH -J training_model
-#SBATCH -o Sim_logs/UNet_32_V9_V10_J.log
+#SBATCH -o Sim_logs/UNet_64_V14_%J.log
  
 #SBATCH --gres=gpu:1
 #SBATCH --time=90:00:00
 ### Request memory you need for your job in MB
-#SBATCH --mem-per-cpu=10000
+#SBATCH --mem-per-cpu=20000
 #SBATCH --mem-per-gpu=16000
 module load cuda
 module load python/3.7.11
 pip3 install --user -Iv -q torch==1.10.1
-time python3 ./UNet_V9_1.py
-time python3 ./UNet_V9_2.py
-time python3 ./UNet_V9_3.py
-time python3 ./UNet_V10.py
+#time python3 ./UNet_V12.py
+#time python3 ./UNet_V13.py
+time python3 ./UNet_V14.py
 #print GPU Information
 #$CUDA_ROOT/extras/demo_suite/deviceQuery -noprompt
diff --git a/UNet/Train_model15.sh b/UNet/Train_model15.sh
new file mode 100644
index 0000000000000000000000000000000000000000..cf9477de13891c1ff3c2c25f29cefb774478a102
--- /dev/null
+++ b/UNet/Train_model15.sh
@@ -0,0 +1,23 @@
+#!/usr/local_rwth/bin/zsh
+### Project account
+#SBATCH --account=rwth0744
+
+### Cluster Partition
+#SBATCH --partition=c18g
+
+#SBATCH -J training_model
+#SBATCH -o Sim_logs/UNet_64_V15_%J.log
+ 
+#SBATCH --gres=gpu:1
+#SBATCH --time=90:00:00
+### Request memory you need for your job in MB
+#SBATCH --mem-per-cpu=15000
+#SBATCH --mem-per-gpu=16000
+module load cuda
+module load python/3.7.11
+pip3 install --user -Iv -q torch==1.10.1
+#time python3 ./UNet_V12.py
+#time python3 ./UNet_V13.py
+time python3 ./UNet_V15.py
+#print GPU Information
+#$CUDA_ROOT/extras/demo_suite/deviceQuery -noprompt
diff --git a/UNet/Train_model2.sh b/UNet/Train_model2.sh
new file mode 100644
index 0000000000000000000000000000000000000000..8588d548a921811924c8208a82e9868ed73f146f
--- /dev/null
+++ b/UNet/Train_model2.sh
@@ -0,0 +1,23 @@
+#!/usr/local_rwth/bin/zsh
+### Project account
+#SBATCH --account=rwth0744
+
+### Cluster Partition
+#SBATCH --partition=c18g
+
+#SBATCH -J training_model
+#SBATCH -o Sim_logs/UNet_64_V16_%J.log
+ 
+#SBATCH --gres=gpu:1
+#SBATCH --time=90:00:00
+### Request memory you need for your job in MB
+#SBATCH --mem-per-cpu=20000
+#SBATCH --mem-per-gpu=16000
+module load cuda
+module load python/3.7.11
+pip3 install --user -Iv -q torch==1.10.1
+time python3 ./UNet_V16.py
+#time python3 ./UNet_V13.py
+#time python3 ./UNet_V14.py
+#print GPU Information
+#$CUDA_ROOT/extras/demo_suite/deviceQuery -noprompt
diff --git a/UNet/UNet_V10.py b/UNet/UNet_V10.py
index ed8320cc84c52d9d3eba9e067282eb266e71a0ad..05519330a19cd212e2d7f9fb93290f19879af87d 100644
--- a/UNet/UNet_V10.py
+++ b/UNet/UNet_V10.py
@@ -224,11 +224,11 @@ if __name__ == '__main__':
     path_to_rep = '/home/yk138599/Hiwi/damask3'
     use_seeds = True
     seed = 2193910023
-    num_epochs = 500
+    num_epochs = 230
     b_size = 32
     opt_func = torch.optim.Adam
-    lr = 0.00001
-    kernel = 5
+    lr = 0.00003
+    kernel = 7
     print(f'number auf epochs: {num_epochs}')
     print(f'batchsize: {b_size}')
     print(f'learning rate: {lr}')
diff --git a/UNet/UNet_V12.py b/UNet/UNet_V12.py
index 122915551774b1c8662f3f27e66a485ec1024e96..f4d06e02744e7606f0ab77be748c80a780c612ba 100644
--- a/UNet/UNet_V12.py
+++ b/UNet/UNet_V12.py
@@ -134,7 +134,7 @@ def accuracy(outputs, labels,normalization, threshold = 0.05):
     return percentage
     
 class UNet(UNetBase):
-    def __init__(self,kernel_size = 9, enc_chs=((6,16,32), (32,32,64), (64,64,128)), dec_chs_up=(128, 128, 64), dec_chs_conv=((192,128, 128),(160,64,64),(70,32,32)),normalization=np.array([0,1])):
+    def __init__(self,kernel_size = 9, enc_chs=((2,16,32), (32,32,64), (64,64,128)), dec_chs_up=(128, 128, 64), dec_chs_conv=((192,128, 128),(160,64,64),(66,32,32)),normalization=np.array([0,1])):
         super().__init__()
         self.encoder     = Encoder(kernel_size = kernel_size, chs = enc_chs)
         self.decoder     = Decoder(kernel_size = kernel_size, chs_upsampling = dec_chs_up, chs_conv = dec_chs_conv)
@@ -226,7 +226,7 @@ if __name__ == '__main__':
     use_seeds = False
     seed = 373686838
     num_epochs = 500
-    b_size = 8
+    b_size = 32
     opt_func = torch.optim.Adam
     lr = 0.00003
     kernel = 9
@@ -241,8 +241,8 @@ if __name__ == '__main__':
     random.seed(seed)
     np.random.seed(seed)
     device = get_default_device()
-    normalization = np.load(f'{path_to_rep}/UNet/Trainingsdata/Norm_min_max_64_angles.npy', allow_pickle = True)
-    train_dl, valid_dl = Create_Dataloader(f'{path_to_rep}/UNet/Trainingsdata/TD_norm_64_angles.pt', batch_size= b_size )
+    normalization = np.load(f'{path_to_rep}/UNet/Trainingsdata/Norm_min_max_64_phase.npy', allow_pickle = True)
+    train_dl, valid_dl = Create_Dataloader(f'{path_to_rep}/UNet/Trainingsdata/TD_norm_64_phase.pt', batch_size= b_size )
     train_dl = DeviceDataLoader(train_dl, device)
     valid_dl = DeviceDataLoader(valid_dl, device)
 
diff --git a/UNet/UNet_V13.py b/UNet/UNet_V13.py
index 9e961c38534944e82f011f6fe4003d7e10d67f2b..c4960ceb6d6ca8fb5ad5a6e6d283ca3eeeed72af 100644
--- a/UNet/UNet_V13.py
+++ b/UNet/UNet_V13.py
@@ -134,7 +134,7 @@ def accuracy(outputs, labels,normalization, threshold = 0.05):
     return percentage
     
 class UNet(UNetBase):
-    def __init__(self,kernel_size = 5, enc_chs=((6,6,16), (16,16,32), (32,32,64), (64,128,128)), dec_chs_up=(192, 256, 128, 64), dec_chs_conv=((192,64,64),(96,32,32),(48,16,16),(22,16,16)),normalization=np.array([0,1])):
+    def __init__(self,kernel_size = 5, enc_chs=((6,6,16), (16,16,32), (32,32,64), (64,128,128)), dec_chs_up=(128, 128, 64, 32), dec_chs_conv=((192,128,128),(160,64,64),(80,32,32),(38,16,1)),normalization=np.array([0,1])):
         super().__init__()
         self.encoder     = Encoder(kernel_size = kernel_size, chs = enc_chs)
         self.decoder     = Decoder(kernel_size = kernel_size, chs_upsampling = dec_chs_up, chs_conv = dec_chs_conv)
@@ -172,8 +172,8 @@ def fit(epochs, lr, model, train_loader, val_loader, path, opt_func=torch.optim.
         result['train_loss'] = torch.stack(train_losses).mean().item()
         model.epoch_end(epoch, result)
         history.append(result)
-    torch.save(model.state_dict(),f'{path}/Unet_dict_V13.pth')
-    torch.save(history,f'{path}/history_V13.pt')
+    torch.save(model.state_dict(),f'{path}/Unet_dict_V13_2.pth')
+    torch.save(history,f'{path}/history_V13_2.pt')
     return history
 
 def get_default_device():
@@ -226,7 +226,7 @@ if __name__ == '__main__':
     use_seeds = False
     seed = 373686838
     num_epochs = 500
-    b_size = 8
+    b_size = 16
     opt_func = torch.optim.Adam
     lr = 0.00003
     kernel = 9
diff --git a/UNet/UNet_V14.py b/UNet/UNet_V14.py
index e32d3668177d6adaa1dbe0ed6e456d276aba02d0..708b27da78319d4d85037bab8c6c93ccca3caccf 100644
--- a/UNet/UNet_V14.py
+++ b/UNet/UNet_V14.py
@@ -112,7 +112,7 @@ class UNetBase(nn.Module):
         input, labels = batch 
         out = self(input)                    # Generate predictions
         loss = F.l1_loss(out, labels)   # Calculate loss
-        acc = accuracy(out.detach(), labels.detach())         # Calculate accuracy
+        acc = accuracy(out.detach(), labels.detach(),self.normalization)         # Calculate accuracy
         return {'val_loss': loss.detach(), 'val_acc': acc}
         
     def validation_epoch_end(self, outputs):
@@ -225,10 +225,10 @@ if __name__ == '__main__':
     use_seeds = False
     seed = 373686838
     num_epochs = 500
-    b_size = 8
+    b_size = 32
     opt_func = torch.optim.Adam
     lr = 0.00003
-    kernel = 7
+    kernel = 9
     print(f'number auf epochs: {num_epochs}')
     print(f'batchsize: {b_size}')
     print(f'learning rate: {lr}')
diff --git a/UNet/UNet_V9_1_nopadding.py b/UNet/UNet_V15.py
similarity index 94%
rename from UNet/UNet_V9_1_nopadding.py
rename to UNet/UNet_V15.py
index 592aa4048169bb301ba798f3b35d1899443b0aae..1395565b527211a328fe08a8802fd8122243afcf 100644
--- a/UNet/UNet_V9_1_nopadding.py
+++ b/UNet/UNet_V15.py
@@ -25,12 +25,13 @@ class depthwise_separable_conv(nn.Module):
         self.pointwise_1 = nn.Conv3d(in_c, out_1_c, kernel_size=1, bias=True)
         self.batch_norm_1 = nn.BatchNorm3d(out_1_c)
         self.relu = nn.ReLU()
+        self.droptout = nn.Dropout3d(p=0.25)
         self.depthwise_2 = nn.Conv3d(out_1_c, out_1_c, kernel_size= kernel_size, padding=padding[1], groups=out_1_c, bias=True)
         self.pointwise_2 = nn.Conv3d(out_1_c, out_2_c, kernel_size=1, bias=True)
         self.batch_norm_2 = nn.BatchNorm3d(out_2_c)
     def forward(self, x):
-        x = self.batch_norm_1(self.relu(self.pointwise_1(self.depthwise_1(x))))
-        return self.batch_norm_2(self.relu(self.pointwise_2(self.depthwise_2(x))))
+        x = self.batch_norm_1(self.relu(self.droptout(self.pointwise_1(self.depthwise_1(x)))))
+        return self.batch_norm_2(self.relu(self.droptout(self.pointwise_2(self.depthwise_2(x)))))
 
 class convolution_Layer(nn.Module):
     def __init__(self, in_c, out_1_c, out_2_c, padding, kernel_size):
@@ -54,7 +55,7 @@ class head_layer(nn.Module):
         #return self.sig(self.pointwise(self.depthwise(x))) #convolution
 
 class Encoder(nn.Module):
-    def __init__(self,kernel_size, chs, padding=(("same","same"),("same","same"),("same","same"))):
+    def __init__(self,kernel_size, chs, padding=((0,"same"),("same","same"),("same","same"))):
       super().__init__()
       self.channels = chs
       self.enc_blocks = nn.ModuleList([depthwise_separable_conv(chs[i][0], chs[i][1], chs[i][2], kernel_size=kernel_size, padding=padding[i]) for i in range(len(chs))])
@@ -65,7 +66,7 @@ class Encoder(nn.Module):
     
     def forward(self, x):
       ftrs = []
-      #x = self.periodic_upsample(x)
+      x = self.periodic_upsample(x)
       for i in range(len(self.channels)):
         ftrs.append(x)
         x =self.enc_blocks[i](x)
@@ -175,8 +176,8 @@ def fit(epochs, lr, model, train_loader, val_loader, path, opt_func=torch.optim.
         result['train_loss'] = torch.stack(train_losses).mean().item()
         model.epoch_end(epoch, result)
         history.append(result)
-    torch.save(model.state_dict(),f'{path}/Unet_dict_V9_1.pth')
-    torch.save(history,f'{path}/history_V9_1.pt')
+    torch.save(model.state_dict(),f'{path}/Unet_dict_V15.pth')
+    torch.save(history,f'{path}/history_V15.pt')
     return history
 
 def get_default_device():
@@ -226,18 +227,17 @@ def Create_Dataloader(path, batch_size = 100, percent_val = 0.2):
 if __name__ == '__main__':
     #os.chdir('F:/RWTH/HiWi_IEHK/DAMASK3/UNet/Trainingsdata')
     path_to_rep = '/home/yk138599/Hiwi/damask3'
-    use_seeds = True
+    use_seeds = False
     seed = 373686838
-    num_epochs = 200
+    num_epochs = 1000
     b_size = 32
     opt_func = torch.optim.Adam
-    lr = 0.00001
-    kernel = 5
+    lr = 0.00003
+    kernel = 7
     print(f'number auf epochs: {num_epochs}')
     print(f'batchsize: {b_size}')
     print(f'learning rate: {lr}')
     print(f'kernel size is: {kernel}')
-    print('no reflecting padding')
     if not use_seeds:
       seed = random.randrange(2**32 - 1)
     print(f' seed is: {seed}')
diff --git a/UNet/UNet_V16.py b/UNet/UNet_V16.py
new file mode 100644
index 0000000000000000000000000000000000000000..2f80c4e1beb078620fd98885e843becf365073b5
--- /dev/null
+++ b/UNet/UNet_V16.py
@@ -0,0 +1,255 @@
+
+"""UNet_V6.ipynb
+
+Automatically generated by Colaboratory.
+
+Original file is located at
+    https://colab.research.google.com/drive/1yvtk3lFo_x0ZiqtFdnR8jgcjPKy3nZA4
+"""
+
+import torch
+import torch.nn as nn
+import numpy as np
+import random
+from torch.utils.data.sampler import SubsetRandomSampler
+from torch.utils.data.dataloader import DataLoader
+from torch.utils.data import TensorDataset
+import torch.nn.functional as F
+from torch.utils.data import random_split
+from torch.nn.modules.activation import ReLU
+
+class depthwise_separable_conv(nn.Module):
+    def __init__(self, in_c, out_1_c, out_2_c, padding, kernel_size):
+        super(depthwise_separable_conv, self).__init__()
+        self.depthwise_1 = nn.Conv3d(in_c, in_c, kernel_size= kernel_size, padding=padding[0], groups=in_c, bias=True)
+        self.pointwise_1 = nn.Conv3d(in_c, out_1_c, kernel_size=1, bias=True)
+        self.batch_norm_1 = nn.BatchNorm3d(out_1_c)
+        self.relu = nn.ReLU()
+        self.dropout = nn.Dropout3d(p=0.5)
+
+        self.depthwise_2 = nn.Conv3d(out_1_c, out_1_c, kernel_size= kernel_size, padding=padding[1], groups=out_1_c, bias=True)
+        self.pointwise_2 = nn.Conv3d(out_1_c, out_2_c, kernel_size=1, bias=True)
+        self.batch_norm_2 = nn.BatchNorm3d(out_2_c)
+    def forward(self, x):
+        x = self.batch_norm_1(self.relu(self.dropout(self.pointwise_1(self.depthwise_1(x)))))
+        return self.batch_norm_2(self.relu(self.dropout(self.pointwise_2(self.depthwise_2(x)))))
+
+class convolution_Layer(nn.Module):
+    def __init__(self, in_c, out_1_c, out_2_c, padding, kernel_size):
+        super(convolution_Layer, self).__init__()
+        self.conv_1 = nn.Conv3d(in_c, out_1_c, kernel_size= kernel_size, padding=padding[0], bias=True)
+        self.batch_norm_1 = nn.BatchNorm3d(out_1_c)
+        self.relu = nn.ReLU()
+        self.conv_2 = nn.Conv3d(out_1_c, out_2_c, kernel_size= kernel_size, padding=padding[1], bias=True)
+        self.batch_norm_2 = nn.BatchNorm3d(out_2_c)
+    def forward(self, x):
+        x = self.batch_norm_1(self.relu(self.conv_1(x)))
+        return self.batch_norm_2(self.relu(self.conv_2(x)))
+
+class head_layer(nn.Module):
+    def __init__(self, in_c, out_c = 1, padding = "same"):
+        super(head_layer, self).__init__()
+        self.conv =  nn.Conv3d(in_c, out_c, kernel_size=1, bias=True)
+        self.sig = nn.Sigmoid()
+    def forward(self, x):
+        return self.sig(self.conv(x)) #convolution
+        #return self.sig(self.pointwise(self.depthwise(x))) #convolution
+
+class Encoder(nn.Module):
+    def __init__(self,kernel_size, chs, padding=((0,"same"),("same","same"),("same","same"))):
+      super().__init__()
+      self.channels = chs
+      self.enc_blocks = nn.ModuleList([depthwise_separable_conv(chs[i][0], chs[i][1], chs[i][2], kernel_size=kernel_size, padding=padding[i]) for i in range(len(chs))])
+      self.pool       = nn.MaxPool3d(kernel_size=2, stride=2)
+      #self.batch_norm = nn.ModuleList([nn.BatchNorm3d( chs[i][2]) for i in range(len(chs))])
+      self.periodic_upsample = nn.ReflectionPad3d(int((kernel_size-1)/2))
+
+    
+    def forward(self, x):
+      ftrs = []
+      x = self.periodic_upsample(x)
+      for i in range(len(self.channels)):
+        ftrs.append(x)
+        x =self.enc_blocks[i](x)
+        #print(f'size of ftrs: {ftrs[i].size()}')
+        x = self.pool(x)
+        #print(f'size of x after pooling{x.size()}')
+      ftrs.append(x)
+      #print(f'size of ftrs: {ftrs[3].size()}')
+      #print(f'length of ftrs: {len(ftrs)}')
+      return ftrs
+
+class Decoder(nn.Module):
+    def __init__(self,kernel_size, chs_upsampling, chs_conv, padding=(("same","same"),("same","same"),("same","same"))):
+        super().__init__()
+        assert len(chs_conv) == len(chs_upsampling)
+        self.chs         = chs_upsampling
+        self.upconvs    = nn.ModuleList([nn.ConvTranspose3d(chs_upsampling[i], chs_upsampling[i], 2, 2) for i in range(len(chs_upsampling))])
+        self.dec_blocks = nn.ModuleList([depthwise_separable_conv(chs_conv[i][0], chs_conv[i][1], chs_conv[i][2], kernel_size=kernel_size, padding=padding[i]) for i in range(len(chs_conv))])
+        self.head = head_layer(chs_conv[-1][2])
+    def forward(self, x, encoder_features):
+        for i in range(len(self.chs)):
+            x        = self.upconvs[i](x)
+            #print(f'size after upsampling: {x.size()}')
+            enc_ftrs = self.crop(encoder_features[i], x)
+            x        = torch.cat([x, enc_ftrs], dim=1)
+            #print(f'size after cropping&cat: {x.size()}')
+
+            x        = self.dec_blocks[i](x)
+            #print(f'size after convolution: {x.size()}')
+        x = self.head(x)    
+        return x
+    
+    def crop(self, tensor, target_tensor):
+        target_size = target_tensor.size()[2]
+        tensor_size = tensor.size()[2]
+        delta = tensor_size - target_size
+        delta = delta // 2
+        return tensor[:,:,delta:tensor_size-delta,delta:tensor_size-delta,delta:tensor_size-delta]
+
+class UNetBase(nn.Module):
+    def training_step(self, batch):
+        input, labels = batch 
+        out = self(input)                  # Generate predictions
+        loss = F.l1_loss(out, labels) # Calculate loss
+        return loss
+    
+    def validation_step(self, batch):
+        input, labels = batch 
+        out = self(input)                    # Generate predictions
+        loss = F.l1_loss(out, labels)   # Calculate loss
+        acc = accuracy(out.detach(), labels.detach(),normalization=self.normalization)         # Calculate accuracy
+        return {'val_loss': loss.detach(), 'val_acc': acc}
+        
+    def validation_epoch_end(self, outputs):
+        batch_losses = [x['val_loss'] for x in outputs]
+        epoch_loss = torch.stack(batch_losses).mean()   # Combine losses
+        batch_accs = [x['val_acc'] for x in outputs]
+        epoch_acc = torch.stack(batch_accs).mean()      # Combine accuracies
+        return {'val_loss': epoch_loss.item(), 'val_acc': epoch_acc.item()}
+    
+    def epoch_end(self, epoch, result):
+        print("Epoch [{}], train_loss: {:.6f}, val_loss: {:.6f}, val_acc: {:.6f}".format(
+            epoch, result['train_loss'], result['val_loss'], result['val_acc']))
+        
+def accuracy(outputs, labels,normalization, threshold = 0.05):
+    error = (abs((outputs) - (labels)))/(outputs+normalization[0]/normalization[1])
+    right_predic = torch.sum(error < threshold)
+    percentage = ((right_predic/torch.numel(error))*100.)
+    return percentage
+    
+class UNet(UNetBase):
+    def __init__(self,kernel_size = 5, enc_chs=((6,16,32), (32,32,64), (64,64,128)), dec_chs_up=(128, 128, 64), dec_chs_conv=((192,128, 128),(160,64,64),(70,32,32)),normalization=np.array([0,1])):
+        super().__init__()
+        self.encoder     = Encoder(kernel_size = kernel_size, chs = enc_chs)
+        self.decoder     = Decoder(kernel_size = kernel_size, chs_upsampling = dec_chs_up, chs_conv = dec_chs_conv)
+        #self.head        = depthwise_separable_conv(1, 1, padding = "same", kernel_size=1)
+        self.normalization = normalization
+
+
+    def forward(self, x):
+        enc_ftrs = self.encoder(x)
+        out      = self.decoder(enc_ftrs[::-1][0], enc_ftrs[::-1][1:])
+        #out      = self.head(out)
+        return out
+
+@torch.no_grad()
+def evaluate(model, val_loader):
+    model.eval()
+    outputs = [model.validation_step(batch) for batch in val_loader]
+    return model.validation_epoch_end(outputs)
+
+def fit(epochs, lr, model, train_loader, val_loader, path, opt_func=torch.optim.Adam):
+    history = []
+    optimizer = opt_func(model.parameters(), lr, eps=1e-07)
+    for epoch in range(epochs):
+        # Training Phase 
+        model.train()
+        train_losses = []
+        for batch in train_loader:
+            loss = model.training_step(batch)
+            train_losses.append(loss)
+            loss.backward()
+            optimizer.step()
+            optimizer.zero_grad()
+        # Validation phase
+        result = evaluate(model, val_loader)
+        result['train_loss'] = torch.stack(train_losses).mean().item()
+        model.epoch_end(epoch, result)
+        history.append(result)
+    torch.save(model.state_dict(),f'{path}/Unet_dict_V16.pth')
+    torch.save(history,f'{path}/history_V16.pt')
+    return history
+
+def get_default_device():
+    """Pick GPU if available, else CPU"""
+    if torch.cuda.is_available():
+        return torch.device('cuda')
+    else:
+      print('no GPU found')
+      return torch.device('cpu')
+      
+def to_device(data, device):
+    """Move tensor(s) to chosen device"""
+    if isinstance(data, (list,tuple)):
+        return [to_device(x, device) for x in data]
+    return data.to(device, non_blocking=True)
+
+class DeviceDataLoader():
+    """Wrap a dataloader to move data to a device"""
+    def __init__(self, dl, device):
+        self.dl = dl
+        self.device = device
+        
+    def __iter__(self):
+        """Yield a batch of data after moving it to device"""
+        for b in self.dl: 
+            yield to_device(b, self.device)
+
+    def __len__(self):
+        """Number of batches"""
+        return len(self.dl)
+
+def Create_Dataloader(path, batch_size = 100, percent_val = 0.2):
+    dataset = torch.load(path) # create the pytorch dataset 
+    #size_data = 500 #shrink dataset for colab
+    #rest = len(dataset) -size_data
+    #dataset,_ = torch.utils.data.random_split(dataset, [size_data, rest])
+    val_size = int(len(dataset) * percent_val)
+    train_size = len(dataset) - val_size
+
+    train_ds, val_ds = random_split(dataset, [train_size, val_size])
+    # Create DataLoader
+    train_dl = DataLoader(train_ds, batch_size, shuffle=True, num_workers=1, pin_memory=True)
+    valid_dl = DataLoader(val_ds, batch_size, num_workers=1, pin_memory=True)
+    
+    return train_dl, valid_dl
+
+if __name__ == '__main__':
+    #os.chdir('F:/RWTH/HiWi_IEHK/DAMASK3/UNet/Trainingsdata')
+    path_to_rep = '/home/yk138599/Hiwi/damask3'
+    use_seeds = True
+    seed = 373686838
+    num_epochs = 10000
+    b_size = 32
+    opt_func = torch.optim.Adam
+    lr = 0.00003
+    kernel = 7
+    print(f'number of epochs: {num_epochs}')
+    print(f'batchsize: {b_size}')
+    print(f'learning rate: {lr}')
+    print(f'kernel size is: {kernel}')
+    if not use_seeds:
+      seed = random.randrange(2**32 - 1)
+    print(f' seed is: {seed}')
+    torch.manual_seed(seed)
+    random.seed(seed)
+    np.random.seed(seed)
+    device = get_default_device()
+    normalization = np.load(f'{path_to_rep}/UNet/Trainingsdata/Norm_min_max_32_angles.npy', allow_pickle = True)
+    train_dl, valid_dl = Create_Dataloader(f'{path_to_rep}/UNet/Trainingsdata/TD_norm_32_angles.pt', batch_size= b_size )
+    train_dl = DeviceDataLoader(train_dl, device)
+    valid_dl = DeviceDataLoader(valid_dl, device)
+
+    model = to_device(UNet(kernel_size=kernel,normalization=normalization).double(), device)
+    history = fit(num_epochs, lr, model, train_dl, valid_dl,f'{path_to_rep}/UNet/output', opt_func)
diff --git a/UNet/UNet_V9_1.py b/UNet/UNet_V9_1.py
index 675b6d135852edcde26d9fac56b93ffe1661720a..ea851deabbf1de277ddfd6837ba07ef8cdfca4f7 100644
--- a/UNet/UNet_V9_1.py
+++ b/UNet/UNet_V9_1.py
@@ -54,18 +54,16 @@ class head_layer(nn.Module):
         #return self.sig(self.pointwise(self.depthwise(x))) #convolution
 
 class Encoder(nn.Module):
-    def __init__(self,kernel_size, chs, padding=((0,"same"),("same","same"),("same","same"))):
+    def __init__(self,kernel_size, chs, padding=(("same","same"),("same","same"),("same","same"))):
       super().__init__()
       self.channels = chs
       self.enc_blocks = nn.ModuleList([depthwise_separable_conv(chs[i][0], chs[i][1], chs[i][2], kernel_size=kernel_size, padding=padding[i]) for i in range(len(chs))])
       self.pool       = nn.MaxPool3d(kernel_size=2, stride=2)
       #self.batch_norm = nn.ModuleList([nn.BatchNorm3d( chs[i][2]) for i in range(len(chs))])
-      self.periodic_upsample = nn.ReflectionPad3d(int((kernel_size-1)/2))
 
     
     def forward(self, x):
       ftrs = []
-      x = self.periodic_upsample(x)
       for i in range(len(self.channels)):
         ftrs.append(x)
         x =self.enc_blocks[i](x)
diff --git a/UNet/core.ncg05.hpc.itc.rwth-aachen.de.120012.7 b/UNet/core.ncg05.hpc.itc.rwth-aachen.de.120012.7
new file mode 100644
index 0000000000000000000000000000000000000000..d4b727b97eb6b678a192ba1b831596cc043c613f
Binary files /dev/null and b/UNet/core.ncg05.hpc.itc.rwth-aachen.de.120012.7 differ
diff --git a/UNet/core.ncg21.hpc.itc.rwth-aachen.de.42655.7 b/UNet/core.ncg21.hpc.itc.rwth-aachen.de.42655.7
new file mode 100644
index 0000000000000000000000000000000000000000..6a3aeca25208d739c62a572e46c03fa9fbe171c2
Binary files /dev/null and b/UNet/core.ncg21.hpc.itc.rwth-aachen.de.42655.7 differ
diff --git a/UNet/core.ncg21.hpc.itc.rwth-aachen.de.53659.7 b/UNet/core.ncg21.hpc.itc.rwth-aachen.de.53659.7
new file mode 100644
index 0000000000000000000000000000000000000000..99d9e1592ff250ca4e5217c3d3b6c43fab393839
Binary files /dev/null and b/UNet/core.ncg21.hpc.itc.rwth-aachen.de.53659.7 differ
diff --git a/UNet/core.nrg05.hpc.itc.rwth-aachen.de.75892.6 b/UNet/core.nrg05.hpc.itc.rwth-aachen.de.75892.6
new file mode 100644
index 0000000000000000000000000000000000000000..023619fd8e7c563f5dcd305a761bb092dae3fdf1
Binary files /dev/null and b/UNet/core.nrg05.hpc.itc.rwth-aachen.de.75892.6 differ