14장 모델의 성능 향상시키기¶
1. 데이터의 확인과 검증셋¶
In [1]:
Copied!
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from sklearn.model_selection import train_test_split
import pandas as pd

# Load the wine dataset. The CSV has no header row, so columns are
# addressed purely by position (0..12).
df = pd.read_csv('./data/wine.csv', header=None)

# Preview the data — the bare last expression renders the DataFrame.
# NOTE: the original cell body was duplicated verbatim (an export artifact),
# loading the file twice; the duplicate has been removed.
df
Out[1]:
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 7.4 | 0.70 | 0.00 | 1.9 | 0.076 | 11.0 | 34.0 | 0.99780 | 3.51 | 0.56 | 9.4 | 5 | 1 |
| 1 | 7.8 | 0.88 | 0.00 | 2.6 | 0.098 | 25.0 | 67.0 | 0.99680 | 3.20 | 0.68 | 9.8 | 5 | 1 |
| 2 | 7.8 | 0.76 | 0.04 | 2.3 | 0.092 | 15.0 | 54.0 | 0.99700 | 3.26 | 0.65 | 9.8 | 5 | 1 |
| 3 | 11.2 | 0.28 | 0.56 | 1.9 | 0.075 | 17.0 | 60.0 | 0.99800 | 3.16 | 0.58 | 9.8 | 6 | 1 |
| 4 | 7.4 | 0.70 | 0.00 | 1.9 | 0.076 | 11.0 | 34.0 | 0.99780 | 3.51 | 0.56 | 9.4 | 5 | 1 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 6492 | 6.2 | 0.21 | 0.29 | 1.6 | 0.039 | 24.0 | 92.0 | 0.99114 | 3.27 | 0.50 | 11.2 | 6 | 0 |
| 6493 | 6.6 | 0.32 | 0.36 | 8.0 | 0.047 | 57.0 | 168.0 | 0.99490 | 3.15 | 0.46 | 9.6 | 5 | 0 |
| 6494 | 6.5 | 0.24 | 0.19 | 1.2 | 0.041 | 30.0 | 111.0 | 0.99254 | 2.99 | 0.46 | 9.4 | 6 | 0 |
| 6495 | 5.5 | 0.29 | 0.30 | 1.1 | 0.022 | 20.0 | 110.0 | 0.98869 | 3.34 | 0.38 | 12.8 | 7 | 0 |
| 6496 | 6.0 | 0.21 | 0.38 | 0.8 | 0.020 | 22.0 | 98.0 | 0.98941 | 3.26 | 0.32 | 11.8 | 6 | 0 |
6497 rows × 13 columns
In [2]:
Copied!
# Store the wine attributes (columns 0..11) in X and the wine class
# label (column 12, red=1 / white=0) in y.
# NOTE: the original cell body was duplicated verbatim; the duplicate
# assignment pair has been removed.
X = df.iloc[:, 0:12]
y = df.iloc[:, 12]
In [3]:
Copied!
# Split into a training set and a test set (80/20, shuffled).
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, shuffle=True)

# Define the model architecture: 12 inputs -> 30 -> 12 -> 8 -> 1 (sigmoid
# output for binary classification).
model = Sequential()
model.add(Dense(30, input_dim=12, activation='relu'))
model.add(Dense(12, activation='relu'))
model.add(Dense(8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
model.summary()

# Compile the model for binary classification.
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

# Train the model. validation_split=0.25 carves the validation set out of
# the training portion: 0.8 x 0.25 = 0.2 of the full data.
# NOTE: the original cell body was duplicated verbatim (export artifact),
# which split/built/trained everything twice; the duplicate was removed.
history = model.fit(X_train, y_train, epochs=50, batch_size=500, validation_split=0.25)
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense (Dense) (None, 30) 390 _________________________________________________________________ dense_1 (Dense) (None, 12) 372 _________________________________________________________________ dense_2 (Dense) (None, 8) 104 _________________________________________________________________ dense_3 (Dense) (None, 1) 9 ================================================================= Total params: 875 Trainable params: 875 Non-trainable params: 0 _________________________________________________________________ Epoch 1/50 8/8 [==============================] - 1s 23ms/step - loss: 2.9423 - accuracy: 0.7519 - val_loss: 2.2360 - val_accuracy: 0.7562 Epoch 2/50 8/8 [==============================] - ETA: 0s - loss: 2.0859 - accuracy: 0.76 - 0s 6ms/step - loss: 1.7741 - accuracy: 0.7729 - val_loss: 1.3866 - val_accuracy: 0.7892 Epoch 3/50 8/8 [==============================] - 0s 6ms/step - loss: 1.0029 - accuracy: 0.8181 - val_loss: 0.6412 - val_accuracy: 0.8469 Epoch 4/50 8/8 [==============================] - 0s 5ms/step - loss: 0.4439 - accuracy: 0.8571 - val_loss: 0.4369 - val_accuracy: 0.8062 Epoch 5/50 8/8 [==============================] - 0s 6ms/step - loss: 0.4373 - accuracy: 0.8011 - val_loss: 0.3428 - val_accuracy: 0.8585 Epoch 6/50 8/8 [==============================] - 0s 7ms/step - loss: 0.3310 - accuracy: 0.8758 - val_loss: 0.3595 - val_accuracy: 0.8808 Epoch 7/50 8/8 [==============================] - 0s 5ms/step - loss: 0.3168 - accuracy: 0.8861 - val_loss: 0.3108 - val_accuracy: 0.8869 Epoch 8/50 8/8 [==============================] - 0s 6ms/step - loss: 0.2782 - accuracy: 0.8891 - val_loss: 0.2877 - val_accuracy: 0.8769 Epoch 9/50 8/8 [==============================] - 0s 6ms/step - loss: 0.2509 - accuracy: 0.9012 - val_loss: 0.2469 - val_accuracy: 0.9108 Epoch 10/50 8/8 
[==============================] - 0s 6ms/step - loss: 0.2162 - accuracy: 0.9264 - val_loss: 0.2369 - val_accuracy: 0.9200 Epoch 11/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1951 - accuracy: 0.9361 - val_loss: 0.2283 - val_accuracy: 0.9177 Epoch 12/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1892 - accuracy: 0.9369 - val_loss: 0.2269 - val_accuracy: 0.9246 Epoch 13/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1843 - accuracy: 0.9361 - val_loss: 0.2160 - val_accuracy: 0.9269 Epoch 14/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1781 - accuracy: 0.9374 - val_loss: 0.2083 - val_accuracy: 0.9277 Epoch 15/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1740 - accuracy: 0.9366 - val_loss: 0.2016 - val_accuracy: 0.9277 Epoch 16/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1708 - accuracy: 0.9371 - val_loss: 0.1986 - val_accuracy: 0.9285 Epoch 17/50 8/8 [==============================] - 0s 7ms/step - loss: 0.1682 - accuracy: 0.9356 - val_loss: 0.1956 - val_accuracy: 0.9300 Epoch 18/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1663 - accuracy: 0.9371 - val_loss: 0.1965 - val_accuracy: 0.9308 Epoch 19/50 8/8 [==============================] - 0s 5ms/step - loss: 0.1635 - accuracy: 0.9379 - val_loss: 0.1916 - val_accuracy: 0.9285 Epoch 20/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1613 - accuracy: 0.9397 - val_loss: 0.1906 - val_accuracy: 0.9315 Epoch 21/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1600 - accuracy: 0.9397 - val_loss: 0.1877 - val_accuracy: 0.9308 Epoch 22/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1587 - accuracy: 0.9389 - val_loss: 0.1874 - val_accuracy: 0.9331 Epoch 23/50 8/8 [==============================] - 0s 5ms/step - loss: 0.1562 - accuracy: 0.9410 - val_loss: 0.1875 - val_accuracy: 0.9323 Epoch 24/50 8/8 [==============================] - 0s 5ms/step - 
loss: 0.1537 - accuracy: 0.9418 - val_loss: 0.1841 - val_accuracy: 0.9323 Epoch 25/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1539 - accuracy: 0.9410 - val_loss: 0.1800 - val_accuracy: 0.9338 Epoch 26/50 8/8 [==============================] - 0s 5ms/step - loss: 0.1503 - accuracy: 0.9433 - val_loss: 0.1836 - val_accuracy: 0.9331 Epoch 27/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1503 - accuracy: 0.9428 - val_loss: 0.1795 - val_accuracy: 0.9354 Epoch 28/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1472 - accuracy: 0.9441 - val_loss: 0.1775 - val_accuracy: 0.9392 Epoch 29/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1472 - accuracy: 0.9443 - val_loss: 0.1784 - val_accuracy: 0.9338 Epoch 30/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1436 - accuracy: 0.9469 - val_loss: 0.1735 - val_accuracy: 0.9377 Epoch 31/50 8/8 [==============================] - 0s 5ms/step - loss: 0.1426 - accuracy: 0.9482 - val_loss: 0.1751 - val_accuracy: 0.9438 Epoch 32/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1412 - accuracy: 0.9484 - val_loss: 0.1798 - val_accuracy: 0.9346 Epoch 33/50 8/8 [==============================] - 0s 5ms/step - loss: 0.1410 - accuracy: 0.9469 - val_loss: 0.1727 - val_accuracy: 0.9377 Epoch 34/50 8/8 [==============================] - 0s 5ms/step - loss: 0.1375 - accuracy: 0.9482 - val_loss: 0.1673 - val_accuracy: 0.9415 Epoch 35/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1363 - accuracy: 0.9494 - val_loss: 0.1660 - val_accuracy: 0.9415 Epoch 36/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1340 - accuracy: 0.9505 - val_loss: 0.1656 - val_accuracy: 0.9438 Epoch 37/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1349 - accuracy: 0.9502 - val_loss: 0.1699 - val_accuracy: 0.9392 Epoch 38/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1320 - accuracy: 0.9518 - val_loss: 0.1665 - 
val_accuracy: 0.9408 Epoch 39/50 8/8 [==============================] - 0s 5ms/step - loss: 0.1307 - accuracy: 0.9525 - val_loss: 0.1619 - val_accuracy: 0.9462 Epoch 40/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1286 - accuracy: 0.9515 - val_loss: 0.1610 - val_accuracy: 0.9469 Epoch 41/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1280 - accuracy: 0.9515 - val_loss: 0.1602 - val_accuracy: 0.9477 Epoch 42/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1256 - accuracy: 0.9523 - val_loss: 0.1616 - val_accuracy: 0.9431 Epoch 43/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1258 - accuracy: 0.9518 - val_loss: 0.1619 - val_accuracy: 0.9431 Epoch 44/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1246 - accuracy: 0.9541 - val_loss: 0.1629 - val_accuracy: 0.9423 Epoch 45/50 8/8 [==============================] - 0s 5ms/step - loss: 0.1224 - accuracy: 0.9530 - val_loss: 0.1570 - val_accuracy: 0.9477 Epoch 46/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1240 - accuracy: 0.9525 - val_loss: 0.1558 - val_accuracy: 0.9485 Epoch 47/50 8/8 [==============================] - 0s 5ms/step - loss: 0.1204 - accuracy: 0.9543 - val_loss: 0.1557 - val_accuracy: 0.9485 Epoch 48/50 8/8 [==============================] - 0s 5ms/step - loss: 0.1181 - accuracy: 0.9556 - val_loss: 0.1552 - val_accuracy: 0.9462 Epoch 49/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1193 - accuracy: 0.9564 - val_loss: 0.1593 - val_accuracy: 0.9446 Epoch 50/50 8/8 [==============================] - 0s 6ms/step - loss: 0.1161 - accuracy: 0.9574 - val_loss: 0.1523 - val_accuracy: 0.9500
In [4]:
Copied!
# Evaluate on the held-out test set and print the accuracy.
# score[0] is the loss, score[1] the accuracy (order follows compile metrics).
# NOTE: the original cell body was duplicated verbatim; the duplicate
# evaluation has been removed.
score = model.evaluate(X_test, y_test)
print('Test accuracy:', score[1])
41/41 [==============================] - 0s 1ms/step - loss: 0.1438 - accuracy: 0.9415 Test accuracy: 0.9415384531021118
2. 모델 업데이트하기¶
기본 코드 불러오기¶
In [5]:
Copied!
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.callbacks import ModelCheckpoint
from sklearn.model_selection import train_test_split
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

# Load the wine dataset (no header row; columns addressed by position).
df = pd.read_csv('./data/wine.csv', header=None)

# Store the wine attributes in X and the wine class label in y.
X = df.iloc[:, 0:12]
y = df.iloc[:, 12]

# Split into a training set and a test set (80/20, shuffled).
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, shuffle=True)

# Define the model architecture: 12 inputs -> 30 -> 12 -> 8 -> 1 (sigmoid).
model = Sequential()
model.add(Dense(30, input_dim=12, activation='relu'))
model.add(Dense(12, activation='relu'))
model.add(Dense(8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
model.summary()

# Compile the model for binary classification.
# NOTE: the original cell body was duplicated verbatim (export artifact);
# the duplicate has been removed.
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
Model: "sequential_1" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_4 (Dense) (None, 30) 390 _________________________________________________________________ dense_5 (Dense) (None, 12) 372 _________________________________________________________________ dense_6 (Dense) (None, 8) 104 _________________________________________________________________ dense_7 (Dense) (None, 1) 9 ================================================================= Total params: 875 Trainable params: 875 Non-trainable params: 0 _________________________________________________________________
모델의 저장 설정 및 실행¶
In [6]:
Copied!
# Configure model saving: the filename template embeds the epoch number
# and validation accuracy, so every epoch produces a distinct file.
modelpath = "./data/model/all/{epoch:02d}-{val_accuracy:.4f}.keras"
checkpointer = ModelCheckpoint(filepath=modelpath, verbose=1)

# Train with verbose=0 so only the checkpoint messages are printed.
# 0.8 x 0.25 = 0.2 of the full data is used for validation.
# NOTE: the original cell body was duplicated verbatim; the duplicate
# (which re-created the callback and re-trained) has been removed.
history = model.fit(X_train, y_train, epochs=50, batch_size=500,
                    validation_split=0.25, verbose=0, callbacks=[checkpointer])
Epoch 00001: saving model to ../data/model/all\01-0.7646.hdf5 Epoch 00002: saving model to ../data/model/all\02-0.7646.hdf5 Epoch 00003: saving model to ../data/model/all\03-0.7646.hdf5 Epoch 00004: saving model to ../data/model/all\04-0.7646.hdf5 Epoch 00005: saving model to ../data/model/all\05-0.7646.hdf5 Epoch 00006: saving model to ../data/model/all\06-0.7646.hdf5 Epoch 00007: saving model to ../data/model/all\07-0.8669.hdf5 Epoch 00008: saving model to ../data/model/all\08-0.8900.hdf5 Epoch 00009: saving model to ../data/model/all\09-0.9031.hdf5 Epoch 00010: saving model to ../data/model/all\10-0.9031.hdf5 Epoch 00011: saving model to ../data/model/all\11-0.9085.hdf5 Epoch 00012: saving model to ../data/model/all\12-0.9131.hdf5 Epoch 00013: saving model to ../data/model/all\13-0.9177.hdf5 Epoch 00014: saving model to ../data/model/all\14-0.9215.hdf5 Epoch 00015: saving model to ../data/model/all\15-0.9277.hdf5 Epoch 00016: saving model to ../data/model/all\16-0.9285.hdf5 Epoch 00017: saving model to ../data/model/all\17-0.9269.hdf5 Epoch 00018: saving model to ../data/model/all\18-0.9177.hdf5 Epoch 00019: saving model to ../data/model/all\19-0.9246.hdf5 Epoch 00020: saving model to ../data/model/all\20-0.9331.hdf5 Epoch 00021: saving model to ../data/model/all\21-0.9331.hdf5 Epoch 00022: saving model to ../data/model/all\22-0.9331.hdf5 Epoch 00023: saving model to ../data/model/all\23-0.9338.hdf5 Epoch 00024: saving model to ../data/model/all\24-0.9338.hdf5 Epoch 00025: saving model to ../data/model/all\25-0.9331.hdf5 Epoch 00026: saving model to ../data/model/all\26-0.9323.hdf5 Epoch 00027: saving model to ../data/model/all\27-0.9338.hdf5 Epoch 00028: saving model to ../data/model/all\28-0.9338.hdf5 Epoch 00029: saving model to ../data/model/all\29-0.9338.hdf5 Epoch 00030: saving model to ../data/model/all\30-0.9346.hdf5 Epoch 00031: saving model to ../data/model/all\31-0.9354.hdf5 Epoch 00032: saving model to ../data/model/all\32-0.9377.hdf5 Epoch 00033: 
saving model to ../data/model/all\33-0.9377.hdf5 Epoch 00034: saving model to ../data/model/all\34-0.9385.hdf5 Epoch 00035: saving model to ../data/model/all\35-0.9385.hdf5 Epoch 00036: saving model to ../data/model/all\36-0.9392.hdf5 Epoch 00037: saving model to ../data/model/all\37-0.9385.hdf5 Epoch 00038: saving model to ../data/model/all\38-0.9392.hdf5 Epoch 00039: saving model to ../data/model/all\39-0.9385.hdf5 Epoch 00040: saving model to ../data/model/all\40-0.9385.hdf5 Epoch 00041: saving model to ../data/model/all\41-0.9392.hdf5 Epoch 00042: saving model to ../data/model/all\42-0.9369.hdf5 Epoch 00043: saving model to ../data/model/all\43-0.9392.hdf5 Epoch 00044: saving model to ../data/model/all\44-0.9392.hdf5 Epoch 00045: saving model to ../data/model/all\45-0.9392.hdf5 Epoch 00046: saving model to ../data/model/all\46-0.9400.hdf5 Epoch 00047: saving model to ../data/model/all\47-0.9408.hdf5 Epoch 00048: saving model to ../data/model/all\48-0.9408.hdf5 Epoch 00049: saving model to ../data/model/all\49-0.9408.hdf5 Epoch 00050: saving model to ../data/model/all\50-0.9408.hdf5
In [7]:
Copied!
# Evaluate on the held-out test set and print the accuracy
# (score[0] = loss, score[1] = accuracy).
# NOTE: the original cell body was duplicated verbatim; the duplicate
# evaluation has been removed.
score = model.evaluate(X_test, y_test)
print('Test accuracy:', score[1])
41/41 [==============================] - 0s 2ms/step - loss: 0.1686 - accuracy: 0.9392 Test accuracy: 0.939230740070343
3. 그래프로 과적합 확인하기¶
In [8]:
Copied!
# Long training run (2000 epochs) to visualize overfitting in the next
# cells. This may take a while depending on your machine.
# NOTE: the original cell body was duplicated verbatim, which would have
# trained for 4000 epochs in total; the duplicate has been removed.
history = model.fit(X_train, y_train, epochs=2000, batch_size=500,
                    verbose=0, validation_split=0.25)
In [9]:
Copied!
# Inspect the per-epoch metrics recorded in history.history
# (loss / accuracy / val_loss / val_accuracy, one row per epoch).
# NOTE: the original cell body was duplicated verbatim; the duplicate
# has been removed.
hist_df = pd.DataFrame(history.history)
hist_df
Out[9]:
| loss | accuracy | val_loss | val_accuracy | |
|---|---|---|---|---|
| 0 | 0.147854 | 0.947395 | 0.163222 | 0.938462 |
| 1 | 0.146364 | 0.948165 | 0.161854 | 0.940769 |
| 2 | 0.145011 | 0.947909 | 0.160797 | 0.940000 |
| 3 | 0.144104 | 0.947909 | 0.158959 | 0.946154 |
| 4 | 0.142609 | 0.948935 | 0.159250 | 0.940769 |
| ... | ... | ... | ... | ... |
| 1995 | 0.030691 | 0.992045 | 0.071841 | 0.983077 |
| 1996 | 0.030978 | 0.992558 | 0.070400 | 0.985385 |
| 1997 | 0.030977 | 0.992045 | 0.074907 | 0.985385 |
| 1998 | 0.032843 | 0.990762 | 0.078405 | 0.984615 |
| 1999 | 0.033346 | 0.990762 | 0.068213 | 0.984615 |
2000 rows × 4 columns
In [10]:
Copied!
# y_vloss holds the validation-set loss (used here as a proxy test set).
y_vloss = hist_df['val_loss']

# y_loss holds the training-set loss.
y_loss = hist_df['loss']

# Plot validation loss in red and training loss in blue over epochs;
# the widening gap between the two curves reveals overfitting.
# NOTE: the original cell body was duplicated verbatim; the duplicate
# plotting code has been removed.
x_len = np.arange(len(y_loss))
plt.plot(x_len, y_vloss, "o", c="red", markersize=2, label='Testset_loss')
plt.plot(x_len, y_loss, "o", c="blue", markersize=2, label='Trainset_loss')
plt.legend(loc='upper right')
plt.xlabel('epoch')
plt.ylabel('loss')
plt.show()
4. 학습의 자동 중단¶
기본 코드 불러오기¶
In [11]:
Copied!
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from sklearn.model_selection import train_test_split
from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping
import os
import pandas as pd

# Load the wine dataset (no header row; columns addressed by position).
df = pd.read_csv('./data/wine.csv', header=None)

# Store the wine attributes in X and the wine class label in y.
X = df.iloc[:, 0:12]
y = df.iloc[:, 12]

# Split into a training set and a test set (80/20, shuffled).
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, shuffle=True)

# Define the model architecture: 12 inputs -> 30 -> 12 -> 8 -> 1 (sigmoid).
model = Sequential()
model.add(Dense(30, input_dim=12, activation='relu'))
model.add(Dense(12, activation='relu'))
model.add(Dense(8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
model.summary()

# Compile the model for binary classification.
# NOTE: the original cell body was duplicated verbatim (export artifact);
# the duplicate has been removed.
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
Model: "sequential_2" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_8 (Dense) (None, 30) 390 _________________________________________________________________ dense_9 (Dense) (None, 12) 372 _________________________________________________________________ dense_10 (Dense) (None, 8) 104 _________________________________________________________________ dense_11 (Dense) (None, 1) 9 ================================================================= Total params: 875 Trainable params: 875 Non-trainable params: 0 _________________________________________________________________
학습의 자동 중단 및 최적화 모델 저장¶
In [12]:
Copied!
# Stop training automatically once val_loss has not improved for 20 epochs.
early_stopping_callback = EarlyStopping(monitor='val_loss', patience=20)

# Folder/filename where the best model will be saved.
modelpath = "./data/model/Ch14-4-bestmodel.keras"

# Save only the best model seen so far (lowest validation loss).
checkpointer = ModelCheckpoint(filepath=modelpath, monitor='val_loss',
                               verbose=0, save_best_only=True)

# Train the model. verbose=1 prints per-epoch progress (set verbose=0 to
# silence it).
# FIX: the original call passed the `verbose` keyword twice
# (verbose=1 ... verbose=0), which is a SyntaxError in Python; a single
# verbose=1 is kept, matching the epoch log shown in the output below.
# The duplicated cell body (export artifact) has also been removed.
history = model.fit(X_train, y_train, epochs=2000, batch_size=500,
                    validation_split=0.25, verbose=1,
                    callbacks=[early_stopping_callback, checkpointer])
Epoch 1/2000 8/8 [==============================] - 0s 18ms/step - loss: 21.4771 - accuracy: 0.2494 - val_loss: 14.8183 - val_accuracy: 0.2462 Epoch 2/2000 8/8 [==============================] - 0s 5ms/step - loss: 11.2518 - accuracy: 0.2494 - val_loss: 7.2853 - val_accuracy: 0.2462 Epoch 3/2000 8/8 [==============================] - 0s 5ms/step - loss: 4.9423 - accuracy: 0.2476 - val_loss: 1.8426 - val_accuracy: 0.2200 Epoch 4/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.7835 - accuracy: 0.6074 - val_loss: 0.3719 - val_accuracy: 0.8031 Epoch 5/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.4234 - accuracy: 0.8004 - val_loss: 0.4128 - val_accuracy: 0.8092 Epoch 6/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.4370 - accuracy: 0.8163 - val_loss: 0.3641 - val_accuracy: 0.8300 Epoch 7/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.3622 - accuracy: 0.8399 - val_loss: 0.2913 - val_accuracy: 0.8692 Epoch 8/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.3164 - accuracy: 0.8799 - val_loss: 0.3087 - val_accuracy: 0.9000 Epoch 9/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.3129 - accuracy: 0.8835 - val_loss: 0.2807 - val_accuracy: 0.8900 Epoch 10/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.3031 - accuracy: 0.8737 - val_loss: 0.2762 - val_accuracy: 0.8908 Epoch 11/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.2984 - accuracy: 0.8812 - val_loss: 0.2730 - val_accuracy: 0.9015 Epoch 12/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.2931 - accuracy: 0.8909 - val_loss: 0.2693 - val_accuracy: 0.9085 Epoch 13/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.2887 - accuracy: 0.8945 - val_loss: 0.2633 - val_accuracy: 0.9108 Epoch 14/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.2845 - accuracy: 0.8958 - val_loss: 0.2577 - val_accuracy: 0.9123 Epoch 15/2000 8/8 
[==============================] - 0s 6ms/step - loss: 0.2801 - accuracy: 0.8984 - val_loss: 0.2529 - val_accuracy: 0.9162 Epoch 16/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.2744 - accuracy: 0.9038 - val_loss: 0.2461 - val_accuracy: 0.9208 Epoch 17/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.2662 - accuracy: 0.9112 - val_loss: 0.2346 - val_accuracy: 0.9277 Epoch 18/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.2577 - accuracy: 0.9197 - val_loss: 0.2285 - val_accuracy: 0.9308 Epoch 19/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.2539 - accuracy: 0.9202 - val_loss: 0.2227 - val_accuracy: 0.9308 Epoch 20/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.2496 - accuracy: 0.9212 - val_loss: 0.2175 - val_accuracy: 0.9315 Epoch 21/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.2455 - accuracy: 0.9215 - val_loss: 0.2134 - val_accuracy: 0.9338 Epoch 22/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.2418 - accuracy: 0.9212 - val_loss: 0.2092 - val_accuracy: 0.9346 Epoch 23/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.2383 - accuracy: 0.9212 - val_loss: 0.2057 - val_accuracy: 0.9354 Epoch 24/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.2347 - accuracy: 0.9220 - val_loss: 0.2018 - val_accuracy: 0.9354 Epoch 25/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.2310 - accuracy: 0.9243 - val_loss: 0.1990 - val_accuracy: 0.9346 Epoch 26/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.2270 - accuracy: 0.9240 - val_loss: 0.1952 - val_accuracy: 0.9369 Epoch 27/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.2237 - accuracy: 0.9246 - val_loss: 0.1920 - val_accuracy: 0.9377 Epoch 28/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.2209 - accuracy: 0.9261 - val_loss: 0.1899 - val_accuracy: 0.9377 Epoch 29/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.2183 - accuracy: 0.9264 - val_loss: 0.1872 - val_accuracy: 0.9377 Epoch 30/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.2158 - accuracy: 0.9271 - val_loss: 0.1835 - val_accuracy: 0.9392 Epoch 31/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.2146 - accuracy: 0.9281 - val_loss: 0.1839 - val_accuracy: 0.9369 Epoch 32/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.2128 - accuracy: 0.9297 - val_loss: 0.1806 - val_accuracy: 0.9392 Epoch 33/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.2100 - accuracy: 0.9307 - val_loss: 0.1789 - val_accuracy: 0.9400 Epoch 34/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.2083 - accuracy: 0.9323 - val_loss: 0.1777 - val_accuracy: 0.9392 Epoch 35/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.2066 - accuracy: 0.9305 - val_loss: 0.1752 - val_accuracy: 0.9423 Epoch 36/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.2043 - accuracy: 0.9333 - val_loss: 0.1738 - val_accuracy: 0.9438 Epoch 37/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.2025 - accuracy: 0.9325 - val_loss: 0.1726 - val_accuracy: 0.9423 Epoch 38/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.2015 - accuracy: 0.9341 - val_loss: 0.1703 - val_accuracy: 0.9446 Epoch 39/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1999 - accuracy: 0.9325 - val_loss: 0.1695 - val_accuracy: 0.9438 Epoch 40/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1980 - accuracy: 0.9348 - val_loss: 0.1678 - val_accuracy: 0.9462 Epoch 41/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1962 - accuracy: 0.9341 - val_loss: 0.1665 - val_accuracy: 0.9469 Epoch 42/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1946 - accuracy: 0.9338 - val_loss: 0.1649 - val_accuracy: 0.9469 Epoch 43/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.1932 - accuracy: 0.9364 - val_loss: 0.1634 - val_accuracy: 0.9485 Epoch 44/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1929 - accuracy: 0.9358 - val_loss: 0.1608 - val_accuracy: 0.9462 Epoch 45/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1907 - accuracy: 0.9348 - val_loss: 0.1614 - val_accuracy: 0.9485 Epoch 46/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1889 - accuracy: 0.9369 - val_loss: 0.1597 - val_accuracy: 0.9477 Epoch 47/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1878 - accuracy: 0.9389 - val_loss: 0.1590 - val_accuracy: 0.9500 Epoch 48/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1858 - accuracy: 0.9364 - val_loss: 0.1567 - val_accuracy: 0.9508 Epoch 49/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1842 - accuracy: 0.9389 - val_loss: 0.1552 - val_accuracy: 0.9508 Epoch 50/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1831 - accuracy: 0.9371 - val_loss: 0.1531 - val_accuracy: 0.9500 Epoch 51/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1809 - accuracy: 0.9400 - val_loss: 0.1521 - val_accuracy: 0.9508 Epoch 52/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1799 - accuracy: 0.9389 - val_loss: 0.1497 - val_accuracy: 0.9477 Epoch 53/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1788 - accuracy: 0.9397 - val_loss: 0.1524 - val_accuracy: 0.9508 Epoch 54/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1766 - accuracy: 0.9407 - val_loss: 0.1473 - val_accuracy: 0.9508 Epoch 55/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1738 - accuracy: 0.9423 - val_loss: 0.1487 - val_accuracy: 0.9515 Epoch 56/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1712 - accuracy: 0.9420 - val_loss: 0.1436 - val_accuracy: 0.9508 Epoch 57/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.1703 - accuracy: 0.9423 - val_loss: 0.1478 - val_accuracy: 0.9500 Epoch 58/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1677 - accuracy: 0.9423 - val_loss: 0.1410 - val_accuracy: 0.9515 Epoch 59/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1646 - accuracy: 0.9433 - val_loss: 0.1406 - val_accuracy: 0.9515 Epoch 60/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1625 - accuracy: 0.9425 - val_loss: 0.1380 - val_accuracy: 0.9508 Epoch 61/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1606 - accuracy: 0.9433 - val_loss: 0.1392 - val_accuracy: 0.9515 Epoch 62/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1587 - accuracy: 0.9425 - val_loss: 0.1346 - val_accuracy: 0.9523 Epoch 63/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1570 - accuracy: 0.9433 - val_loss: 0.1328 - val_accuracy: 0.9531 Epoch 64/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1550 - accuracy: 0.9446 - val_loss: 0.1312 - val_accuracy: 0.9523 Epoch 65/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1550 - accuracy: 0.9456 - val_loss: 0.1370 - val_accuracy: 0.9523 Epoch 66/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1512 - accuracy: 0.9433 - val_loss: 0.1284 - val_accuracy: 0.9538 Epoch 67/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1504 - accuracy: 0.9435 - val_loss: 0.1288 - val_accuracy: 0.9546 Epoch 68/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1483 - accuracy: 0.9446 - val_loss: 0.1269 - val_accuracy: 0.9554 Epoch 69/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1473 - accuracy: 0.9453 - val_loss: 0.1299 - val_accuracy: 0.9538 Epoch 70/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1468 - accuracy: 0.9456 - val_loss: 0.1250 - val_accuracy: 0.9554 Epoch 71/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.1473 - accuracy: 0.9453 - val_loss: 0.1261 - val_accuracy: 0.9546 Epoch 72/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1430 - accuracy: 0.9461 - val_loss: 0.1232 - val_accuracy: 0.9577 Epoch 73/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1418 - accuracy: 0.9471 - val_loss: 0.1210 - val_accuracy: 0.9569 Epoch 74/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1410 - accuracy: 0.9471 - val_loss: 0.1222 - val_accuracy: 0.9600 Epoch 75/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1387 - accuracy: 0.9489 - val_loss: 0.1196 - val_accuracy: 0.9608 Epoch 76/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1376 - accuracy: 0.9492 - val_loss: 0.1187 - val_accuracy: 0.9608 Epoch 77/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1366 - accuracy: 0.9492 - val_loss: 0.1167 - val_accuracy: 0.9577 Epoch 78/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1362 - accuracy: 0.9484 - val_loss: 0.1173 - val_accuracy: 0.9608 Epoch 79/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1345 - accuracy: 0.9500 - val_loss: 0.1200 - val_accuracy: 0.9592 Epoch 80/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1342 - accuracy: 0.9494 - val_loss: 0.1151 - val_accuracy: 0.9592 Epoch 81/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1368 - accuracy: 0.9497 - val_loss: 0.1128 - val_accuracy: 0.9608 Epoch 82/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1343 - accuracy: 0.9507 - val_loss: 0.1237 - val_accuracy: 0.9592 Epoch 83/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1365 - accuracy: 0.9497 - val_loss: 0.1117 - val_accuracy: 0.9623 Epoch 84/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1313 - accuracy: 0.9551 - val_loss: 0.1108 - val_accuracy: 0.9608 Epoch 85/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.1307 - accuracy: 0.9512 - val_loss: 0.1189 - val_accuracy: 0.9623 Epoch 86/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1301 - accuracy: 0.9523 - val_loss: 0.1122 - val_accuracy: 0.9615 Epoch 87/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1268 - accuracy: 0.9543 - val_loss: 0.1086 - val_accuracy: 0.9631 Epoch 88/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1255 - accuracy: 0.9541 - val_loss: 0.1085 - val_accuracy: 0.9631 Epoch 89/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1251 - accuracy: 0.9538 - val_loss: 0.1133 - val_accuracy: 0.9638 Epoch 90/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1236 - accuracy: 0.9546 - val_loss: 0.1067 - val_accuracy: 0.9623 Epoch 91/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1232 - accuracy: 0.9561 - val_loss: 0.1058 - val_accuracy: 0.9631 Epoch 92/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1225 - accuracy: 0.9546 - val_loss: 0.1085 - val_accuracy: 0.9631 Epoch 93/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1207 - accuracy: 0.9556 - val_loss: 0.1058 - val_accuracy: 0.9677 Epoch 94/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1199 - accuracy: 0.9579 - val_loss: 0.1070 - val_accuracy: 0.9669 Epoch 95/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1188 - accuracy: 0.9577 - val_loss: 0.1031 - val_accuracy: 0.9638 Epoch 96/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1186 - accuracy: 0.9569 - val_loss: 0.1048 - val_accuracy: 0.9669 Epoch 97/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1170 - accuracy: 0.9577 - val_loss: 0.1018 - val_accuracy: 0.9646 Epoch 98/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1172 - accuracy: 0.9577 - val_loss: 0.1028 - val_accuracy: 0.9685 Epoch 99/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.1162 - accuracy: 0.9571 - val_loss: 0.1023 - val_accuracy: 0.9677 Epoch 100/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1158 - accuracy: 0.9615 - val_loss: 0.1001 - val_accuracy: 0.9654 Epoch 101/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1156 - accuracy: 0.9571 - val_loss: 0.1099 - val_accuracy: 0.9662 Epoch 102/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1156 - accuracy: 0.9574 - val_loss: 0.0994 - val_accuracy: 0.9677 Epoch 103/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1152 - accuracy: 0.9613 - val_loss: 0.0987 - val_accuracy: 0.9638 Epoch 104/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1133 - accuracy: 0.9597 - val_loss: 0.0999 - val_accuracy: 0.9700 Epoch 105/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1117 - accuracy: 0.9605 - val_loss: 0.0989 - val_accuracy: 0.9708 Epoch 106/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1103 - accuracy: 0.9618 - val_loss: 0.0975 - val_accuracy: 0.9700 Epoch 107/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1101 - accuracy: 0.9607 - val_loss: 0.0962 - val_accuracy: 0.9700 Epoch 108/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1094 - accuracy: 0.9620 - val_loss: 0.1003 - val_accuracy: 0.9723 Epoch 109/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1097 - accuracy: 0.9602 - val_loss: 0.0961 - val_accuracy: 0.9708 Epoch 110/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1084 - accuracy: 0.9633 - val_loss: 0.0948 - val_accuracy: 0.9700 Epoch 111/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1076 - accuracy: 0.9633 - val_loss: 0.0967 - val_accuracy: 0.9731 Epoch 112/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1067 - accuracy: 0.9633 - val_loss: 0.0948 - val_accuracy: 0.9715 Epoch 113/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.1059 - accuracy: 0.9636 - val_loss: 0.0953 - val_accuracy: 0.9731 Epoch 114/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1066 - accuracy: 0.9636 - val_loss: 0.0932 - val_accuracy: 0.9685 Epoch 115/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1066 - accuracy: 0.9630 - val_loss: 0.0929 - val_accuracy: 0.9723 Epoch 116/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1062 - accuracy: 0.9600 - val_loss: 0.1002 - val_accuracy: 0.9723 Epoch 117/2000
8/8 [==============================] - 0s 5ms/step - loss: 0.1041 - accuracy: 0.9664 - val_loss: 0.0917 - val_accuracy: 0.9715 Epoch 118/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1027 - accuracy: 0.9646 - val_loss: 0.0965 - val_accuracy: 0.9723 Epoch 119/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1030 - accuracy: 0.9654 - val_loss: 0.0916 - val_accuracy: 0.9731 Epoch 120/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1021 - accuracy: 0.9659 - val_loss: 0.0908 - val_accuracy: 0.9723 Epoch 121/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1014 - accuracy: 0.9648 - val_loss: 0.0928 - val_accuracy: 0.9746 Epoch 122/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.1014 - accuracy: 0.9666 - val_loss: 0.0926 - val_accuracy: 0.9738 Epoch 123/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.1004 - accuracy: 0.9669 - val_loss: 0.0910 - val_accuracy: 0.9754 Epoch 124/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0998 - accuracy: 0.9669 - val_loss: 0.0898 - val_accuracy: 0.9754 Epoch 125/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0993 - accuracy: 0.9677 - val_loss: 0.0890 - val_accuracy: 0.9692 Epoch 126/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0994 - accuracy: 0.9666 - val_loss: 0.0921 - val_accuracy: 0.9769 Epoch 127/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0991 - accuracy: 0.9656 - val_loss: 0.0889 - val_accuracy: 0.9762 Epoch 128/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0977 - accuracy: 0.9695 - val_loss: 0.0883 - val_accuracy: 0.9754 Epoch 129/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0973 - accuracy: 0.9700 - val_loss: 0.0883 - val_accuracy: 0.9754 Epoch 130/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0969 - accuracy: 0.9700 - val_loss: 0.0881 - val_accuracy: 0.9754 Epoch 131/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.0963 - accuracy: 0.9700 - val_loss: 0.0873 - val_accuracy: 0.9754 Epoch 132/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0959 - accuracy: 0.9672 - val_loss: 0.0919 - val_accuracy: 0.9769 Epoch 133/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0963 - accuracy: 0.9705 - val_loss: 0.0861 - val_accuracy: 0.9746 Epoch 134/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0962 - accuracy: 0.9705 - val_loss: 0.0857 - val_accuracy: 0.9738 Epoch 135/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0945 - accuracy: 0.9702 - val_loss: 0.0855 - val_accuracy: 0.9738 Epoch 136/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0941 - accuracy: 0.9710 - val_loss: 0.0854 - val_accuracy: 0.9769 Epoch 137/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0933 - accuracy: 0.9702 - val_loss: 0.0850 - val_accuracy: 0.9777 Epoch 138/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0930 - accuracy: 0.9700 - val_loss: 0.0865 - val_accuracy: 0.9777 Epoch 139/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0932 - accuracy: 0.9713 - val_loss: 0.0866 - val_accuracy: 0.9785 Epoch 140/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0925 - accuracy: 0.9700 - val_loss: 0.0839 - val_accuracy: 0.9754 Epoch 141/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0916 - accuracy: 0.9718 - val_loss: 0.0841 - val_accuracy: 0.9785 Epoch 142/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0911 - accuracy: 0.9710 - val_loss: 0.0862 - val_accuracy: 0.9785 Epoch 143/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0914 - accuracy: 0.9682 - val_loss: 0.0840 - val_accuracy: 0.9792 Epoch 144/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0909 - accuracy: 0.9713 - val_loss: 0.0837 - val_accuracy: 0.9785 Epoch 145/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.0901 - accuracy: 0.9720 - val_loss: 0.0827 - val_accuracy: 0.9762 Epoch 146/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0900 - accuracy: 0.9707 - val_loss: 0.0829 - val_accuracy: 0.9785 Epoch 147/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0904 - accuracy: 0.9710 - val_loss: 0.0834 - val_accuracy: 0.9785 Epoch 148/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0894 - accuracy: 0.9720 - val_loss: 0.0849 - val_accuracy: 0.9777 Epoch 149/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0888 - accuracy: 0.9713 - val_loss: 0.0855 - val_accuracy: 0.9769 Epoch 150/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0902 - accuracy: 0.9713 - val_loss: 0.0817 - val_accuracy: 0.9754 Epoch 151/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0890 - accuracy: 0.9728 - val_loss: 0.0816 - val_accuracy: 0.9754 Epoch 152/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0880 - accuracy: 0.9702 - val_loss: 0.0882 - val_accuracy: 0.9769 Epoch 153/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0868 - accuracy: 0.9723 - val_loss: 0.0803 - val_accuracy: 0.9777 Epoch 154/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0910 - accuracy: 0.9687 - val_loss: 0.0916 - val_accuracy: 0.9746 Epoch 155/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0909 - accuracy: 0.9697 - val_loss: 0.0813 - val_accuracy: 0.9754 Epoch 156/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0880 - accuracy: 0.9713 - val_loss: 0.0807 - val_accuracy: 0.9762 Epoch 157/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0886 - accuracy: 0.9687 - val_loss: 0.0955 - val_accuracy: 0.9723 Epoch 158/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0901 - accuracy: 0.9707 - val_loss: 0.0799 - val_accuracy: 0.9792 Epoch 159/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.0863 - accuracy: 0.9710 - val_loss: 0.0832 - val_accuracy: 0.9715 Epoch 160/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0899 - accuracy: 0.9692 - val_loss: 0.0855 - val_accuracy: 0.9777 Epoch 161/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0859 - accuracy: 0.9710 - val_loss: 0.0804 - val_accuracy: 0.9792 Epoch 162/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0854 - accuracy: 0.9718 - val_loss: 0.0833 - val_accuracy: 0.9715 Epoch 163/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0876 - accuracy: 0.9707 - val_loss: 0.0805 - val_accuracy: 0.9785 Epoch 164/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0856 - accuracy: 0.9705 - val_loss: 0.0848 - val_accuracy: 0.9762 Epoch 165/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0852 - accuracy: 0.9731 - val_loss: 0.0785 - val_accuracy: 0.9792 Epoch 166/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0823 - accuracy: 0.9725 - val_loss: 0.0782 - val_accuracy: 0.9792 Epoch 167/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0826 - accuracy: 0.9725 - val_loss: 0.0805 - val_accuracy: 0.9792 Epoch 168/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0824 - accuracy: 0.9723 - val_loss: 0.0798 - val_accuracy: 0.9800 Epoch 169/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0809 - accuracy: 0.9738 - val_loss: 0.0777 - val_accuracy: 0.9785 Epoch 170/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0810 - accuracy: 0.9733 - val_loss: 0.0789 - val_accuracy: 0.9800 Epoch 171/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0811 - accuracy: 0.9715 - val_loss: 0.0787 - val_accuracy: 0.9800 Epoch 172/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0809 - accuracy: 0.9733 - val_loss: 0.0773 - val_accuracy: 0.9792 Epoch 173/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.0805 - accuracy: 0.9731 - val_loss: 0.0772 - val_accuracy: 0.9785 Epoch 174/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0798 - accuracy: 0.9736 - val_loss: 0.0779 - val_accuracy: 0.9800
Epoch 175/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0798 - accuracy: 0.9723 - val_loss: 0.0766 - val_accuracy: 0.9792 Epoch 176/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0789 - accuracy: 0.9743 - val_loss: 0.0774 - val_accuracy: 0.9792 Epoch 177/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0791 - accuracy: 0.9733 - val_loss: 0.0779 - val_accuracy: 0.9777 Epoch 178/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0809 - accuracy: 0.9743 - val_loss: 0.0786 - val_accuracy: 0.9792 Epoch 179/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0804 - accuracy: 0.9749 - val_loss: 0.0811 - val_accuracy: 0.9777 Epoch 180/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0797 - accuracy: 0.9749 - val_loss: 0.0759 - val_accuracy: 0.9785 Epoch 181/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0780 - accuracy: 0.9736 - val_loss: 0.0759 - val_accuracy: 0.9792 Epoch 182/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0778 - accuracy: 0.9738 - val_loss: 0.0763 - val_accuracy: 0.9800 Epoch 183/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0770 - accuracy: 0.9741 - val_loss: 0.0765 - val_accuracy: 0.9800 Epoch 184/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0780 - accuracy: 0.9736 - val_loss: 0.0755 - val_accuracy: 0.9792 Epoch 185/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0764 - accuracy: 0.9738 - val_loss: 0.0754 - val_accuracy: 0.9777 Epoch 186/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0777 - accuracy: 0.9741 - val_loss: 0.0752 - val_accuracy: 0.9792 Epoch 187/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0762 - accuracy: 0.9749 - val_loss: 0.0773 - val_accuracy: 0.9792 Epoch 188/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0763 - accuracy: 0.9741 - val_loss: 0.0749 - val_accuracy: 0.9800 Epoch 
189/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0756 - accuracy: 0.9746 - val_loss: 0.0752 - val_accuracy: 0.9808 Epoch 190/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0754 - accuracy: 0.9764 - val_loss: 0.0753 - val_accuracy: 0.9792 Epoch 191/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0780 - accuracy: 0.9756 - val_loss: 0.0745 - val_accuracy: 0.9777 Epoch 192/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0756 - accuracy: 0.9759 - val_loss: 0.0787 - val_accuracy: 0.9777 Epoch 193/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0753 - accuracy: 0.9751 - val_loss: 0.0746 - val_accuracy: 0.9800 Epoch 194/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0753 - accuracy: 0.9746 - val_loss: 0.0749 - val_accuracy: 0.9777 Epoch 195/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0737 - accuracy: 0.9756 - val_loss: 0.0738 - val_accuracy: 0.9808 Epoch 196/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0737 - accuracy: 0.9759 - val_loss: 0.0732 - val_accuracy: 0.9800 Epoch 197/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0750 - accuracy: 0.9759 - val_loss: 0.0780 - val_accuracy: 0.9769 Epoch 198/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0795 - accuracy: 0.9720 - val_loss: 0.0756 - val_accuracy: 0.9792 Epoch 199/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0765 - accuracy: 0.9741 - val_loss: 0.0817 - val_accuracy: 0.9769 Epoch 200/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0764 - accuracy: 0.9741 - val_loss: 0.0738 - val_accuracy: 0.9785 Epoch 201/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0727 - accuracy: 0.9751 - val_loss: 0.0753 - val_accuracy: 0.9800 Epoch 202/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0706 - accuracy: 0.9769 - val_loss: 0.0728 - val_accuracy: 0.9800 Epoch 203/2000 
8/8 [==============================] - 0s 5ms/step - loss: 0.0715 - accuracy: 0.9766 - val_loss: 0.0752 - val_accuracy: 0.9800 Epoch 204/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0707 - accuracy: 0.9769 - val_loss: 0.0742 - val_accuracy: 0.9785 Epoch 205/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0721 - accuracy: 0.9759 - val_loss: 0.0744 - val_accuracy: 0.9785 Epoch 206/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0736 - accuracy: 0.9761 - val_loss: 0.0735 - val_accuracy: 0.9815 Epoch 207/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0736 - accuracy: 0.9741 - val_loss: 0.0833 - val_accuracy: 0.9769 Epoch 208/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0785 - accuracy: 0.9702 - val_loss: 0.0725 - val_accuracy: 0.9808 Epoch 209/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0717 - accuracy: 0.9756 - val_loss: 0.0722 - val_accuracy: 0.9808 Epoch 210/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0707 - accuracy: 0.9766 - val_loss: 0.0724 - val_accuracy: 0.9808 Epoch 211/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0703 - accuracy: 0.9772 - val_loss: 0.0714 - val_accuracy: 0.9815 Epoch 212/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0679 - accuracy: 0.9782 - val_loss: 0.0716 - val_accuracy: 0.9800 Epoch 213/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0678 - accuracy: 0.9782 - val_loss: 0.0715 - val_accuracy: 0.9800 Epoch 214/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0677 - accuracy: 0.9772 - val_loss: 0.0707 - val_accuracy: 0.9800 Epoch 215/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0680 - accuracy: 0.9774 - val_loss: 0.0750 - val_accuracy: 0.9777 Epoch 216/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0699 - accuracy: 0.9761 - val_loss: 0.0763 - val_accuracy: 0.9769 Epoch 217/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.0712 - accuracy: 0.9736 - val_loss: 0.0711 - val_accuracy: 0.9800 Epoch 218/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0719 - accuracy: 0.9761 - val_loss: 0.0726 - val_accuracy: 0.9800 Epoch 219/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0694 - accuracy: 0.9784 - val_loss: 0.0730 - val_accuracy: 0.9800 Epoch 220/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0708 - accuracy: 0.9751 - val_loss: 0.0716 - val_accuracy: 0.9808 Epoch 221/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0686 - accuracy: 0.9779 - val_loss: 0.0731 - val_accuracy: 0.9785 Epoch 222/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0698 - accuracy: 0.9769 - val_loss: 0.0697 - val_accuracy: 0.9800 Epoch 223/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0695 - accuracy: 0.9764 - val_loss: 0.0767 - val_accuracy: 0.9769 Epoch 224/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0676 - accuracy: 0.9769 - val_loss: 0.0704 - val_accuracy: 0.9792 Epoch 225/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0659 - accuracy: 0.9769 - val_loss: 0.0709 - val_accuracy: 0.9800 Epoch 226/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0659 - accuracy: 0.9777 - val_loss: 0.0717 - val_accuracy: 0.9792 Epoch 227/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0642 - accuracy: 0.9792 - val_loss: 0.0702 - val_accuracy: 0.9808 Epoch 228/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0651 - accuracy: 0.9772 - val_loss: 0.0697 - val_accuracy: 0.9800 Epoch 229/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0660 - accuracy: 0.9774 - val_loss: 0.0691 - val_accuracy: 0.9800 Epoch 230/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0642 - accuracy: 0.9772 - val_loss: 0.0703 - val_accuracy: 0.9792 Epoch 231/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.0636 - accuracy: 0.9787 - val_loss: 0.0691 - val_accuracy: 0.9808 Epoch 232/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0647 - accuracy: 0.9774 - val_loss: 0.0699 - val_accuracy: 0.9808
Epoch 233/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0655 - accuracy: 0.9777 - val_loss: 0.0760 - val_accuracy: 0.9762 Epoch 234/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0747 - accuracy: 0.9738 - val_loss: 0.0866 - val_accuracy: 0.9746 Epoch 235/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0759 - accuracy: 0.9710 - val_loss: 0.0723 - val_accuracy: 0.9808 Epoch 236/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0686 - accuracy: 0.9777 - val_loss: 0.0687 - val_accuracy: 0.9800 Epoch 237/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0638 - accuracy: 0.9787 - val_loss: 0.0685 - val_accuracy: 0.9808 Epoch 238/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0628 - accuracy: 0.9772 - val_loss: 0.0684 - val_accuracy: 0.9800 Epoch 239/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0626 - accuracy: 0.9792 - val_loss: 0.0687 - val_accuracy: 0.9808 Epoch 240/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0632 - accuracy: 0.9787 - val_loss: 0.0688 - val_accuracy: 0.9808 Epoch 241/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0634 - accuracy: 0.9782 - val_loss: 0.0685 - val_accuracy: 0.9800 Epoch 242/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0619 - accuracy: 0.9792 - val_loss: 0.0677 - val_accuracy: 0.9815 Epoch 243/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0614 - accuracy: 0.9790 - val_loss: 0.0681 - val_accuracy: 0.9800 Epoch 244/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0616 - accuracy: 0.9784 - val_loss: 0.0685 - val_accuracy: 0.9800 Epoch 245/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0616 - accuracy: 0.9792 - val_loss: 0.0696 - val_accuracy: 0.9808 Epoch 246/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0622 - accuracy: 0.9774 - val_loss: 0.0679 - val_accuracy: 0.9800 Epoch 
247/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0618 - accuracy: 0.9779 - val_loss: 0.0688 - val_accuracy: 0.9815 Epoch 248/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0624 - accuracy: 0.9766 - val_loss: 0.0675 - val_accuracy: 0.9831 Epoch 249/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0612 - accuracy: 0.9795 - val_loss: 0.0672 - val_accuracy: 0.9831 Epoch 250/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0606 - accuracy: 0.9790 - val_loss: 0.0669 - val_accuracy: 0.9815 Epoch 251/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0608 - accuracy: 0.9782 - val_loss: 0.0714 - val_accuracy: 0.9792 Epoch 252/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0626 - accuracy: 0.9792 - val_loss: 0.0673 - val_accuracy: 0.9808 Epoch 253/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0601 - accuracy: 0.9784 - val_loss: 0.0675 - val_accuracy: 0.9815 Epoch 254/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0610 - accuracy: 0.9792 - val_loss: 0.0702 - val_accuracy: 0.9800 Epoch 255/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0624 - accuracy: 0.9769 - val_loss: 0.0689 - val_accuracy: 0.9823 Epoch 256/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0611 - accuracy: 0.9779 - val_loss: 0.0685 - val_accuracy: 0.9815 Epoch 257/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0621 - accuracy: 0.9792 - val_loss: 0.0666 - val_accuracy: 0.9808 Epoch 258/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0593 - accuracy: 0.9800 - val_loss: 0.0669 - val_accuracy: 0.9823 Epoch 259/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0597 - accuracy: 0.9792 - val_loss: 0.0683 - val_accuracy: 0.9808 Epoch 260/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0592 - accuracy: 0.9810 - val_loss: 0.0672 - val_accuracy: 0.9815 Epoch 261/2000 
8/8 [==============================] - 0s 5ms/step - loss: 0.0592 - accuracy: 0.9805 - val_loss: 0.0668 - val_accuracy: 0.9831 Epoch 262/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0587 - accuracy: 0.9808 - val_loss: 0.0666 - val_accuracy: 0.9815 Epoch 263/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0587 - accuracy: 0.9800 - val_loss: 0.0674 - val_accuracy: 0.9823 Epoch 264/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0603 - accuracy: 0.9787 - val_loss: 0.0716 - val_accuracy: 0.9815 Epoch 265/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0625 - accuracy: 0.9774 - val_loss: 0.0691 - val_accuracy: 0.9831 Epoch 266/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0615 - accuracy: 0.9805 - val_loss: 0.0662 - val_accuracy: 0.9823 Epoch 267/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0590 - accuracy: 0.9802 - val_loss: 0.0667 - val_accuracy: 0.9815 Epoch 268/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0600 - accuracy: 0.9795 - val_loss: 0.0671 - val_accuracy: 0.9823 Epoch 269/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0591 - accuracy: 0.9810 - val_loss: 0.0658 - val_accuracy: 0.9815 Epoch 270/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0602 - accuracy: 0.9795 - val_loss: 0.0734 - val_accuracy: 0.9785 Epoch 271/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0637 - accuracy: 0.9782 - val_loss: 0.0684 - val_accuracy: 0.9792 Epoch 272/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0621 - accuracy: 0.9802 - val_loss: 0.0658 - val_accuracy: 0.9823 Epoch 273/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0591 - accuracy: 0.9800 - val_loss: 0.0673 - val_accuracy: 0.9831 Epoch 274/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0588 - accuracy: 0.9797 - val_loss: 0.0674 - val_accuracy: 0.9823 Epoch 275/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.0584 - accuracy: 0.9808 - val_loss: 0.0667 - val_accuracy: 0.9800 Epoch 276/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0583 - accuracy: 0.9808 - val_loss: 0.0653 - val_accuracy: 0.9808 Epoch 277/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0588 - accuracy: 0.9802 - val_loss: 0.0683 - val_accuracy: 0.9792 Epoch 278/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0576 - accuracy: 0.9810 - val_loss: 0.0654 - val_accuracy: 0.9815 Epoch 279/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0568 - accuracy: 0.9808 - val_loss: 0.0665 - val_accuracy: 0.9823 Epoch 280/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0590 - accuracy: 0.9805 - val_loss: 0.0712 - val_accuracy: 0.9823 Epoch 281/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0617 - accuracy: 0.9784 - val_loss: 0.0662 - val_accuracy: 0.9823 Epoch 282/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0604 - accuracy: 0.9805 - val_loss: 0.0653 - val_accuracy: 0.9823 Epoch 283/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0565 - accuracy: 0.9808 - val_loss: 0.0707 - val_accuracy: 0.9800 Epoch 284/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0587 - accuracy: 0.9808 - val_loss: 0.0676 - val_accuracy: 0.9800 Epoch 285/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0623 - accuracy: 0.9774 - val_loss: 0.0663 - val_accuracy: 0.9838 Epoch 286/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0580 - accuracy: 0.9802 - val_loss: 0.0700 - val_accuracy: 0.9815 Epoch 287/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0584 - accuracy: 0.9808 - val_loss: 0.0658 - val_accuracy: 0.9823 Epoch 288/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0569 - accuracy: 0.9810 - val_loss: 0.0668 - val_accuracy: 0.9800 Epoch 289/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.0588 - accuracy: 0.9808 - val_loss: 0.0686 - val_accuracy: 0.9792 Epoch 290/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0582 - accuracy: 0.9802 - val_loss: 0.0684 - val_accuracy: 0.9792
Epoch 291/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0575 - accuracy: 0.9805 - val_loss: 0.0648 - val_accuracy: 0.9815 Epoch 292/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0561 - accuracy: 0.9813 - val_loss: 0.0648 - val_accuracy: 0.9823 Epoch 293/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0570 - accuracy: 0.9802 - val_loss: 0.0706 - val_accuracy: 0.9823 Epoch 294/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0600 - accuracy: 0.9813 - val_loss: 0.0698 - val_accuracy: 0.9808 Epoch 295/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0620 - accuracy: 0.9787 - val_loss: 0.0669 - val_accuracy: 0.9823 Epoch 296/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0600 - accuracy: 0.9810 - val_loss: 0.0654 - val_accuracy: 0.9815 Epoch 297/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0567 - accuracy: 0.9808 - val_loss: 0.0660 - val_accuracy: 0.9808 Epoch 298/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0547 - accuracy: 0.9820 - val_loss: 0.0645 - val_accuracy: 0.9823 Epoch 299/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0553 - accuracy: 0.9815 - val_loss: 0.0686 - val_accuracy: 0.9815 Epoch 300/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0553 - accuracy: 0.9802 - val_loss: 0.0649 - val_accuracy: 0.9823 Epoch 301/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0538 - accuracy: 0.9828 - val_loss: 0.0643 - val_accuracy: 0.9815 Epoch 302/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0574 - accuracy: 0.9815 - val_loss: 0.0641 - val_accuracy: 0.9823 Epoch 303/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0566 - accuracy: 0.9813 - val_loss: 0.0677 - val_accuracy: 0.9800 Epoch 304/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0547 - accuracy: 0.9823 - val_loss: 0.0658 - val_accuracy: 0.9800 Epoch 
305/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0555 - accuracy: 0.9833 - val_loss: 0.0644 - val_accuracy: 0.9815 Epoch 306/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0538 - accuracy: 0.9826 - val_loss: 0.0640 - val_accuracy: 0.9838 Epoch 307/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0530 - accuracy: 0.9828 - val_loss: 0.0653 - val_accuracy: 0.9823 Epoch 308/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0541 - accuracy: 0.9823 - val_loss: 0.0646 - val_accuracy: 0.9823 Epoch 309/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0533 - accuracy: 0.9815 - val_loss: 0.0641 - val_accuracy: 0.9831 Epoch 310/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0529 - accuracy: 0.9828 - val_loss: 0.0655 - val_accuracy: 0.9815 Epoch 311/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0536 - accuracy: 0.9826 - val_loss: 0.0662 - val_accuracy: 0.9800 Epoch 312/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0565 - accuracy: 0.9818 - val_loss: 0.0655 - val_accuracy: 0.9800 Epoch 313/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0568 - accuracy: 0.9818 - val_loss: 0.0643 - val_accuracy: 0.9846 Epoch 314/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0530 - accuracy: 0.9820 - val_loss: 0.0654 - val_accuracy: 0.9838 Epoch 315/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0529 - accuracy: 0.9823 - val_loss: 0.0644 - val_accuracy: 0.9815 Epoch 316/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0532 - accuracy: 0.9815 - val_loss: 0.0658 - val_accuracy: 0.9800 Epoch 317/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0528 - accuracy: 0.9831 - val_loss: 0.0638 - val_accuracy: 0.9838 Epoch 318/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0525 - accuracy: 0.9836 - val_loss: 0.0651 - val_accuracy: 0.9831 Epoch 319/2000 
8/8 [==============================] - 0s 5ms/step - loss: 0.0522 - accuracy: 0.9831 - val_loss: 0.0637 - val_accuracy: 0.9823 Epoch 320/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0519 - accuracy: 0.9836 - val_loss: 0.0642 - val_accuracy: 0.9838 Epoch 321/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0519 - accuracy: 0.9833 - val_loss: 0.0676 - val_accuracy: 0.9800 Epoch 322/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0536 - accuracy: 0.9818 - val_loss: 0.0655 - val_accuracy: 0.9808 Epoch 323/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0530 - accuracy: 0.9826 - val_loss: 0.0639 - val_accuracy: 0.9838 Epoch 324/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0515 - accuracy: 0.9843 - val_loss: 0.0655 - val_accuracy: 0.9838 Epoch 325/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0534 - accuracy: 0.9836 - val_loss: 0.0642 - val_accuracy: 0.9831 Epoch 326/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0532 - accuracy: 0.9826 - val_loss: 0.0671 - val_accuracy: 0.9838 Epoch 327/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0530 - accuracy: 0.9828 - val_loss: 0.0645 - val_accuracy: 0.9815 Epoch 328/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0529 - accuracy: 0.9838 - val_loss: 0.0639 - val_accuracy: 0.9823 Epoch 329/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0527 - accuracy: 0.9823 - val_loss: 0.0649 - val_accuracy: 0.9808 Epoch 330/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0532 - accuracy: 0.9818 - val_loss: 0.0647 - val_accuracy: 0.9815 Epoch 331/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0520 - accuracy: 0.9833 - val_loss: 0.0644 - val_accuracy: 0.9846 Epoch 332/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0511 - accuracy: 0.9836 - val_loss: 0.0636 - val_accuracy: 0.9838 Epoch 333/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.0509 - accuracy: 0.9828 - val_loss: 0.0638 - val_accuracy: 0.9831 Epoch 334/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0521 - accuracy: 0.9823 - val_loss: 0.0636 - val_accuracy: 0.9838 Epoch 335/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0513 - accuracy: 0.9836 - val_loss: 0.0674 - val_accuracy: 0.9831 Epoch 336/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0564 - accuracy: 0.9820 - val_loss: 0.0649 - val_accuracy: 0.9815 Epoch 337/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0541 - accuracy: 0.9826 - val_loss: 0.0652 - val_accuracy: 0.9808 Epoch 338/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0517 - accuracy: 0.9818 - val_loss: 0.0638 - val_accuracy: 0.9831 Epoch 339/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0518 - accuracy: 0.9823 - val_loss: 0.0643 - val_accuracy: 0.9815 Epoch 340/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0504 - accuracy: 0.9831 - val_loss: 0.0639 - val_accuracy: 0.9838 Epoch 341/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0500 - accuracy: 0.9836 - val_loss: 0.0642 - val_accuracy: 0.9823 Epoch 342/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0502 - accuracy: 0.9831 - val_loss: 0.0641 - val_accuracy: 0.9846 Epoch 343/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0510 - accuracy: 0.9836 - val_loss: 0.0646 - val_accuracy: 0.9846 Epoch 344/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0514 - accuracy: 0.9823 - val_loss: 0.0651 - val_accuracy: 0.9846 Epoch 345/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0505 - accuracy: 0.9841 - val_loss: 0.0641 - val_accuracy: 0.9831 Epoch 346/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0499 - accuracy: 0.9838 - val_loss: 0.0653 - val_accuracy: 0.9815 Epoch 347/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.0506 - accuracy: 0.9838 - val_loss: 0.0640 - val_accuracy: 0.9831 Epoch 348/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0497 - accuracy: 0.9831 - val_loss: 0.0673 - val_accuracy: 0.9808
Epoch 349/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0523 - accuracy: 0.9838 - val_loss: 0.0655 - val_accuracy: 0.9838 Epoch 350/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0546 - accuracy: 0.9831 - val_loss: 0.0659 - val_accuracy: 0.9838 Epoch 351/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0520 - accuracy: 0.9815 - val_loss: 0.0638 - val_accuracy: 0.9846 Epoch 352/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0498 - accuracy: 0.9833 - val_loss: 0.0639 - val_accuracy: 0.9823 Epoch 353/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0509 - accuracy: 0.9813 - val_loss: 0.0643 - val_accuracy: 0.9838 Epoch 354/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0516 - accuracy: 0.9823 - val_loss: 0.0633 - val_accuracy: 0.9854 Epoch 355/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0498 - accuracy: 0.9843 - val_loss: 0.0642 - val_accuracy: 0.9815 Epoch 356/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0502 - accuracy: 0.9833 - val_loss: 0.0632 - val_accuracy: 0.9846 Epoch 357/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0497 - accuracy: 0.9856 - val_loss: 0.0634 - val_accuracy: 0.9831 Epoch 358/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0491 - accuracy: 0.9831 - val_loss: 0.0639 - val_accuracy: 0.9831 Epoch 359/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0491 - accuracy: 0.9836 - val_loss: 0.0633 - val_accuracy: 0.9831 Epoch 360/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0492 - accuracy: 0.9836 - val_loss: 0.0640 - val_accuracy: 0.9846 Epoch 361/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0491 - accuracy: 0.9831 - val_loss: 0.0634 - val_accuracy: 0.9838 Epoch 362/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0497 - accuracy: 0.9831 - val_loss: 0.0653 - val_accuracy: 0.9838 Epoch 
363/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0490 - accuracy: 0.9826 - val_loss: 0.0652 - val_accuracy: 0.9831 Epoch 364/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0521 - accuracy: 0.9823 - val_loss: 0.0654 - val_accuracy: 0.9831 Epoch 365/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0515 - accuracy: 0.9826 - val_loss: 0.0644 - val_accuracy: 0.9831 Epoch 366/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0507 - accuracy: 0.9831 - val_loss: 0.0708 - val_accuracy: 0.9800 Epoch 367/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0507 - accuracy: 0.9838 - val_loss: 0.0643 - val_accuracy: 0.9831 Epoch 368/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0497 - accuracy: 0.9826 - val_loss: 0.0641 - val_accuracy: 0.9854 Epoch 369/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0522 - accuracy: 0.9813 - val_loss: 0.0642 - val_accuracy: 0.9846 Epoch 370/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0490 - accuracy: 0.9820 - val_loss: 0.0666 - val_accuracy: 0.9838 Epoch 371/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0503 - accuracy: 0.9831 - val_loss: 0.0631 - val_accuracy: 0.9838 Epoch 372/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0514 - accuracy: 0.9826 - val_loss: 0.0667 - val_accuracy: 0.9815 Epoch 373/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0496 - accuracy: 0.9843 - val_loss: 0.0644 - val_accuracy: 0.9823 Epoch 374/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0487 - accuracy: 0.9843 - val_loss: 0.0628 - val_accuracy: 0.9854 Epoch 375/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0494 - accuracy: 0.9828 - val_loss: 0.0685 - val_accuracy: 0.9838 Epoch 376/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0506 - accuracy: 0.9846 - val_loss: 0.0640 - val_accuracy: 0.9831 Epoch 377/2000 
8/8 [==============================] - 0s 5ms/step - loss: 0.0506 - accuracy: 0.9841 - val_loss: 0.0670 - val_accuracy: 0.9815 Epoch 378/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0480 - accuracy: 0.9851 - val_loss: 0.0637 - val_accuracy: 0.9831 Epoch 379/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0495 - accuracy: 0.9828 - val_loss: 0.0634 - val_accuracy: 0.9846 Epoch 380/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0515 - accuracy: 0.9833 - val_loss: 0.0664 - val_accuracy: 0.9831 Epoch 381/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0488 - accuracy: 0.9838 - val_loss: 0.0631 - val_accuracy: 0.9838 Epoch 382/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0476 - accuracy: 0.9841 - val_loss: 0.0648 - val_accuracy: 0.9823 Epoch 383/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0514 - accuracy: 0.9815 - val_loss: 0.0659 - val_accuracy: 0.9815 Epoch 384/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0484 - accuracy: 0.9826 - val_loss: 0.0653 - val_accuracy: 0.9823 Epoch 385/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0505 - accuracy: 0.9828 - val_loss: 0.0697 - val_accuracy: 0.9808 Epoch 386/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0515 - accuracy: 0.9815 - val_loss: 0.0722 - val_accuracy: 0.9800 Epoch 387/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0494 - accuracy: 0.9831 - val_loss: 0.0646 - val_accuracy: 0.9846 Epoch 388/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0488 - accuracy: 0.9838 - val_loss: 0.0637 - val_accuracy: 0.9846 Epoch 389/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0476 - accuracy: 0.9831 - val_loss: 0.0631 - val_accuracy: 0.9854 Epoch 390/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0468 - accuracy: 0.9843 - val_loss: 0.0628 - val_accuracy: 0.9838 Epoch 391/2000 8/8 
[==============================] - 0s 5ms/step - loss: 0.0464 - accuracy: 0.9851 - val_loss: 0.0669 - val_accuracy: 0.9815 Epoch 392/2000 8/8 [==============================] - 0s 6ms/step - loss: 0.0487 - accuracy: 0.9836 - val_loss: 0.0656 - val_accuracy: 0.9823 Epoch 393/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0485 - accuracy: 0.9828 - val_loss: 0.0632 - val_accuracy: 0.9846 Epoch 394/2000 8/8 [==============================] - 0s 5ms/step - loss: 0.0500 - accuracy: 0.9828 - val_loss: 0.0651 - val_accuracy: 0.9846
In [13]:
Copied!
# Evaluate the trained model on the held-out test split; evaluate() returns [loss, accuracy].
score = model.evaluate(X_test, y_test)
print('Test accuracy:', score[1])
# Evaluate the trained model on the held-out test split; evaluate() returns [loss, accuracy].
score = model.evaluate(X_test, y_test)
print('Test accuracy:', score[1])
41/41 [==============================] - 0s 1ms/step - loss: 0.0472 - accuracy: 0.9885 Test accuracy: 0.9884615540504456