Chapter 15. Building a Model with Real Data
1. Exploring the Data
In [1]:
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
import numpy as np
# Load the data.
df = pd.read_csv("./data/house_train.csv")
# Take a first look at the data.
df
Out[1]:
| | Id | MSSubClass | MSZoning | LotFrontage | LotArea | Street | Alley | LotShape | LandContour | Utilities | ... | PoolArea | PoolQC | Fence | MiscFeature | MiscVal | MoSold | YrSold | SaleType | SaleCondition | SalePrice |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 1 | 60 | RL | 65.0 | 8450 | Pave | NaN | Reg | Lvl | AllPub | ... | 0 | NaN | NaN | NaN | 0 | 2 | 2008 | WD | Normal | 208500 |
| 1 | 2 | 20 | RL | 80.0 | 9600 | Pave | NaN | Reg | Lvl | AllPub | ... | 0 | NaN | NaN | NaN | 0 | 5 | 2007 | WD | Normal | 181500 |
| 2 | 3 | 60 | RL | 68.0 | 11250 | Pave | NaN | IR1 | Lvl | AllPub | ... | 0 | NaN | NaN | NaN | 0 | 9 | 2008 | WD | Normal | 223500 |
| 3 | 4 | 70 | RL | 60.0 | 9550 | Pave | NaN | IR1 | Lvl | AllPub | ... | 0 | NaN | NaN | NaN | 0 | 2 | 2006 | WD | Abnorml | 140000 |
| 4 | 5 | 60 | RL | 84.0 | 14260 | Pave | NaN | IR1 | Lvl | AllPub | ... | 0 | NaN | NaN | NaN | 0 | 12 | 2008 | WD | Normal | 250000 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 1455 | 1456 | 60 | RL | 62.0 | 7917 | Pave | NaN | Reg | Lvl | AllPub | ... | 0 | NaN | NaN | NaN | 0 | 8 | 2007 | WD | Normal | 175000 |
| 1456 | 1457 | 20 | RL | 85.0 | 13175 | Pave | NaN | Reg | Lvl | AllPub | ... | 0 | NaN | MnPrv | NaN | 0 | 2 | 2010 | WD | Normal | 210000 |
| 1457 | 1458 | 70 | RL | 66.0 | 9042 | Pave | NaN | Reg | Lvl | AllPub | ... | 0 | NaN | GdPrv | Shed | 2500 | 5 | 2010 | WD | Normal | 266500 |
| 1458 | 1459 | 20 | RL | 68.0 | 9717 | Pave | NaN | Reg | Lvl | AllPub | ... | 0 | NaN | NaN | NaN | 0 | 4 | 2010 | WD | Normal | 142125 |
| 1459 | 1460 | 20 | RL | 75.0 | 9937 | Pave | NaN | Reg | Lvl | AllPub | ... | 0 | NaN | NaN | NaN | 0 | 6 | 2008 | WD | Normal | 147500 |
1460 rows × 81 columns
In [2]:
# Check the data type of each column.
df.dtypes
Out[2]:
Id int64
MSSubClass int64
MSZoning object
LotFrontage float64
LotArea int64
...
MoSold int64
YrSold int64
SaleType object
SaleCondition object
SalePrice int64
Length: 81, dtype: object
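Many of the 81 columns are of type object, meaning they hold categorical strings that a neural network cannot consume directly; Section 2 below converts them to numeric dummy variables. To list just those columns beforehand, a quick sketch using pandas' select_dtypes:

# List the categorical (object-typed) columns that will need encoding.
df.select_dtypes(include='object').columns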
2. Handling Missing Values and Categorical Variables
In [3]:
# Count the missing values in each column.
df.isnull().sum().sort_values(ascending=False).head(20)
Out[3]:
PoolQC          1453
MiscFeature     1406
Alley           1369
Fence           1179
FireplaceQu      690
LotFrontage      259
GarageYrBlt       81
GarageCond        81
GarageType        81
GarageFinish      81
GarageQual        81
BsmtFinType2      38
BsmtExposure      38
BsmtQual          37
BsmtCond          37
BsmtFinType1      37
MasVnrArea         8
MasVnrType         8
Electrical         1
Id                 0
dtype: int64
In [4]:
# Convert categorical variables into 0/1 dummy variables (see Chapter 12, Section 3).
df = pd.get_dummies(df)
# Fill each missing value with the mean of its column.
df = df.fillna(df.mean())
# Print the updated data frame.
df
Out[4]:
| | Id | MSSubClass | LotFrontage | LotArea | OverallQual | OverallCond | YearBuilt | YearRemodAdd | MasVnrArea | BsmtFinSF1 | ... | SaleType_ConLw | SaleType_New | SaleType_Oth | SaleType_WD | SaleCondition_Abnorml | SaleCondition_AdjLand | SaleCondition_Alloca | SaleCondition_Family | SaleCondition_Normal | SaleCondition_Partial |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 1 | 60 | 65.0 | 8450 | 7 | 5 | 2003 | 2003 | 196.0 | 706 | ... | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 |
| 1 | 2 | 20 | 80.0 | 9600 | 6 | 8 | 1976 | 1976 | 0.0 | 978 | ... | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 |
| 2 | 3 | 60 | 68.0 | 11250 | 7 | 5 | 2001 | 2002 | 162.0 | 486 | ... | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 |
| 3 | 4 | 70 | 60.0 | 9550 | 7 | 5 | 1915 | 1970 | 0.0 | 216 | ... | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 |
| 4 | 5 | 60 | 84.0 | 14260 | 8 | 5 | 2000 | 2000 | 350.0 | 655 | ... | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 1455 | 1456 | 60 | 62.0 | 7917 | 6 | 5 | 1999 | 2000 | 0.0 | 0 | ... | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 |
| 1456 | 1457 | 20 | 85.0 | 13175 | 6 | 6 | 1978 | 1988 | 119.0 | 790 | ... | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 |
| 1457 | 1458 | 70 | 66.0 | 9042 | 7 | 9 | 1941 | 2006 | 0.0 | 275 | ... | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 |
| 1458 | 1459 | 20 | 68.0 | 9717 | 5 | 6 | 1950 | 1996 | 0.0 | 49 | ... | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 |
| 1459 | 1460 | 20 | 75.0 | 9937 | 5 | 6 | 1965 | 1965 | 0.0 | 830 | ... | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 |
1460 rows × 290 columns
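pd.get_dummies replaces each object column with one 0/1 column per category, which is why the frame has grown from 81 to 290 columns. A sketch of the same transformation on a tiny two-row frame built from two of this dataset's columns:

# Tiny example: one categorical column becomes one 0/1 column per category.
toy = pd.DataFrame({'MSZoning': ['RL', 'RM'], 'LotArea': [8450, 9600]})
print(pd.get_dummies(toy))
#    LotArea  MSZoning_RL  MSZoning_RM
# 0     8450            1            0
# 1     9600            0            1
# (Recent pandas versions display the dummy columns as True/False booleans.)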
3. Extracting the Correlation of Each Attribute
In [5]:
# Compute the pairwise correlations between columns.
df_corr = df.corr()
# Sort by correlation with the house price, in descending order.
df_corr_sort = df_corr.sort_values('SalePrice', ascending=False)
# Print the 10 attributes most strongly correlated with the house price.
df_corr_sort['SalePrice'].head(10)
Out[5]:
SalePrice       1.000000
OverallQual     0.790982
GrLivArea       0.708624
GarageCars      0.640409
GarageArea      0.623431
TotalBsmtSF     0.613581
1stFlrSF        0.605852
FullBath        0.560664
BsmtQual_Ex     0.553105
TotRmsAbvGrd    0.533723
Name: SalePrice, dtype: float64
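The next cell visualizes these relationships with a pairplot. A hedged alternative, not in the original chapter, is a compact correlation heatmap over the same top attributes:

# Correlation heatmap of the 10 attributes most correlated with SalePrice.
top_cols = df_corr_sort['SalePrice'].head(10).index
sns.heatmap(df[top_cols].corr(), annot=True, cmap='coolwarm')
plt.show()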
In [6]:
# Draw pairwise scatter plots for the attributes most correlated with the house price.
cols = ['SalePrice','OverallQual','GrLivArea','GarageCars','GarageArea','TotalBsmtSF']
sns.pairplot(df[cols])
plt.show()
4. A House Price Prediction Model
In [7]:
# Select the five attributes to use as inputs.
cols_train = ['OverallQual','GrLivArea','GarageCars','GarageArea','TotalBsmtSF']
X_train_pre = df[cols_train]
# Store the house prices (the target).
y = df['SalePrice'].values
In [8]:
# Use 80% of the data as the training set and 20% as the test set.
X_train, X_test, y_train, y_test = train_test_split(X_train_pre, y, test_size=0.2)
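Because train_test_split shuffles the rows randomly, the exact split (and therefore the loss values and predictions shown below) will vary from run to run. A minimal variant for a reproducible split; the seed 42 is an arbitrary example, not from the original:

# Fix the random seed so the same split is produced every run.
X_train, X_test, y_train, y_test = train_test_split(X_train_pre, y, test_size=0.2, random_state=42)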
In [9]:
# Define the model structure.
model = Sequential()
model.add(Dense(10, input_dim=X_train.shape[1], activation='relu'))
model.add(Dense(30, activation='relu'))
model.add(Dense(40, activation='relu'))
model.add(Dense(1))
model.summary()
# Compile the model.
model.compile(optimizer='adam', loss='mean_squared_error')
# Stop automatically if the validation loss does not improve for 20 epochs.
early_stopping_callback = EarlyStopping(monitor='val_loss', patience=20)
# Set the path where the model will be saved.
modelpath = "./data/model/Ch15-house.keras"
# Save the model whenever the validation loss improves on the best so far.
checkpointer = ModelCheckpoint(filepath=modelpath, monitor='val_loss', verbose=0, save_best_only=True)
# Train the model; 25% of the training set (20% of the full data) is held out as the validation set.
history = model.fit(X_train, y_train, validation_split=0.25, epochs=2000, batch_size=32, callbacks=[early_stopping_callback, checkpointer])
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense (Dense) (None, 10) 60 _________________________________________________________________ dense_1 (Dense) (None, 30) 330 _________________________________________________________________ dense_2 (Dense) (None, 40) 1240 _________________________________________________________________ dense_3 (Dense) (None, 1) 41 ================================================================= Total params: 1,671 Trainable params: 1,671 Non-trainable params: 0 _________________________________________________________________ Epoch 1/2000 28/28 [==============================] - 0s 5ms/step - loss: 39256875008.0000 - val_loss: 38050066432.0000 Epoch 2/2000 28/28 [==============================] - 0s 2ms/step - loss: 38973657088.0000 - val_loss: 37643145216.0000 Epoch 3/2000 28/28 [==============================] - 0s 2ms/step - loss: 38286217216.0000 - val_loss: 36586713088.0000 Epoch 4/2000 28/28 [==============================] - 0s 2ms/step - loss: 36632485888.0000 - val_loss: 34174711808.0000 Epoch 5/2000 28/28 [==============================] - 0s 2ms/step - loss: 33148624896.0000 - val_loss: 29543153664.0000 Epoch 6/2000 28/28 [==============================] - 0s 2ms/step - loss: 26954598400.0000 - val_loss: 22077181952.0000 Epoch 7/2000 28/28 [==============================] - 0s 2ms/step - loss: 18263263232.0000 - val_loss: 12826006528.0000 Epoch 8/2000 28/28 [==============================] - 0s 2ms/step - loss: 9259721728.0000 - val_loss: 5357407744.0000 Epoch 9/2000 28/28 [==============================] - 0s 2ms/step - loss: 3547030016.0000 - val_loss: 2336531712.0000 Epoch 10/2000 28/28 [==============================] - 0s 2ms/step - loss: 2140108800.0000 - val_loss: 2084260096.0000 Epoch 11/2000 28/28 [==============================] - 0s 2ms/step - loss: 2092065024.0000 - val_loss: 2082420480.0000 Epoch 12/2000 28/28 [==============================] - 0s 2ms/step - loss: 2087802368.0000 - val_loss: 2082359552.0000 Epoch 13/2000 28/28 [==============================] - 0s 2ms/step - loss: 2089032704.0000 - val_loss: 2079805568.0000 Epoch 14/2000 28/28 [==============================] - 0s 2ms/step - loss: 2084963968.0000 - val_loss: 2078715264.0000 Epoch 15/2000 28/28 [==============================] - 0s 2ms/step - loss: 2080833536.0000 - val_loss: 2075409024.0000 Epoch 16/2000 28/28 [==============================] - 0s 2ms/step - loss: 2077572480.0000 - val_loss: 2073420160.0000 Epoch 17/2000 28/28 [==============================] - 0s 2ms/step - loss: 2078156288.0000 - val_loss: 2072047488.0000 Epoch 18/2000 28/28 [==============================] - 0s 2ms/step - loss: 2071485568.0000 - val_loss: 2070525312.0000 Epoch 19/2000 28/28 [==============================] - 0s 2ms/step - loss: 2075272960.0000 - val_loss: 2069213440.0000 Epoch 20/2000 28/28 [==============================] - 0s 2ms/step - loss: 2063917184.0000 - val_loss: 2071647104.0000 Epoch 21/2000 28/28 [==============================] - 0s 2ms/step - loss: 2069193856.0000 - val_loss: 2065380480.0000 Epoch 22/2000 28/28 [==============================] - 0s 2ms/step - loss: 2065246592.0000 - val_loss: 2067953920.0000 Epoch 23/2000 28/28 [==============================] - 0s 2ms/step - loss: 2066002048.0000 - val_loss: 2062074112.0000 Epoch 24/2000 28/28 [==============================] - 0s 2ms/step - loss: 2055972096.0000 - val_loss: 
2062223616.0000 Epoch 25/2000 28/28 [==============================] - 0s 2ms/step - loss: 2057389696.0000 - val_loss: 2059269504.0000 Epoch 26/2000 28/28 [==============================] - 0s 2ms/step - loss: 2049077760.0000 - val_loss: 2057606016.0000 Epoch 27/2000 28/28 [==============================] - 0s 2ms/step - loss: 2046871552.0000 - val_loss: 2056435968.0000 Epoch 28/2000 28/28 [==============================] - 0s 2ms/step - loss: 2045918080.0000 - val_loss: 2055096704.0000 Epoch 29/2000 28/28 [==============================] - 0s 2ms/step - loss: 2050693760.0000 - val_loss: 2053517568.0000 Epoch 30/2000 28/28 [==============================] - 0s 2ms/step - loss: 2041932032.0000 - val_loss: 2053052160.0000 Epoch 31/2000 28/28 [==============================] - 0s 2ms/step - loss: 2042496512.0000 - val_loss: 2052208384.0000 Epoch 32/2000 28/28 [==============================] - 0s 2ms/step - loss: 2038375808.0000 - val_loss: 2049638912.0000 Epoch 33/2000 28/28 [==============================] - 0s 2ms/step - loss: 2037641728.0000 - val_loss: 2048640768.0000 Epoch 34/2000 28/28 [==============================] - 0s 2ms/step - loss: 2031928960.0000 - val_loss: 2050563584.0000 Epoch 35/2000 28/28 [==============================] - 0s 2ms/step - loss: 2036310144.0000 - val_loss: 2047705600.0000 Epoch 36/2000 28/28 [==============================] - 0s 2ms/step - loss: 2029841536.0000 - val_loss: 2045090176.0000 Epoch 37/2000 28/28 [==============================] - 0s 2ms/step - loss: 2034075520.0000 - val_loss: 2045132544.0000 Epoch 38/2000 28/28 [==============================] - 0s 2ms/step - loss: 2025575040.0000 - val_loss: 2042395776.0000 Epoch 39/2000 28/28 [==============================] - 0s 2ms/step - loss: 2026456704.0000 - val_loss: 2041343104.0000 Epoch 40/2000 28/28 [==============================] - 0s 2ms/step - loss: 2022501504.0000 - val_loss: 2040617344.0000 Epoch 41/2000 28/28 [==============================] - 0s 2ms/step - loss: 2023640448.0000 - val_loss: 2039605376.0000 Epoch 42/2000 28/28 [==============================] - 0s 2ms/step - loss: 2022020224.0000 - val_loss: 2038057216.0000 Epoch 43/2000 28/28 [==============================] - 0s 2ms/step - loss: 2019413632.0000 - val_loss: 2037319680.0000 Epoch 44/2000 28/28 [==============================] - 0s 2ms/step - loss: 2022714368.0000 - val_loss: 2036136192.0000 Epoch 45/2000 28/28 [==============================] - 0s 2ms/step - loss: 2021474048.0000 - val_loss: 2035520128.0000 Epoch 46/2000 28/28 [==============================] - 0s 2ms/step - loss: 2013154048.0000 - val_loss: 2034018432.0000 Epoch 47/2000 28/28 [==============================] - 0s 2ms/step - loss: 2020812032.0000 - val_loss: 2038613120.0000 Epoch 48/2000 28/28 [==============================] - 0s 2ms/step - loss: 2020621824.0000 - val_loss: 2034382848.0000 Epoch 49/2000 28/28 [==============================] - 0s 2ms/step - loss: 2009394560.0000 - val_loss: 2032808704.0000 Epoch 50/2000 28/28 [==============================] - 0s 2ms/step - loss: 2014462080.0000 - val_loss: 2031219200.0000 Epoch 51/2000 28/28 [==============================] - 0s 2ms/step - loss: 2012724992.0000 - val_loss: 2030108032.0000 Epoch 52/2000 28/28 [==============================] - 0s 2ms/step - loss: 2003490176.0000 - val_loss: 2029069568.0000 Epoch 53/2000 28/28 [==============================] - 0s 2ms/step - loss: 2005520256.0000 - val_loss: 2028788992.0000 Epoch 54/2000 28/28 [==============================] - 0s 2ms/step - loss: 
1996721024.0000 - val_loss: 2027076480.0000 Epoch 55/2000 28/28 [==============================] - 0s 2ms/step - loss: 1996136064.0000 - val_loss: 2026162432.0000 Epoch 56/2000 28/28 [==============================] - 0s 2ms/step - loss: 2000532736.0000 - val_loss: 2028895744.0000 Epoch 57/2000 28/28 [==============================] - 0s 2ms/step - loss: 1997962240.0000 - val_loss: 2025465088.0000 Epoch 58/2000 28/28 [==============================] - 0s 2ms/step - loss: 1994542080.0000 - val_loss: 2025333120.0000 Epoch 59/2000 28/28 [==============================] - 0s 2ms/step - loss: 1996151552.0000 - val_loss: 2023987584.0000 Epoch 60/2000 28/28 [==============================] - 0s 2ms/step - loss: 1991216896.0000 - val_loss: 2022501120.0000 Epoch 61/2000 28/28 [==============================] - 0s 2ms/step - loss: 1993646848.0000 - val_loss: 2021855616.0000 Epoch 62/2000 28/28 [==============================] - 0s 2ms/step - loss: 2001903488.0000 - val_loss: 2023514624.0000 Epoch 63/2000 28/28 [==============================] - 0s 2ms/step - loss: 1992752512.0000 - val_loss: 2020616320.0000 Epoch 64/2000 28/28 [==============================] - 0s 2ms/step - loss: 1985111168.0000 - val_loss: 2019932416.0000 Epoch 65/2000 28/28 [==============================] - 0s 2ms/step - loss: 1983430528.0000 - val_loss: 2019394176.0000 Epoch 66/2000 28/28 [==============================] - 0s 2ms/step - loss: 1982655360.0000 - val_loss: 2018924288.0000 Epoch 67/2000 28/28 [==============================] - 0s 2ms/step - loss: 1983551360.0000 - val_loss: 2018926464.0000 Epoch 68/2000 28/28 [==============================] - 0s 2ms/step - loss: 1984211456.0000 - val_loss: 2017812608.0000 Epoch 69/2000 28/28 [==============================] - 0s 2ms/step - loss: 1986807296.0000 - val_loss: 2018194816.0000 Epoch 70/2000 28/28 [==============================] - 0s 2ms/step - loss: 1979727360.0000 - val_loss: 2017011072.0000 Epoch 71/2000 28/28 [==============================] - 0s 2ms/step - loss: 1977666048.0000 - val_loss: 2016382976.0000 Epoch 72/2000 28/28 [==============================] - 0s 2ms/step - loss: 1980729600.0000 - val_loss: 2015734528.0000 Epoch 73/2000 28/28 [==============================] - 0s 2ms/step - loss: 1976288256.0000 - val_loss: 2015277312.0000 Epoch 74/2000 28/28 [==============================] - 0s 2ms/step - loss: 1973532672.0000 - val_loss: 2014985728.0000 Epoch 75/2000 28/28 [==============================] - 0s 2ms/step - loss: 1977727488.0000 - val_loss: 2015338880.0000 Epoch 76/2000 28/28 [==============================] - 0s 2ms/step - loss: 1977571200.0000 - val_loss: 2014164096.0000 Epoch 77/2000 28/28 [==============================] - 0s 2ms/step - loss: 1978516480.0000 - val_loss: 2015382656.0000 Epoch 78/2000 28/28 [==============================] - 0s 2ms/step - loss: 1970997888.0000 - val_loss: 2015171456.0000 Epoch 79/2000 28/28 [==============================] - 0s 2ms/step - loss: 1971897856.0000 - val_loss: 2017903232.0000 Epoch 80/2000 28/28 [==============================] - 0s 2ms/step - loss: 1971577088.0000 - val_loss: 2012867328.0000 Epoch 81/2000 28/28 [==============================] - 0s 2ms/step - loss: 1975466496.0000 - val_loss: 2012767488.0000 Epoch 82/2000 28/28 [==============================] - 0s 2ms/step - loss: 1975942400.0000 - val_loss: 2012589952.0000 Epoch 83/2000 28/28 [==============================] - 0s 2ms/step - loss: 1968400384.0000 - val_loss: 2013249024.0000 Epoch 84/2000 28/28 [==============================] - 0s 
2ms/step - loss: 1971393408.0000 - val_loss: 2014398720.0000 Epoch 85/2000 28/28 [==============================] - 0s 2ms/step - loss: 1968194304.0000 - val_loss: 2011679872.0000 Epoch 86/2000 28/28 [==============================] - 0s 2ms/step - loss: 1971006848.0000 - val_loss: 2014609664.0000 Epoch 87/2000 28/28 [==============================] - 0s 2ms/step - loss: 1962764160.0000 - val_loss: 2011645056.0000 Epoch 88/2000 28/28 [==============================] - 0s 2ms/step - loss: 1971817600.0000 - val_loss: 2013581952.0000 Epoch 89/2000 28/28 [==============================] - 0s 2ms/step - loss: 1964001408.0000 - val_loss: 2012368128.0000 Epoch 90/2000 28/28 [==============================] - 0s 2ms/step - loss: 1963850624.0000 - val_loss: 2010386816.0000 Epoch 91/2000 28/28 [==============================] - 0s 2ms/step - loss: 1976837888.0000 - val_loss: 2011142016.0000 Epoch 92/2000 28/28 [==============================] - 0s 2ms/step - loss: 1967519104.0000 - val_loss: 2011235200.0000 Epoch 93/2000 28/28 [==============================] - 0s 2ms/step - loss: 1974522752.0000 - val_loss: 2014482432.0000 Epoch 94/2000 28/28 [==============================] - 0s 2ms/step - loss: 1961725312.0000 - val_loss: 2009963264.0000 Epoch 95/2000 28/28 [==============================] - 0s 2ms/step - loss: 1991654784.0000 - val_loss: 2010047872.0000 Epoch 96/2000 28/28 [==============================] - 0s 2ms/step - loss: 1968258816.0000 - val_loss: 2011137536.0000 Epoch 97/2000 28/28 [==============================] - 0s 2ms/step - loss: 1964474240.0000 - val_loss: 2014822400.0000 Epoch 98/2000 28/28 [==============================] - 0s 2ms/step - loss: 1959593984.0000 - val_loss: 2009040640.0000 Epoch 99/2000 28/28 [==============================] - 0s 2ms/step - loss: 1972654208.0000 - val_loss: 2012499712.0000 Epoch 100/2000 28/28 [==============================] - 0s 2ms/step - loss: 1961606784.0000 - val_loss: 2008893824.0000 Epoch 101/2000 28/28 [==============================] - 0s 2ms/step - loss: 1957312640.0000 - val_loss: 2009353344.0000 Epoch 102/2000 28/28 [==============================] - 0s 2ms/step - loss: 1966720384.0000 - val_loss: 2013813504.0000 Epoch 103/2000 28/28 [==============================] - 0s 2ms/step - loss: 1965130752.0000 - val_loss: 2008940800.0000 Epoch 104/2000 28/28 [==============================] - 0s 2ms/step - loss: 1961052288.0000 - val_loss: 2008467712.0000 Epoch 105/2000 28/28 [==============================] - 0s 2ms/step - loss: 1959811712.0000 - val_loss: 2008732544.0000 Epoch 106/2000 28/28 [==============================] - 0s 2ms/step - loss: 1955101568.0000 - val_loss: 2008449664.0000 Epoch 107/2000 28/28 [==============================] - 0s 2ms/step - loss: 1955417088.0000 - val_loss: 2008361088.0000 Epoch 108/2000 28/28 [==============================] - 0s 2ms/step - loss: 1961396096.0000 - val_loss: 2008428544.0000 Epoch 109/2000 28/28 [==============================] - 0s 2ms/step - loss: 1955363840.0000 - val_loss: 2009443328.0000 Epoch 110/2000 28/28 [==============================] - 0s 2ms/step - loss: 1962255616.0000 - val_loss: 2012226560.0000 Epoch 111/2000 28/28 [==============================] - 0s 2ms/step - loss: 1971003264.0000 - val_loss: 2008275584.0000 Epoch 112/2000 28/28 [==============================] - 0s 2ms/step - loss: 1982744064.0000 - val_loss: 2008528000.0000 Epoch 113/2000 28/28 [==============================] - 0s 2ms/step - loss: 1957559424.0000 - val_loss: 2008099072.0000 Epoch 114/2000 28/28 
[==============================] - 0s 2ms/step - loss: 1955127168.0000 - val_loss: 2008033536.0000 Epoch 115/2000 28/28 [==============================] - 0s 2ms/step - loss: 1961434752.0000 - val_loss: 2014499712.0000 Epoch 116/2000 28/28 [==============================] - 0s 2ms/step - loss: 1957375232.0000 - val_loss: 2011667072.0000 Epoch 117/2000 28/28 [==============================] - 0s 2ms/step - loss: 1953398272.0000 - val_loss: 2007985792.0000 Epoch 118/2000 28/28 [==============================] - 0s 2ms/step - loss: 1960678400.0000 - val_loss: 2007955072.0000 Epoch 119/2000 28/28 [==============================] - 0s 2ms/step - loss: 1960155264.0000 - val_loss: 2009542016.0000 Epoch 120/2000 28/28 [==============================] - 0s 2ms/step - loss: 1954212864.0000 - val_loss: 2008314368.0000 Epoch 121/2000 28/28 [==============================] - 0s 2ms/step - loss: 1952613760.0000 - val_loss: 2008600960.0000 Epoch 122/2000 28/28 [==============================] - 0s 2ms/step - loss: 1955046656.0000 - val_loss: 2010091008.0000 Epoch 123/2000 28/28 [==============================] - 0s 2ms/step - loss: 1951067776.0000 - val_loss: 2008035200.0000 Epoch 124/2000 28/28 [==============================] - 0s 2ms/step - loss: 1956455040.0000 - val_loss: 2009811840.0000 Epoch 125/2000 28/28 [==============================] - 0s 2ms/step - loss: 1953920384.0000 - val_loss: 2007924096.0000 Epoch 126/2000 28/28 [==============================] - 0s 2ms/step - loss: 1967028224.0000 - val_loss: 2009794944.0000 Epoch 127/2000 28/28 [==============================] - 0s 2ms/step - loss: 1965339904.0000 - val_loss: 2009009280.0000 Epoch 128/2000 28/28 [==============================] - 0s 2ms/step - loss: 1967171328.0000 - val_loss: 2007939968.0000 Epoch 129/2000 28/28 [==============================] - 0s 2ms/step - loss: 1953992320.0000 - val_loss: 2012665984.0000 Epoch 130/2000
28/28 [==============================] - 0s 2ms/step - loss: 1948087168.0000 - val_loss: 2010243456.0000 Epoch 131/2000 28/28 [==============================] - 0s 2ms/step - loss: 1952553728.0000 - val_loss: 2007966208.0000 Epoch 132/2000 28/28 [==============================] - 0s 2ms/step - loss: 1960191232.0000 - val_loss: 2008038016.0000 Epoch 133/2000 28/28 [==============================] - 0s 2ms/step - loss: 1950433536.0000 - val_loss: 2008432896.0000 Epoch 134/2000 28/28 [==============================] - 0s 2ms/step - loss: 1951147392.0000 - val_loss: 2009483520.0000 Epoch 135/2000 28/28 [==============================] - 0s 2ms/step - loss: 1963911168.0000 - val_loss: 2009271424.0000 Epoch 136/2000 28/28 [==============================] - 0s 2ms/step - loss: 1978464896.0000 - val_loss: 2018575488.0000 Epoch 137/2000 28/28 [==============================] - 0s 2ms/step - loss: 1978488320.0000 - val_loss: 2014206592.0000 Epoch 138/2000 28/28 [==============================] - 0s 2ms/step - loss: 1946758272.0000 - val_loss: 2016159616.0000 Epoch 139/2000 28/28 [==============================] - 0s 2ms/step - loss: 1957642880.0000 - val_loss: 2008652416.0000 Epoch 140/2000 28/28 [==============================] - 0s 2ms/step - loss: 1950573440.0000 - val_loss: 2008302208.0000 Epoch 141/2000 28/28 [==============================] - 0s 2ms/step - loss: 1952018304.0000 - val_loss: 2008199040.0000 Epoch 142/2000 28/28 [==============================] - 0s 2ms/step - loss: 1948704000.0000 - val_loss: 2009249536.0000 Epoch 143/2000 28/28 [==============================] - 0s 2ms/step - loss: 1947969280.0000 - val_loss: 2008606336.0000 Epoch 144/2000 28/28 [==============================] - 0s 2ms/step - loss: 1948913408.0000 - val_loss: 2009052544.0000 Epoch 145/2000 28/28 [==============================] - 0s 2ms/step - loss: 1962943104.0000 - val_loss: 2011970944.0000
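Training stops at epoch 145, once the validation loss has gone 20 epochs without improving. The checkpoint callback has been saving the best weights to ./data/model/Ch15-house.keras all along, but the prediction code below uses the model in its final-epoch state. A minimal sketch, assuming the checkpoint file was written as configured above, for reloading and evaluating the best saved model instead:

from tensorflow.keras.models import load_model

# Restore the weights with the lowest validation loss, saved by ModelCheckpoint.
best_model = load_model("./data/model/Ch15-house.keras")
# Mean squared error on the held-out test set.
test_mse = best_model.evaluate(X_test, y_test)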
In [10]:
# Prepare empty lists for the real prices, the predicted prices, and the sample index.
real_prices = []
pred_prices = []
X_num = []

# Print the real and predicted prices for 25 test samples.
n_iter = 0
Y_prediction = model.predict(X_test).flatten()
for i in range(25):
    real = y_test[i]
    prediction = Y_prediction[i]
    print("Actual price: {:.2f}, Predicted price: {:.2f}".format(real, prediction))
    real_prices.append(real)
    pred_prices.append(prediction)
    n_iter = n_iter + 1
    X_num.append(n_iter)
Actual price: 262500.00, Predicted price: 240051.36
Actual price: 78000.00, Predicted price: 118369.56
Actual price: 181000.00, Predicted price: 184155.28
Actual price: 118000.00, Predicted price: 130422.16
Actual price: 142953.00, Predicted price: 178254.34
Actual price: 189000.00, Predicted price: 238503.16
Actual price: 185900.00, Predicted price: 178877.31
Actual price: 264561.00, Predicted price: 229130.41
Actual price: 157000.00, Predicted price: 170969.47
Actual price: 205000.00, Predicted price: 199601.78
Actual price: 109900.00, Predicted price: 97085.41
Actual price: 173900.00, Predicted price: 176588.67
Actual price: 97000.00, Predicted price: 78377.69
Actual price: 237000.00, Predicted price: 210369.81
Actual price: 192500.00, Predicted price: 201920.70
Actual price: 212000.00, Predicted price: 199950.33
Actual price: 80000.00, Predicted price: 95282.45
Actual price: 180500.00, Predicted price: 203932.06
Actual price: 135000.00, Predicted price: 229143.97
Actual price: 197000.00, Predicted price: 177454.97
Actual price: 227000.00, Predicted price: 248955.09
Actual price: 180000.00, Predicted price: 154568.53
Actual price: 381000.00, Predicted price: 328032.09
Actual price: 127000.00, Predicted price: 116693.46
Actual price: 485000.00, Predicted price: 357789.88
In [11]:
# Compare the 25 sampled real and predicted prices in a graph.
plt.plot(X_num, pred_prices, label='predicted price')
plt.plot(X_num, real_prices, label='real price')
plt.legend()
plt.show()
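Eyeballing 25 samples gives a feel for the fit, but a single-number summary is easier to compare across models. A hedged follow-up, not part of the original chapter, computing the root mean squared error over the whole test set with NumPy:

# RMSE between the actual and predicted prices on the full test set.
pred = model.predict(X_test).flatten()
rmse = np.sqrt(np.mean((y_test - pred) ** 2))
print("Test RMSE: {:.2f}".format(rmse))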