Difference makes the DIFFERENCE
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
%matplotlib inline
df = pd.read_csv('/content/fake_reg.csv')
df.head()
| | price | feature1 | feature2 |
|---|---|---|---|
| 0 | 461.527929 | 999.787558 | 999.766096 |
| 1 | 548.130011 | 998.861615 | 1001.042403 |
| 2 | 410.297162 | 1000.070267 | 998.844015 |
| 3 | 540.382220 | 999.952251 | 1000.440940 |
| 4 | 546.024553 | 1000.446011 | 1000.338531 |
# treat this as a regression problem: predict the price
# from feature1 and feature2
sns.pairplot(df)
<seaborn.axisgrid.PairGrid at 0x7fa2d26a5ed0>
# create a train/test split
from sklearn.model_selection import train_test_split
# and convert the dataset to NumPy arrays, since Keras works most reliably
# with plain arrays rather than pandas DataFrames or Series
X = df[['feature1', 'feature2']].values
y = df['price'].values
X
array([[ 999.78755752,  999.7660962 ],
       [ 998.86161491, 1001.04240315],
       [1000.07026691,  998.84401463],
       ...,
       [1001.45164617,  998.84760554],
       [1000.77102275,  998.56285086],
       [ 999.2322436 , 1001.45140713]])
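For reference, df[['feature1', 'feature2']].to_numpy() is the newer pandas spelling of .values (pandas >= 0.24) and produces the same arrays:

# equivalent conversion with the newer pandas API
X = df[['feature1', 'feature2']].to_numpy()
y = df['price'].to_numpy()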
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.30, random_state=42)
X_train.shape
(700, 2)
X_test.shape
(300, 2)
# normalise or scale the dataset
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
# help(MinMaxScaler)
# if required, read through the help section by typing help(MinMaxScaler)
# scale only the features, not the label
scaler.fit(X_train)
MinMaxScaler()
X_train = scaler.transform(X_train)
X_test = scaler.transform(X_test)
X_train.min()
0.0
X_train.max()
1.0
X_train
array([[0.74046017, 0.32583248],
       [0.43166001, 0.2555088 ],
       [0.18468554, 0.70500664],
       ...,
       [0.54913363, 0.79933822],
       [0.2834197 , 0.38818708],
       [0.56282703, 0.42371827]])
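As a sanity check, the scaling range was learned from the training data only (so there is no leakage from the test set); data_min_ and data_max_ are standard MinMaxScaler attributes:

# per-feature range learned from X_train during fit;
# X_test may therefore fall slightly outside [0, 1]
print(scaler.data_min_)
print(scaler.data_max_)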
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
model = Sequential()
model.add(Dense(4, activation='relu'))
model.add(Dense(4, activation='relu'))
model.add(Dense(4, activation='relu'))
model.add(Dense(1))
model.compile(optimizer="rmsprop", loss='mse')
model.fit(x=X_train, y=y_train, epochs=250)
Epoch 1/250
22/22 [==============================] - 0s 2ms/step - loss: 24.2021
Epoch 2/250
22/22 [==============================] - 0s 2ms/step - loss: 24.2882
...
Epoch 249/250
22/22 [==============================] - 0s 2ms/step - loss: 24.4584
Epoch 250/250
22/22 [==============================] - 0s 2ms/step - loss: 24.2388
<keras.callbacks.History at 0x7fa25bc759d0>
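Once fit() has run, the network is built and its layers can be inspected. With 2 input features the parameter counts work out to 2*4+4 = 12, 4*4+4 = 20, another 20, and 4*1+1 = 5, i.e. 57 trainable parameters in total:

model.summary()  # layer types, output shapes, and the 57-parameter total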
model.history.history
{'loss': [256572.046875, 256367.515625, 256177.96875, 255969.921875, ..., 24.25486946105957, 24.304670333862305, 24.236284255981445, 23.766464233398438]}
loss_df = pd.DataFrame(model.history.history)
loss_df.plot()
<matplotlib.axes._subplots.AxesSubplot at 0x7fa25bddc850>
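Because the loss falls from roughly 2.6e5 down to about 24, a logarithmic y-axis makes the curve much easier to read; a minimal variant using pandas' logy flag:

loss_df.plot(logy=True)   # same loss curve on a log scale
plt.ylabel('MSE loss')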
model.evaluate(X_test, y_test, verbose=0)
25.009714126586914
model.evaluate(X_train, y_train, verbose=0)
24.01677703857422
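Both numbers are the compiled loss (MSE). The test value (~25.0) is close to the train value (~24.0), so the model is not obviously overfitting; a small sketch printing them side by side (train_mse and test_mse are just illustrative names):

train_mse = model.evaluate(X_train, y_train, verbose=0)
test_mse = model.evaluate(X_test, y_test, verbose=0)
print(f'train MSE: {train_mse:.2f}, test MSE: {test_mse:.2f}')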
test_predictions = model.predict(X_test)
test_predictions
array([[405.06985],
       [623.33136],
       [591.92316],
       ...,
       [609.4433 ],
       [416.73828],
       [410.65424]], dtype=float32)
test_predictions = pd.Series(test_predictions.reshape(300,))
pred_df = pd.DataFrame(y_test, columns=['Test True Y'])
pred_df = pd.concat([pred_df, test_predictions], axis=1)
pred_df
| | Test True Y | 0 |
|---|---|---|
| 0 | 402.296319 | 405.069855 |
| 1 | 624.156198 | 623.331360 |
| 2 | 582.455066 | 591.923157 |
| 3 | 578.588606 | 572.012451 |
| 4 | 371.224104 | 366.389343 |
| ... | ... | ... |
| 295 | 525.704657 | 528.829346 |
| 296 | 502.909473 | 515.108765 |
| 297 | 612.727910 | 609.443298 |
| 298 | 417.569725 | 416.738281 |
| 299 | 410.538250 | 410.654236 |

300 rows × 2 columns
pred_df.columns = ['Test True Y', 'Model Predictions']
plt.figure(figsize=(10, 6))
sns.scatterplot(data=pred_df, x='Test True Y', y='Model Predictions')
<matplotlib.axes._subplots.AxesSubplot at 0x7fa255ee1310>
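Another quick diagnostic is the distribution of prediction errors, which should be roughly centred on zero for an unbiased model; a minimal sketch (sns.displot assumes seaborn >= 0.11, and errors is just an illustrative name):

errors = pred_df['Test True Y'] - pred_df['Model Predictions']
sns.displot(errors, bins=25)   # error histogram; roughly zero-centred here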
from sklearn.metrics import mean_absolute_error, mean_squared_error
mean_absolute_error(pred_df['Test True Y'], pred_df['Model Predictions'])
3.9980359288852494
# put the MAE in context against the scale of the target:
# an MAE of ~4 on prices with mean ~499 and std ~94 is under 1% average error
df.describe()
| | price | feature1 | feature2 |
|---|---|---|---|
| count | 1000.000000 | 1000.000000 | 1000.000000 |
| mean | 498.673029 | 1000.014171 | 999.979847 |
| std | 93.785431 | 0.974018 | 0.948330 |
| min | 223.346793 | 997.058347 | 996.995651 |
| 25% | 433.025732 | 999.332068 | 999.316106 |
| 50% | 502.382117 | 1000.009915 | 1000.002243 |
| 75% | 564.921588 | 1000.637580 | 1000.645380 |
| max | 774.407854 | 1003.207934 | 1002.666308 |
mean_squared_error(pred_df['Test True Y'], pred_df['Model Predictions'])
24.94500463893638
# RMSE is the square root of MSE, i.e. MSE raised to the power 0.5
mean_squared_error(pred_df['Test True Y'], pred_df['Model Predictions']) ** 0.5
4.994497436072661
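Equivalently, recent scikit-learn versions accept a squared flag (added in 0.22; newer releases also provide a separate root_mean_squared_error function) that returns the RMSE directly:

# RMSE in one call instead of taking the square root manually
mean_squared_error(pred_df['Test True Y'], pred_df['Model Predictions'], squared=False)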
# predicting on a brand-new data point: scale it with the same scaler fit on the training data
new_gem = [[998, 1000]]
scaler.transform(new_gem)
array([[0.14117652, 0.53968792]])
new_gem = scaler.transform(new_gem)
model.predict(new_gem)
array([[419.4626]], dtype=float32)
# to save a model that is doing well, call model.save(); load_model from
# tensorflow.keras.models restores it later
from tensorflow.keras.models import load_model
model.save('new_mygem')
INFO:tensorflow:Assets written to: new_mygem/assets
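The saved model can be restored with the load_model import above; the reloaded network should reproduce the prediction for new_gem (later_model is just an illustrative name):

later_model = load_model('new_mygem')   # restore from the SavedModel directory
later_model.predict(new_gem)            # expected to match the original model's output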
# ** LAST CELL ** #