Friday, March 25, 2022

Building your first Neural Network in a couple of lines of code with Deep Learning in Python

Code 1

(.env) [boris@fedora34server DATASET]$ cat makeAdamRegL2.py

import pandas as pd
import matplotlib.pyplot as plt
from sklearn import preprocessing
from sklearn.model_selection import train_test_split
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras import regularizers
from tensorflow.keras.optimizers import Adam

# housepricedata.csv (from the freecodecamp tutorial referenced below):
# ten numeric feature columns followed by one binary target column
df = pd.read_csv('housepricedata.csv')
# print(df)

dataset = df.values
X = dataset[:, 0:10]
Y = dataset[:, 10]

# Scale every feature into the [0, 1] range
min_max_scaler = preprocessing.MinMaxScaler()
X_scale = min_max_scaler.fit_transform(X)
# print(X_scale)

# test_size=0.3 tells scikit-learn that val_and_test will be 30% of the
# overall dataset. The split is stored in the four variables on the left
# of the equals sign, as their names suggest.
X_train, X_val_and_test, Y_train, Y_val_and_test = train_test_split(X_scale, Y, test_size=0.3)

# Split val_and_test equally between the validation set and the test set,
# i.e. 15% validation and 15% test of the overall dataset
X_val, X_test, Y_val, Y_test = train_test_split(X_val_and_test, Y_val_and_test, test_size=0.5)
# print(X_train.shape, X_val.shape, X_test.shape, Y_train.shape, Y_val.shape, Y_test.shape)


# Four hidden layers of 1,000 ReLU units each, every layer L2-regularized
# and followed by 30% dropout; one sigmoid unit for the binary output
model_3 = Sequential([
    Dense(1000, activation='relu', kernel_regularizer=regularizers.l2(0.01), input_shape=(10,)),
    Dropout(0.3),
    Dense(1000, activation='relu', kernel_regularizer=regularizers.l2(0.01)),
    Dropout(0.3),
    Dense(1000, activation='relu', kernel_regularizer=regularizers.l2(0.01)),
    Dropout(0.3),
    Dense(1000, activation='relu', kernel_regularizer=regularizers.l2(0.01)),
    Dropout(0.3),
    Dense(1, activation='sigmoid', kernel_regularizer=regularizers.l2(0.01)),
])
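
# Optional sanity check, in the spirit of the commented-out prints above:
# model_3.summary()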

adam = Adam()
model_3.compile(optimizer=adam,
                loss='binary_crossentropy',
                metrics=['acc'])

hist_3 = model_3.fit(X_train, Y_train,
                     batch_size=32, epochs=100,
                     validation_data=(X_val, Y_val))
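
# A minimal follow-up sketch: score the trained model on the held-out
# test split created above; evaluate() returns [loss, accuracy] here
test_loss, test_acc = model_3.evaluate(X_test, Y_test)
print('Test loss:', test_loss, 'Test accuracy:', test_acc)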

plt.figure(1)

plt.subplot(211)
plt.plot(hist_3.history['acc'])
plt.plot(hist_3.history['val_acc'])
plt.title('Model Accuracy')
plt.ylabel('Accuracy')
plt.xlabel('Epoch')
plt.legend(['Training', 'Validation'], loc='lower right')

# summarize history for loss
plt.subplot(212)
plt.plot(hist_3.history['loss'])
plt.plot(hist_3.history['val_loss'])
plt.title('Model Loss')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend(['Training', 'Validation'], loc='upper right')

plt.tight_layout()
plt.show()
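
Because the output layer is a single sigmoid unit, predict() returns probabilities in [0, 1]. Here is a quick sketch, assuming the script above has just run so model_3 and X_test are still in scope, for turning those probabilities into class labels:

Code 2

predictions = model_3.predict(X_test)                     # probabilities in [0, 1]
predicted_classes = (predictions > 0.5).astype('int32')   # threshold at 0.5
print(predicted_classes[:10].flatten())                   # first ten predicted labels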

References

https://www.freecodecamp.org/news/how-to-build-your-first-neural-network-to-predict-house-prices-with-keras-f8db83049159/

https://www.codesofinterest.com/2020/01/fixing-keyerror-acc-valacc-keras.html

