"""Train and evaluate a small fully connected network on MNIST.

Loads the MNIST digit dataset via Keras, previews a grid of random
training digits, trains a two-hidden-layer tanh MLP with plain SGD and
categorical cross-entropy, then plots the per-epoch loss and accuracy
curves for the training and test sets.
"""
import random

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import tensorflow as tf

# Seed every RNG in play (Python, NumPy, TensorFlow) for reproducible runs.
random.seed(0)
np.random.seed(seed=0)
tf.random.set_seed(seed=0)

# x_*: uint8 images of shape (N, 28, 28); y_*: integer class labels 0-9.
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()

print(
    f'Training Size - Inputs:{x_train.shape}, Targets:{y_train.shape}'
    f'\nTest Size - Inputs:{x_test.shape}, Targets:{y_test.shape}'
)

# ---- Preview a grid of random training digits ------------------------------
rows = 5
digits_per_row = 5
total_digits = rows * digits_per_row

fig, axes = plt.subplots(nrows=rows, ncols=digits_per_row, figsize=(6, 6))
axes = axes.flatten()

# Sample distinct indices so no digit is shown twice.
random_ids = np.random.choice(x_train.shape[0], total_digits, replace=False)

for i, ax in enumerate(axes):
    idx = random_ids[i]
    ax.imshow(x_train[idx], cmap='gray')
    ax.set_title(f'Class: {y_train[idx]}')
    ax.axis('off')
plt.tight_layout()
plt.show()

# ---- Preprocessing ---------------------------------------------------------
# Flatten each 28x28 image to a 784-vector. Using -1 infers the sample count
# instead of hard-coding 60000/10000, so the script still works if the data
# is ever subset. Scale pixels from [0, 255] to [0.0, 1.0]: raw uint8 values
# saturate the tanh hidden units and stall SGD at this learning rate.
x_train = x_train.reshape((-1, 784)).astype('float32') / 255.0
x_test = x_test.reshape((-1, 784)).astype('float32') / 255.0
# One-hot encode the integer labels to match categorical cross-entropy.
y_train = tf.one_hot(y_train, depth=10)
y_test = tf.one_hot(y_test, depth=10)

print(
    f'Training Size - Inputs:{x_train.shape}, Targets:{y_train.shape}'
    f'\nTest Size - Inputs:{x_test.shape}, Targets:{y_test.shape}'
)

# ---- Model definition ------------------------------------------------------
activation = 'tanh'
loss = 'categorical_crossentropy'  # Do not change this loss function.
metrics = ['accuracy']
learning_rate = 0.001
optimizer = tf.keras.optimizers.SGD(learning_rate=learning_rate)  # Do not change this optimizer.
epochs = 10

model = tf.keras.Sequential([
    tf.keras.layers.Input(shape=(784,), name='input'),
    tf.keras.layers.Dense(units=256, activation=activation, name='hidden-1'),
    tf.keras.layers.Dense(units=256, activation=activation, name='hidden-2'),
    tf.keras.layers.Dense(units=10, activation='softmax', name='outputs')  # Do not change this activation function.
])
model.summary(expand_nested=True)

# ---- Training --------------------------------------------------------------
# The test set doubles as the validation set, so val_* curves track
# generalization after every epoch.
model.compile(optimizer=optimizer, loss=loss, metrics=metrics)
history = model.fit(
    x=x_train,
    y=y_train,
    epochs=epochs,
    validation_data=(x_test, y_test)
)

# ---- Learning curves -------------------------------------------------------
train_loss = history.history['loss']
val_loss = history.history['val_loss']
train_acc = history.history['accuracy']
val_acc = history.history['val_accuracy']

plt.plot(train_loss, label='Train Loss')
plt.plot(val_loss, label='Validation Loss')
plt.title('Neural Network Loss per epoch')
plt.ylabel('Categorical Cross-Entropy')
plt.xlabel('Epochs')
plt.xlim(0, epochs)
plt.ylim(0, 1)
plt.legend()
plt.show()

plt.plot(train_acc, label='Train Accuracy')
plt.plot(val_acc, label='Validation Accuracy')
plt.title('Neural Network Accuracy per epoch')
plt.ylabel('Accuracy')
plt.xlabel('Epochs')
plt.xlim(0, epochs)
plt.ylim(0, 1)
plt.legend()
plt.show()