"""
Convolutional Neural Network (CNN) for MNIST Classification

Goal: This script builds a deep CNN to classify the MNIST dataset using TensorFlow and Keras. It leverages
    convolutional layers for feature extraction and pooling layers for down-sampling, followed by fully
    connected layers for classification.

Objectives:
- Load and preprocess MNIST data (reshape for CNN input).
- Build a CNN with multiple convolutional, pooling, and batch normalization layers.
- Train the CNN, evaluate its accuracy, and display model performance.
"""


import tensorflow as tf
from tensorflow.keras import layers, models
from tensorflow.keras.datasets import mnist
from tensorflow.keras.utils import to_categorical

# Load the MNIST data
(X_train, y_train), (X_test, y_test) = mnist.load_data()

# Reshape to (28, 28, 1) for CNN input and scale pixel values to [0, 1]
X_train = X_train.reshape(-1, 28, 28, 1).astype('float32') / 255
X_test = X_test.reshape(-1, 28, 28, 1).astype('float32') / 255

# Convert labels to one-hot encoding
y_train = to_categorical(y_train, 10)
y_test = to_categorical(y_test, 10)

# Build the CNN model
model = models.Sequential()

# 1st Convolutional Block: convolution, pooling, and batch normalization
model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.BatchNormalization())
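# Feature maps after this block: 13 x 13 x 32 (a valid 3x3 convolution on 28x28 gives 26x26, and 2x2 pooling halves it)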

# 2nd Convolutional Block
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.BatchNormalization())

# 3rd Convolutional Block (no pooling)
model.add(layers.Conv2D(128, (3, 3), activation='relu'))
model.add(layers.BatchNormalization())

# Flatten the feature maps before the fully connected layers
model.add(layers.Flatten())
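# At this point the feature maps are 3 x 3 x 128, so Flatten yields a 1152-dimensional vector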

# Fully connected (Dense) layer with Dropout for regularization
model.add(layers.Dense(128, activation='relu'))
model.add(layers.Dropout(0.5))

# Output layer: one softmax unit per digit class (0-9)
model.add(layers.Dense(10, activation='softmax'))

# Compile the model
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
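# Note: categorical_crossentropy matches the one-hot labels created above;
# with integer labels, sparse_categorical_crossentropy would be used instead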

# Display the model summary
model.summary()

# Train the model
history = model.fit(X_train, y_train, epochs=5, batch_size=32, validation_data=(X_test, y_test))
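# Note: the test set doubles as validation data here for simplicity; a separate
# validation split (e.g. validation_split=0.1 in fit) would keep the test set unseen until the final evaluation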

# Evaluate the model on the test data
test_loss, test_acc = model.evaluate(X_test, y_test, verbose=2)
print(f'\nTest accuracy: {test_acc:.4f}')
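
# Optional sketch (not part of the original script): plot training vs. validation accuracy
# from the History object returned by fit() to visualise performance; assumes matplotlib is installed.
import matplotlib.pyplot as plt

plt.plot(history.history['accuracy'], label='train accuracy')
plt.plot(history.history['val_accuracy'], label='validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.title('CNN training history on MNIST')
plt.show()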