141 changes: 141 additions & 0 deletions wargbootcamp.ipynb
@@ -0,0 +1,141 @@
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": [],
"collapsed_sections": []
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
}
},
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "o7Rn9olzZu2j"
},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt \n",
"import numpy as np \n",
"import pandas as pd\n",
"\n",
"import tensorflow as tf\n",
"from tensorflow.keras import datasets, layers, models\n"
]
},
{
"cell_type": "code",
"source": [
"# defining test and train data and importing the dataset\n",
"(X_train, y_train), (X_test,y_test) = datasets.cifar10.load_data()"
],
"metadata": {
"id": "LLFckBO-Z3HE"
},
"execution_count": null,
"outputs": []
},
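{
"cell_type": "code",
"source": [
"# quick sanity check (added for illustration): CIFAR-10 ships as\n",
"# 50,000 training and 10,000 test images of shape 32x32x3, with integer labels\n",
"print(X_train.shape, y_train.shape)\n",
"print(X_test.shape, y_test.shape)"
],
"metadata": {},
"execution_count": null,
"outputs": []
},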
{
"cell_type": "code",
"source": [
"#naming the classes\n",
"classes = [\"airplane\", \"automobile\", \"bird\", \"cat\", \"deer\", \"dog\", \"frog\", \"horse\", \"ship\", \"truck\"]"
],
"metadata": {
"id": "4b3U5Wala_zI"
},
"execution_count": null,
"outputs": []
},
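{
"cell_type": "code",
"source": [
"# added for illustration: show a few training images with their class names\n",
"# (each y_train entry is a length-1 array, hence the [0] index)\n",
"plt.figure(figsize=(8, 2))\n",
"for i in range(5):\n",
"    plt.subplot(1, 5, i + 1)\n",
"    plt.imshow(X_train[i])\n",
"    plt.title(classes[y_train[i][0]])\n",
"    plt.axis('off')\n",
"plt.show()"
],
"metadata": {},
"execution_count": null,
"outputs": []
},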
{
"cell_type": "code",
"source": [
"# normalize the data so pixel value is consistent\n",
"X_train = X_train/255\n",
"X_test = X_test/255"
],
"metadata": {
"id": "EVEFWuJf6-v0"
},
"execution_count": null,
"outputs": []
},
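{
"cell_type": "code",
"source": [
"# added check: after scaling, pixel values should lie in [0, 1]\n",
"print(X_train.min(), X_train.max())"
],
"metadata": {},
"execution_count": null,
"outputs": []
},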
{
"cell_type": "code",
"source": [
"#creating the convolutional neural network\n",
"cnn = models.Sequential ([\n",
" #first cnn layer\n",
" layers.Conv2D(filters=32, kernel_size=(3, 3), activation='relu', input_shape=(32, 32, 3)),\n",
" #pooling the image to normalize pixels across images\n",
" layers.MaxPooling2D((2,2)),\n",
"\n",
" # second cnn layer \n",
" layers.Conv2D(filters=32, kernel_size=(3, 3), activation='relu', input_shape=(32, 32, 3)),\n",
" #pooling the image to normalize pixels across images\n",
" layers.MaxPooling2D((2,2)),\n",
"\n",
" #cnn dense layer\n",
" layers.Flatten(),\n",
" layers.Dense (64, activation='relu'),\n",
" layers.Dense (10, activation='softmax')\n",
"\n",
"])"
],
"metadata": {
"id": "Yyrw3bSAC9uj"
},
"execution_count": null,
"outputs": []
},
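{
"cell_type": "code",
"source": [
"# added for illustration: inspect layer output shapes and parameter counts\n",
"cnn.summary()"
],
"metadata": {},
"execution_count": null,
"outputs": []
},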
{
"cell_type": "code",
"source": [
"#compiling the CNN; \n",
"#using adam optimizer since it has good accuracy\n",
"# using sparse_categorical_crossentropy because it is good for classification\n",
"cnn.compile( optimizer ='adam', \n",
" loss='sparse_categorical_crossentropy', \n",
" metrics=['accuracy'] \n",
")"
],
"metadata": {
"id": "btb3w_pNE2b_"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"source": [
"#running the model on the training data with optimized epochs\n",
"cnn.fit(X_train, y_train, epochs=15)"
],
"metadata": {
"id": "hcR2vMS6F1BA"
},
"execution_count": null,
"outputs": []
},
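{
"cell_type": "code",
"source": [
"# added for illustration: plot training accuracy and loss per epoch\n",
"# (uses the History object stored as `history` by the cell above)\n",
"plt.plot(history.history['accuracy'], label='accuracy')\n",
"plt.plot(history.history['loss'], label='loss')\n",
"plt.xlabel('epoch')\n",
"plt.legend()\n",
"plt.show()"
],
"metadata": {},
"execution_count": null,
"outputs": []
},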
{
"cell_type": "code",
"source": [
"#running the model on the test data with optimized epochs\n",
"cnn.fit(X_test, y_test, epochs=15)"
],
"metadata": {
"id": "DkIuDuBIMhhc"
},
"execution_count": null,
"outputs": []
}
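,
{
"cell_type": "code",
"source": [
"# added for illustration: compare predicted and true classes for a few test images\n",
"# (np.argmax picks the class with the highest predicted probability)\n",
"predictions = cnn.predict(X_test[:5])\n",
"predicted_classes = [classes[np.argmax(p)] for p in predictions]\n",
"true_classes = [classes[label[0]] for label in y_test[:5]]\n",
"print(\"predicted:\", predicted_classes)\n",
"print(\"actual:   \", true_classes)"
],
"metadata": {},
"execution_count": null,
"outputs": []
}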
]
}