Commit

Add python file
insancs authored Oct 14, 2021
1 parent b86066a commit 70ad6a4
Showing 1 changed file with 255 additions and 0 deletions.
255 changes: 255 additions & 0 deletions rockpaperscissors.py
@@ -0,0 +1,255 @@
# -*- coding: utf-8 -*-
"""rockpaperscissors.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/17GrZhVdEjUOOy78uTjcWCHh0MSjla5D3
# Import Libraries
"""

# Commented out IPython magic to ensure Python compatibility.
# import libraries
import numpy as np
import zipfile
import os
import matplotlib.pyplot as plt
from google.colab import files
from tensorflow.keras.preprocessing import image
import matplotlib.image as mpimg
# %matplotlib inline

import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.callbacks import Callback
# Check tensorflow version
print(tf.__version__)

"""# Import Dataset"""

# import datasets
!wget --no-check-certificate \
https://storage.googleapis.com/laurencemoroney-blog.appspot.com/rps.zip \
-O /tmp/rps.zip

!wget --no-check-certificate \
https://storage.googleapis.com/laurencemoroney-blog.appspot.com/rps-test-set.zip \
-O /tmp/rps-test-set.zip
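
# Note: the !wget lines above are IPython shell magic and only run in
# Colab/Jupyter. A minimal pure-Python fallback (a sketch, assuming the same
# URLs remain reachable) that skips files wget already downloaded:
import urllib.request

for url, dest in [
    ('https://storage.googleapis.com/laurencemoroney-blog.appspot.com/rps.zip',
     '/tmp/rps.zip'),
    ('https://storage.googleapis.com/laurencemoroney-blog.appspot.com/rps-test-set.zip',
     '/tmp/rps-test-set.zip'),
]:
    if not os.path.exists(dest):
        urllib.request.urlretrieve(url, dest)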

"""# Extract file"""

# Extract zip file
local_zip = '/tmp/rps.zip'
zip_ref = zipfile.ZipFile(local_zip, 'r')
zip_ref.extractall('/tmp/')
zip_ref.close()

local_zip = '/tmp/rps-test-set.zip'
zip_ref = zipfile.ZipFile(local_zip, 'r')
zip_ref.extractall('/tmp/')
zip_ref.close()

# Check rps directory
os.listdir('/tmp/rps')

# Check rps-test-set directory
os.listdir('/tmp/rps-test-set')
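
# Quick sanity check (a sketch, assuming the archives extract into one
# subfolder per class, as in the original rps dataset): count the images
# available for each class in the training directory.
for class_name in sorted(os.listdir('/tmp/rps')):
    class_dir = os.path.join('/tmp/rps', class_name)
    if os.path.isdir(class_dir):
        print(class_name, ':', len(os.listdir(class_dir)))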

"""# Image Augmentation"""

# image augmentation
train_datagen = ImageDataGenerator(
rescale = 1./255,
rotation_range=40,
width_shift_range=0.2,
height_shift_range=0.2,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True,
fill_mode='nearest',
)

validation_datagen = ImageDataGenerator(
rescale = 1./255,
)

TRAINING_DIR = "/tmp/rps/"
train_generator = train_datagen.flow_from_directory(
TRAINING_DIR,
target_size = (150, 150),
batch_size=32,
class_mode='categorical',
)

VALIDATION_DIR = "/tmp/rps-test-set/"
validation_generator = validation_datagen.flow_from_directory(
VALIDATION_DIR,
target_size = (150, 150),
batch_size = 32,
class_mode = 'categorical',
)
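
# flow_from_directory assigns class indices from the subfolder names in
# alphabetical order; the prediction section below assumes paper = 0,
# rock = 1, scissors = 2, so it is worth printing the mapping to confirm.
print(train_generator.class_indices)
print(validation_generator.class_indices)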

"""# Convolution Neural Network"""

model = tf.keras.models.Sequential([
# Note the input shape is the desired image size, 150x150 with 3 color channels (RGB)
# This is the first convolution
tf.keras.layers.Conv2D(64, (3,3), activation='relu', input_shape=(150, 150, 3)),
tf.keras.layers.MaxPooling2D(2, 2),
# The second convolution
tf.keras.layers.Conv2D(64, (3,3), activation='relu'),
tf.keras.layers.MaxPooling2D(2,2),
# The third convolution
tf.keras.layers.Conv2D(128, (3,3), activation='relu'),
tf.keras.layers.MaxPooling2D(2,2),
# The fourth convolution
tf.keras.layers.Conv2D(128, (3,3), activation='relu'),
tf.keras.layers.MaxPooling2D(2,2),
# Flatten the results to feed into a DNN
tf.keras.layers.Flatten(),
tf.keras.layers.Dropout(0.5),
# 512 neuron hidden layer
tf.keras.layers.Dense(512, activation='relu'),
tf.keras.layers.Dense(3, activation='softmax')
])

# Display the model summary
model.summary()
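
# For reference: with the default 'valid' padding and 2x2 pooling, the feature
# maps shrink as 150 -> 148 -> 74 -> 72 -> 36 -> 34 -> 17 -> 15 -> 7, so the
# Flatten layer outputs 7 * 7 * 128 = 6272 features feeding the 512-unit Dense
# layer. This should match the shapes printed by model.summary() above.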

# Compile model
model.compile(loss = 'categorical_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])

"""# Create Callbacks"""

# Create callbacks
class CustomCallback(Callback):
def on_train_begin(self, logs=None):
print("Starting training")

def on_train_end(self, logs=None):
print("Training has been stopped")
print('Accuracy = %2.2f%%' %(logs['accuracy']*100))

# Reduce the learning rate when val_loss plateaus
reduce_lr = tf.keras.callbacks.ReduceLROnPlateau(
monitor='val_loss',
factor=0.2,
patience=5,
min_lr=1.5e-5
)

# Stop training when val_loss has not improved for 12 epochs and restore the best weights
early_stopping = tf.keras.callbacks.EarlyStopping(
monitor="val_loss",
min_delta=0,
patience=12,
verbose=0,
mode="auto",
baseline=None,
restore_best_weights=True
)

history = model.fit(train_generator,
epochs=30,
steps_per_epoch=20,
validation_data = validation_generator,
verbose = 1,
validation_steps=3,
callbacks=[CustomCallback(), reduce_lr, early_stopping]
)

"""# Model Evaluate"""

# Evaluation of training data
print("Evaluation of training data")
results = model.evaluate(train_generator, batch_size=32)
print('Loss: {:.4f}'.format(results[0]))
print('Accuracy: {:.2f}%'.format(results[1]*100))

# Evaluation of validation data
print("Evaluation of validation data")
results = model.evaluate(validation_generator, batch_size=32)
print('Loss: {:.4f}'.format(results[0]))
print('Accuracy: {:.2f}%'.format(results[1]*100))

"""# Plotting Loss and Accuracy"""

fig, ax = plt.subplots(1, 2)
fig.set_size_inches(12,4)

# Define accuracy
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']

# Define loss
loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(len(acc))

# Plotting accuracy
ax[0].plot(epochs, acc, 'r', label='Training Accuracy')
ax[0].plot(epochs, val_acc, 'b', label='Validation Accuracy')
ax[0].set_title('Training and Validation Accuracy')
ax[0].set_xlabel('Epoch')
ax[0].set_ylabel('Accuracy')
ax[0].legend(loc='upper left')

# Plotting loss
ax[1].plot(epochs, loss, 'r', label='Training Loss')
ax[1].plot(epochs, val_loss, 'b', label='Validation Loss')
ax[1].set_title('Training and Validation Loss')
ax[1].set_xlabel('Epoch')
ax[1].set_ylabel('Loss')
ax[1].legend(loc='upper left')

plt.show()

"""# Model Prediction"""

def predict_image(image_upload, model=model):
    # Rescale pixel values the same way as the training data (1./255)
    im_array = np.asarray(image_upload)
    im_array = im_array * (1. / 255)
    im_input = tf.reshape(im_array, shape=[1, 150, 150, 3])

    # Predict once and reuse the probabilities for both the table and the label
    predict_array = model.predict(im_input)[0]

    import pandas as pd
    df = pd.DataFrame(predict_array, columns=['Probability'])
    # flow_from_directory orders classes alphabetically: paper, rock, scissors
    df['Class'] = ['Paper', 'Rock', 'Scissors']
    df = df[['Class', 'Probability']]

    predict_label = np.argmax(predict_array)

    if predict_label == 0:
        predict_class = 'Paper'
    elif predict_label == 1:
        predict_class = 'Rock'
    else:
        predict_class = 'Scissors'

    return predict_class, df

uploaded = files.upload()

for fn in uploaded.keys():
path = fn
img = image.load_img(path, target_size=(150,150))
imgplot = plt.imshow(img)
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)
img = np.vstack([x])

label, df = predict_image(img)

print('\n')
plt.show()
print("\nThe image is detected as " + label)
print('\n')
print(df)
print('\n')
