Assignment 6.1
In [1]: import numpy as np

import pandas as pd

import matplotlib.pyplot as plt

%matplotlib inline

from keras.datasets import mnist

from keras.models import Sequential

from keras.layers import Dense, Dropout, Activation

from keras.utils import np_utils



np.random.seed(35)
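
Note: from keras.utils import np_utils only works on older Keras releases; in current TensorFlow/Keras the np_utils module has been removed. A minimal equivalent import block, assuming a TensorFlow 2.x install (an added sketch, not the original cell):

# Added sketch for newer TensorFlow/Keras, where keras.utils.np_utils is gone.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Activation
from tensorflow.keras.utils import to_categorical  # replaces np_utils.to_categorical

np.random.seed(35)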

In [2]: (X_train, y_train), (X_test, y_test) = mnist.load_data()

print("X_train original shape", X_train.shape)

print("y_train original shape", y_train.shape)

print("X_test original shape", X_test.shape)

print("y_test original shape", y_test.shape)

In [3]: plt.imshow(X_train[0], cmap='gray')

plt.title(y_train[0])

Out[3]: Text(0.5, 1.0, '5')
[figure: X_train[0] rendered in grayscale, titled '5']
In [4]: X_train = X_train.reshape(60000,784)

X_test = X_test.reshape(10000,784)



X_train = X_train.astype('float32')

X_test = X_test.astype('float32')



X_train/=255

X_test/=255
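
A quick sanity check of the preprocessing (an added illustration, not part of the original run): the images should now be flat 784-value float32 vectors scaled into [0, 1].

# Added sketch: verify shapes, dtype, and value range after scaling.
assert X_train.shape == (60000, 784) and X_test.shape == (10000, 784)
assert X_train.dtype == np.float32
assert X_train.min() >= 0.0 and X_train.max() <= 1.0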

In [5]: number_of_classes = 10



Y_train = np_utils.to_categorical(y_train, number_of_classes)

Y_test = np_utils.to_categorical(y_test, number_of_classes)



y_train[0], Y_train[0]

Out[5]: (5, array([0., 0., 0., 0., 0., 1., 0., 0., 0., 0.], dtype=float32))
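
For reference, the one-hot rows can be mapped back to integer labels with np.argmax. The following check is an added illustration (not a cell from the original run) and assumes it runs before the shuffling in the next cell, while y_train and Y_train are still aligned:

# Added sketch: invert the one-hot encoding and confirm it matches y_train.
decoded = np.argmax(Y_train, axis=1)
assert (decoded == y_train).all()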

In [6]: #shuffle the training set

for _ in range(5):
    indexes = np.random.permutation(len(X_train))
    X_train = X_train[indexes]
    Y_train = Y_train[indexes]



#set aside 10,000 for validation

val_images = X_train[:10000,:]

val_labels = Y_train[:10000,:]



# leave rest in training set

train_images = X_train[10000:,:]

train_labels = Y_train[10000:,:]



train_images.shape, val_images.shape

Out[6]: ((50000, 784), (10000, 784))
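
The manual shuffle-and-slice above could equivalently be done in one call with scikit-learn's train_test_split; this is an added sketch (it assumes scikit-learn is installed and is not part of the original notebook):

# Added sketch: shuffle and split off 10,000 validation samples in one step.
from sklearn.model_selection import train_test_split

train_images, val_images, train_labels, val_labels = train_test_split(
    X_train, Y_train, test_size=10000, random_state=35)
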
In [7]: model = Sequential()



model.add(Dense(512, input_dim=784))

# An "activation" is just a non-linear function applied to the output

# of the layer above. Here, with a "rectified linear unit",

# we clamp all values below 0 to 0.

model.add(Activation('relu'))

# Dropout helps protect the model from memorizing or "overfitting" the training data
model.add(Dropout(0.2))



model.add(Dense(512))

model.add(Activation('relu'))

model.add(Dropout(0.2))



model.add(Dense(512))

model.add(Activation('relu'))

model.add(Dropout(0.2))



model.add(Dense(10))

# This special "softmax" activation among other things,

# ensures the output is a valid probaility distribution, that is

# that its values are all non-negative and sum to 1.

model.add(Activation('softmax'))
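
The same three-hidden-layer architecture can be written more compactly by passing the activation to each Dense layer; this is an equivalent added sketch, not the notebook's original cell:

# Added sketch: identical architecture, activations given as Dense arguments.
model = Sequential([
    Dense(512, activation='relu', input_dim=784),
    Dropout(0.2),
    Dense(512, activation='relu'),
    Dropout(0.2),
    Dense(512, activation='relu'),
    Dropout(0.2),
    Dense(10, activation='softmax'),
])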

In [8]: model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
In [9]: history = model.fit(train_images, train_labels, epochs=5, batch_size=128,

                             validation_data=(val_images, val_labels))

Epoch 1/5
391/391 [==============================] - 59s 34ms/step - loss: 0.5052 - accuracy: 0.8394 - val_loss: 0.1180 - val_accuracy: 0.9640
Epoch 2/5
391/391 [==============================] - 12s 30ms/step - loss: 0.1169 - accuracy: 0.9646 - val_loss: 0.1011 - val_accuracy: 0.9705
Epoch 3/5
391/391 [==============================] - 12s 30ms/step - loss: 0.0794 - accuracy: 0.9744 - val_loss: 0.0913 - val_accuracy: 0.9741
Epoch 4/5
391/391 [==============================] - 12s 30ms/step - loss: 0.0661 - accuracy: 0.9792 - val_loss: 0.0850 - val_accuracy: 0.9780
Epoch 5/5
391/391 [==============================] - 12s 30ms/step - loss: 0.0545 - accuracy: 0.9824 - val_loss: 0.0875 - val_accuracy: 0.9783
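
As an aside (an added note, not from the original notebook), Keras can hold out the validation data itself via the validation_split argument of fit(), which takes the last fraction of the training arrays; since X_train was already shuffled above, this would be roughly equivalent to the manual split:

# Added sketch: let model.fit() carve out ~10,000 validation samples itself.
history = model.fit(X_train, Y_train, epochs=5, batch_size=128,
                    validation_split=1/6)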

In [10]: train_loss = history.history['loss']
val_loss = history.history['val_loss']

In [11]: epochs = range(1, len(history.history['loss']) + 1)

In [13]: plt.plot(epochs, train_loss, 'bo', label='Training loss')

plt.plot(epochs, val_loss, 'g', label='Validation loss')

plt.title('Training and Validation Losses')

plt.xlabel('Epochs')

plt.ylabel('Loss')

plt.legend()



plt.savefig('results/6_1_lossplot.png')

plt.show()

In [14]: train_accuracy = history.history['accuracy']

val_accuracy = history.history['val_accuracy']

In [15]: epochs = range(1, len(history.history['accuracy']) + 1)

In [16]: plt.plot(epochs, train_accuracy, 'bo', label='Training Accuracy')

plt.plot(epochs, val_accuracy, 'g', label='Validation Accuracy')

plt.title('Training and Validation Accuracy')

plt.xlabel('Epochs')

plt.ylabel('Accuracy')

plt.legend()



plt.savefig('results/6_1_accuracyplot.png')

plt.show()

In [17]: score = model.evaluate(X_test, Y_test)

print()

print('Test accuracy: ', score[1])

313/313 [==============================] - 2s 5ms/step - loss: 0.0770 - accuracy: 0.9792

Test accuracy:  0.979200005531311

In [18]: predictions = np.argmax(model.predict(X_test), axis=1)


predictions = list(predictions)

actuals = list(y_test)



pred_res = pd.DataFrame({'Actual': actuals, 'Predictions': predictions})

pred_res.to_csv('results/6_1_predictions.csv', index=False)

print(pred_res)

      Actual  Predictions
0          7            7
1          2            2
2          1            1
3          0            0
4          4            4
...      ...          ...
9995       2            2
9996       3            3
9997       4            4
9998       5            5
9999       6            6

[10000 rows x 2 columns]
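
A possible follow-up (an added sketch, not part of the original notebook) is to summarise the predictions as a confusion matrix with pandas:

# Added sketch: confusion matrix of actual vs. predicted digits.
confusion = pd.crosstab(pred_res['Actual'], pred_res['Predictions'])
print(confusion)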

In [19]: # save model

model.save('results/6_1_model.h5')
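
The saved model can be loaded back and re-evaluated to confirm the file round-trips; this is an added sketch (it assumes the same Keras version that wrote the HDF5 file):

# Added sketch: restore the model from disk and re-check test performance.
from keras.models import load_model

restored = load_model('results/6_1_model.h5')
print(restored.evaluate(X_test, Y_test, verbose=0))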

In [20]: #Metrics output

with open('results/6_1_metrics.txt', 'w') as f:
    f.write('Training Loss: {}'.format(str(history.history['loss'])))
    f.write('\nTraining Accuracy: {}'.format(str(history.history['accuracy'])))
    f.write('\nTest Loss: {}'.format(score[0]))
    f.write('\nTest Accuracy: {}'.format(score[1]))

In [ ]:
