import os
from tensorflow.keras import layers
from tensorflow.keras import Model
!wget --no-check-certificate https://storage.googleapis.com/mledu-datasets/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5 -O /tmp/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5
from tensorflow.keras.applications.inception_v3 import InceptionV3
local_weights_file = '/tmp/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5'
pre_trained_model = InceptionV3(
    input_shape=(150, 150, 3), include_top=False, weights=None)
pre_trained_model.load_weights(local_weights_file)
for layer in pre_trained_model.layers:
    layer.trainable = False
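# Quick check (not in the original script): with every base layer frozen, the
# pre-trained model should expose no trainable weight tensors.
print('trainable weight tensors in base:', len(pre_trained_model.trainable_weights))  # expected: 0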
last_layer = pre_trained_model.get_layer('mixed7')
print('last layer output shape:', last_layer.output_shape)
last_output = last_layer.output
from tensorflow.keras.optimizers import RMSprop
# Flatten the output layer to 1 dimension
x = layers.Flatten()(last_output)
# Add a fully connected layer with 1,024 hidden units and ReLU activation
x = layers.Dense(1024, activation='relu')(x)
# Add a dropout rate of 0.2
x = layers.Dropout(0.2)(x)
# Add a final sigmoid layer for classification
x = layers.Dense(1, activation='sigmoid')(x)
# Configure and compile the model
model = Model(pre_trained_model.input, x)
model.compile(loss='binary_crossentropy',
              optimizer=RMSprop(learning_rate=0.0001),
              metrics=['acc'])
model.summary()
from tensorflow.keras import layers
from tensorflow.keras import Model
import numpy as np
import pandas as pd
from PIL import Image
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow.keras import Sequential
from tensorflow.keras.layers import (Conv2D, Activation, MaxPool2D, BatchNormalization,
                                     Flatten, Dense, Dropout)
from tensorflow.keras.regularizers import l2
from tensorflow.keras.callbacks import ReduceLROnPlateau
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.applications.vgg16 import VGG16, preprocess_input
from glob import glob
import cv2
import os
base_dir = '/content/fruits'
train_dir = os.path.join(base_dir, 'training')
validation_dir = os.path.join(base_dir, 'test')
# Directory with our training cherry pictures
train_cherry_dir = os.path.join(train_dir, 'cherry')
# Directory with our training infected-strawberry pictures
train_infectedstrawberry_dir = os.path.join(train_dir, 'infectedstrawberry')
# Directory with our validation cherry pictures
validation_cherry_dir = os.path.join(validation_dir, 'cherry')
# Directory with our validation infected-strawberry pictures
validation_infectedstrawberry_dir = os.path.join(validation_dir, 'infectedstrawberry')
train_cherry_fnames = os.listdir(train_cherry_dir)
print(train_cherry_fnames[:10])
train_infectedstrawberry_fnames = os.listdir(train_infectedstrawberry_dir)
train_infectedstrawberry_fnames.sort()
print(train_infectedstrawberry_fnames[:10])
print('total training cherry images:', len(os.listdir(train_cherry_dir)))
print('total training infectedstrawberry images:', len(os.listdir(train_infectedstrawberry_dir)))
print('total validation cherry images:', len(os.listdir(validation_cherry_dir)))
print('total validation infectedstrawberry images:', len(os.listdir(validation_infectedstrawberry_dir)))
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# All images will be rescaled by 1./255
train_datagen = ImageDataGenerator(rescale=1./255)
val_datagen = ImageDataGenerator(rescale=1./255)
# Flow training images in batches of 1 using the train_datagen generator
train_generator = train_datagen.flow_from_directory(
    train_dir,               # This is the source directory for training images
    target_size=(150, 150),  # All images will be resized to 150x150
    batch_size=1,
    # Since we use binary_crossentropy loss, we need binary labels
    class_mode='binary')
# Flow validation images in batches of 1 using the val_datagen generator
validation_generator = val_datagen.flow_from_directory(
    validation_dir,
    target_size=(150, 150),
    batch_size=1,
    class_mode='binary')
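# Optional sanity check (not in the original script): print the label mapping the
# generators inferred from the folder names, since the binary labels follow the
# alphabetical class order picked up by flow_from_directory.
print(train_generator.class_indices)       # expected: {'cherry': 0, 'infectedstrawberry': 1}
print(validation_generator.class_indices)  # should match the training mapping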
history = model.fit(
    train_generator,
    steps_per_epoch=164,  # 164 images = batch_size (1) * steps (164)
    epochs=15,
    validation_data=validation_generator,
    validation_steps=32,  # 32 images = batch_size (1) * steps (32)
    verbose=2)
# Retrieve a list of accuracy results on training and validation data
# sets for each training epoch
acc = history.history['acc']
val_acc = history.history['val_acc']
# Retrieve a list of loss results on training and validation data
# sets for each training epoch
loss = history.history['loss']
val_loss = history.history['val_loss']
# Get number of epochs
epochs = range(len(acc))
# Plot training and validation accuracy per epoch
plt.plot(epochs, acc, label='Training accuracy')
plt.plot(epochs, val_acc, label='Validation accuracy')
plt.title('Training and validation accuracy')
plt.legend()
plt.figure()
# Plot training and validation loss per epoch
plt.plot(epochs, loss, label='Training loss')
plt.plot(epochs, val_loss, label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()
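# Minimal single-image inference sketch (not in the original script): preprocess
# one validation image the same way the generators do (resize to 150x150 and
# rescale by 1./255) and read the sigmoid output. The sample path is simply the
# first file found in the validation cherry folder; with the alphabetical class
# mapping (cherry=0, infectedstrawberry=1), probabilities above 0.5 mean
# 'infectedstrawberry'.
from tensorflow.keras.preprocessing import image
sample_path = os.path.join(validation_cherry_dir, os.listdir(validation_cherry_dir)[0])
img = image.load_img(sample_path, target_size=(150, 150))
x_img = np.expand_dims(image.img_to_array(img) / 255.0, axis=0)  # shape (1, 150, 150, 3)
prob = float(model.predict(x_img)[0][0])
print('predicted:', 'infectedstrawberry' if prob > 0.5 else 'cherry', '| sigmoid =', prob)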
# ---- Second run: same pipeline as above, but with the layers after 'mixed6' unfrozen for fine-tuning ----
import os
from tensorflow.keras import layers
from tensorflow.keras import Model
!wget --no-check-certificate https://storage.googleapis.com/mledu-datasets/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5 -O /tmp/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5
from tensorflow.keras.applications.inception_v3 import InceptionV3
local_weights_file = '/tmp/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5'
pre_trained_model = InceptionV3(
    input_shape=(150, 150, 3), include_top=False, weights=None)
pre_trained_model.load_weights(local_weights_file)
for layer in pre_trained_model.layers:
    layer.trainable = False
last_layer = pre_trained_model.get_layer('mixed7')
print('last layer output shape:', last_layer.output_shape)
last_output = last_layer.output
from tensorflow.keras.optimizers import RMSprop
# Flatten the output layer to 1 dimension
x = layers.Flatten()(last_output)
# Add a fully connected layer with 1,024 hidden units and ReLU activation
x = layers.Dense(1024, activation='relu')(x)
# Add a dropout rate of 0.2
x = layers.Dropout(0.2)(x)
# Add a final sigmoid layer for classification
x = layers.Dense(1, activation='sigmoid')(x)
# Configure and compile the model
model = Model(pre_trained_model.input, x)
model.compile(loss='binary_crossentropy',
              optimizer=RMSprop(learning_rate=0.0001),
              metrics=['acc'])
model.summary()
from tensorflow.keras import layers
from tensorflow.keras import Model
import numpy as np
import pandas as pd
from PIL import Image
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow.keras import Sequential
from tensorflow.keras.layers import (Conv2D, Activation, MaxPool2D, BatchNormalization,
                                     Flatten, Dense, Dropout)
from tensorflow.keras.regularizers import l2
from tensorflow.keras.callbacks import ReduceLROnPlateau
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.applications.vgg16 import VGG16, preprocess_input
from glob import glob
import cv2
import os
base_dir = '/content/fruits'
train_dir = os.path.join(base_dir, 'training')
validation_dir = os.path.join(base_dir, 'test')
# Directory with our training cherry pictures
train_cherry_dir = os.path.join(train_dir, 'cherry')
# Directory with our training infected-strawberry pictures
train_infectedstrawberry_dir = os.path.join(train_dir, 'infectedstrawberry')
# Directory with our validation cherry pictures
validation_cherry_dir = os.path.join(validation_dir, 'cherry')
# Directory with our validation infected-strawberry pictures
validation_infectedstrawberry_dir = os.path.join(validation_dir, 'infectedstrawberry')
train_cherry_fnames = os.listdir(train_cherry_dir)
print(train_cherry_fnames[:10])
train_infectedstrawberry_fnames = os.listdir(train_infectedstrawberry_dir)
train_infectedstrawberry_fnames.sort()
print(train_infectedstrawberry_fnames[:10])
print('total training cherry images:', len(os.listdir(train_cherry_dir)))
print('total training infectedstrawberry images:', len(os.listdir(train_infectedstrawberry_dir)))
print('total validation cherry images:', len(os.listdir(validation_cherry_dir)))
print('total validation infectedstrawberry images:', len(os.listdir(validation_infectedstrawberry_dir)))
from tensorflow.keras.optimizers import SGD
unfreeze = False
# Unfreeze all layers after "mixed6"
for layer in pre_trained_model.layers:
    if unfreeze:
        layer.trainable = True
    if layer.name == 'mixed6':
        unfreeze = True
# As an optimizer, here we will use SGD
# with a very low learning rate (0.00001)
model.compile(loss='binary_crossentropy',
              optimizer=SGD(
                  learning_rate=0.00001,
                  momentum=0.9),
              metrics=['acc'])
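# ReduceLROnPlateau is imported above but never used; a minimal sketch of how it
# could be wired in (not part of the original script): shrink the learning rate
# whenever the validation loss stops improving, then pass the callback to
# model.fit via callbacks=[lr_schedule]. The factor/patience values are assumptions.
lr_schedule = ReduceLROnPlateau(monitor='val_loss',
                                factor=0.5,   # halve the learning rate
                                patience=2,   # after 2 epochs without improvement
                                min_lr=1e-7,
                                verbose=1)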
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# All images will be rescaled by 1./255
train_datagen = ImageDataGenerator(rescale=1./255)
val_datagen = ImageDataGenerator(rescale=1./255)
# Flow training images in batches of 1 using the train_datagen generator
train_generator = train_datagen.flow_from_directory(
    train_dir,               # This is the source directory for training images
    target_size=(150, 150),  # All images will be resized to 150x150
    batch_size=1,
    # Since we use binary_crossentropy loss, we need binary labels
    class_mode='binary')
# Flow validation images in batches of 1 using the val_datagen generator
validation_generator = val_datagen.flow_from_directory(
    validation_dir,
    target_size=(150, 150),
    batch_size=1,
    class_mode='binary')
history = model.fit(
    train_generator,
    steps_per_epoch=164,  # 164 images = batch_size (1) * steps (164)
    epochs=15,
    validation_data=validation_generator,
    validation_steps=32,  # 32 images = batch_size (1) * steps (32)
    verbose=2)
# Retrieve a list of accuracy results on training and validation data
# sets for each training epoch
acc = history.history['acc']
val_acc = history.history['val_acc']
# Retrieve a list of loss results on training and validation data
# sets for each training epoch
loss = history.history['loss']
val_loss = history.history['val_loss']
# Get number of epochs
epochs = range(len(acc))
# Plot training and validation accuracy per epoch
plt.plot(epochs, acc, label='Training accuracy')
plt.plot(epochs, val_acc, label='Validation accuracy')
plt.title('Training and validation accuracy')
plt.legend()
plt.figure()
# Plot training and validation loss per epoch
plt.plot(epochs, loss, label='Training loss')
plt.plot(epochs, val_loss, label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()
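# Saving the fine-tuned model is not part of the original script; a minimal
# sketch, with a placeholder output path:
model.save('/content/fruits_inceptionv3_finetuned.h5')
# The saved model can later be reloaded with tf.keras.models.load_model(...) for inference.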
