CV of Dr. David Burkett | Cardiologist and Electrophysiologist
(Source document: inception.docx — the content below is the notebook code, not a CV.)
import os

from tensorflow.keras import layers
from tensorflow.keras import Model

# Download the pre-trained InceptionV3 weights (notop = no classifier head).
# The original notebook used an IPython shell command (!wget); a stdlib
# download keeps this runnable as a plain Python script.
import urllib.request

WEIGHTS_URL = (
    'https://storage.googleapis.com/mledu-datasets/'
    'inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5'
)
WEIGHTS_PATH = '/tmp/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5'
if not os.path.exists(WEIGHTS_PATH):
    urllib.request.urlretrieve(WEIGHTS_URL, WEIGHTS_PATH)
from tensorflow.keras.applications.inception_v3 import InceptionV3

# Local copy of the downloaded notop InceptionV3 weights.
local_weights_file = '/tmp/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5'

# Build InceptionV3 without its classifier head; weights=None so the
# weights are loaded from the local file instead of downloaded by Keras.
pre_trained_model = InceptionV3(
    input_shape=(150, 150, 3), include_top=False, weights=None)
pre_trained_model.load_weights(local_weights_file)

# Freeze the whole backbone for feature extraction (fine-tuning comes later).
for layer in pre_trained_model.layers:
    layer.trainable = False

# Use the 'mixed7' activation as the feature map feeding the custom head.
last_layer = pre_trained_model.get_layer('mixed7')
print('last layer output shape:', last_layer.output_shape)
last_output = last_layer.output
from tensorflow.keras.optimizers import RMSprop

# Classifier head on top of the frozen InceptionV3 'mixed7' features.
# Flatten the output layer to 1 dimension
x = layers.Flatten()(last_output)
# Add a fully connected layer with 1,024 hidden units and ReLU activation
x = layers.Dense(1024, activation='relu')(x)
# Add a dropout rate of 0.2
x = layers.Dropout(0.2)(x)
# Add a final sigmoid layer for binary classification
x = layers.Dense(1, activation='sigmoid')(x)

# Configure and compile the model.
model = Model(pre_trained_model.input, x)
model.compile(loss='binary_crossentropy',
              # `lr` was deprecated and later removed in TF2 Keras;
              # `learning_rate` is the supported keyword.
              optimizer=RMSprop(learning_rate=0.0001),
              metrics=['acc'])
model.summary()
# Consolidated imports for the data-preparation / training section.
# NOTE(review): the original mixed standalone `keras.*` with
# `tensorflow.keras.*` imports (a version-skew hazard) and had the
# `Dense, Dropout` import line split mid-identifier by the document
# extraction; everything is taken from tensorflow.keras here.
import os
from glob import glob

import cv2
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import tensorflow as tf
from PIL import Image
from tensorflow.keras import Model, Sequential, layers
from tensorflow.keras.applications.vgg16 import VGG16, preprocess_input
from tensorflow.keras.callbacks import ReduceLROnPlateau
from tensorflow.keras.layers import (Activation, BatchNormalization, Conv2D,
                                     Dense, Dropout, Flatten, MaxPool2D)
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.regularizers import l2
# Dataset layout: /content/fruits/{training,test}/{cherry,infectedstrawberry}
base_dir = '/content/fruits'
train_dir = os.path.join(base_dir, 'training')
validation_dir = os.path.join(base_dir, 'test')

# Directory with our training cherry pictures
train_cherry_dir = os.path.join(train_dir, 'cherry')
# Directory with our training infected-strawberry pictures
train_infectedstrawberry_dir = os.path.join(train_dir, 'infectedstrawberry')
# Directory with our validation cherry pictures
validation_cherry_dir = os.path.join(validation_dir, 'cherry')
# Directory with our validation infected-strawberry pictures
validation_infectedstrawberry_dir = os.path.join(validation_dir, 'infectedstrawberry')

# Peek at a few filenames to sanity-check the directory contents.
train_cherry_fnames = os.listdir(train_cherry_dir)
print(train_cherry_fnames[:10])
train_infectedstrawberry_fnames = os.listdir(train_infectedstrawberry_dir)
train_infectedstrawberry_fnames.sort()
print(train_infectedstrawberry_fnames[:10])

print('total training cherry images:', len(os.listdir(train_cherry_dir)))
print('total training infectedstrawberry images:',
      len(os.listdir(train_infectedstrawberry_dir)))
print('total validation cherry images:', len(os.listdir(validation_cherry_dir)))
print('total validation infectedstrawberry images:',
      len(os.listdir(validation_infectedstrawberry_dir)))
from tensorflow.keras.preprocessing.image import ImageDataGenerator

# All images will be rescaled by 1./255
train_datagen = ImageDataGenerator(rescale=1./255)
val_datagen = ImageDataGenerator(rescale=1./255)

# Flow training images from the class subfolders.
# NOTE(review): original comment said "batches of 20" but batch_size is 1.
train_generator = train_datagen.flow_from_directory(
    train_dir,               # source directory for training images
    target_size=(150, 150),  # all images resized to 150x150
    batch_size=1,
    # Since we use binary_crossentropy loss, we need binary labels
    class_mode='binary')

# Flow validation images the same way.
validation_generator = val_datagen.flow_from_directory(
    validation_dir,
    target_size=(150, 150),
    batch_size=1,
    class_mode='binary')
# `Model.fit_generator` is deprecated in TF2; `Model.fit` accepts generators.
history = model.fit(
    train_generator,
    steps_per_epoch=164,   # 164 images per epoch = batch_size (1) * steps
    epochs=15,
    validation_data=validation_generator,
    validation_steps=32,   # 32 images = batch_size (1) * steps
    verbose=2)

# Retrieve a list of accuracy results on training and validation data
# sets for each training epoch ('acc' matches metrics=['acc'] in compile).
acc = history.history['acc']
val_acc = history.history['val_acc']
# Retrieve a list of loss results on training and validation data
# sets for each training epoch
loss = history.history['loss']
val_loss = history.history['val_loss']
# Get number of epochs
epochs = range(len(acc))
# Plot training and validation accuracy per epoch
plt.plot(epochs, acc)
plt.plot(epochs, val_acc)
plt.title('Training and validation accuracy')
plt.figure()

# Plot training and validation loss per epoch
plt.plot(epochs, loss)
plt.plot(epochs, val_loss)
# The original called plt.title twice with the same text; once suffices.
plt.title('Training and validation loss')
plt.show()
--- (The section below repeats the feature-extraction setup above and then adds fine-tuning of the layers after 'mixed6'.) ---
import os

from tensorflow.keras import layers
from tensorflow.keras import Model

# Download the pre-trained InceptionV3 weights (notop = no classifier head).
# The original notebook used an IPython shell command (!wget); a stdlib
# download keeps this runnable as a plain Python script.
import urllib.request

WEIGHTS_URL = (
    'https://storage.googleapis.com/mledu-datasets/'
    'inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5'
)
WEIGHTS_PATH = '/tmp/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5'
if not os.path.exists(WEIGHTS_PATH):
    urllib.request.urlretrieve(WEIGHTS_URL, WEIGHTS_PATH)
from tensorflow.keras.applications.inception_v3 import InceptionV3

# Local copy of the downloaded notop InceptionV3 weights.
local_weights_file = '/tmp/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5'

# Build InceptionV3 without its classifier head; weights=None so the
# weights are loaded from the local file instead of downloaded by Keras.
pre_trained_model = InceptionV3(
    input_shape=(150, 150, 3), include_top=False, weights=None)
pre_trained_model.load_weights(local_weights_file)

# Freeze the whole backbone first; layers after 'mixed6' are unfrozen later.
for layer in pre_trained_model.layers:
    layer.trainable = False

# Use the 'mixed7' activation as the feature map feeding the custom head.
last_layer = pre_trained_model.get_layer('mixed7')
print('last layer output shape:', last_layer.output_shape)
last_output = last_layer.output
from tensorflow.keras.optimizers import RMSprop

# Classifier head on top of the frozen InceptionV3 'mixed7' features.
# Flatten the output layer to 1 dimension
x = layers.Flatten()(last_output)
# Add a fully connected layer with 1,024 hidden units and ReLU activation
x = layers.Dense(1024, activation='relu')(x)
# Add a dropout rate of 0.2
x = layers.Dropout(0.2)(x)
# Add a final sigmoid layer for binary classification
x = layers.Dense(1, activation='sigmoid')(x)

# Configure and compile the model.
model = Model(pre_trained_model.input, x)
model.compile(loss='binary_crossentropy',
              # `lr` was deprecated and later removed in TF2 Keras;
              # `learning_rate` is the supported keyword.
              optimizer=RMSprop(learning_rate=0.0001),
              metrics=['acc'])
model.summary()
# Consolidated imports for the data-preparation / training section.
# NOTE(review): the original mixed standalone `keras.*` with
# `tensorflow.keras.*` imports (a version-skew hazard) and had the
# `Dense, Dropout` import line split mid-identifier by the document
# extraction; everything is taken from tensorflow.keras here.
import os
from glob import glob

import cv2
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import tensorflow as tf
from PIL import Image
from tensorflow.keras import Model, Sequential, layers
from tensorflow.keras.applications.vgg16 import VGG16, preprocess_input
from tensorflow.keras.callbacks import ReduceLROnPlateau
from tensorflow.keras.layers import (Activation, BatchNormalization, Conv2D,
                                     Dense, Dropout, Flatten, MaxPool2D)
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.regularizers import l2
# Dataset layout: /content/fruits/{training,test}/{cherry,infectedstrawberry}
base_dir = '/content/fruits'
train_dir = os.path.join(base_dir, 'training')
validation_dir = os.path.join(base_dir, 'test')

# Directory with our training cherry pictures
train_cherry_dir = os.path.join(train_dir, 'cherry')
# Directory with our training infected-strawberry pictures
train_infectedstrawberry_dir = os.path.join(train_dir, 'infectedstrawberry')
# Directory with our validation cherry pictures
validation_cherry_dir = os.path.join(validation_dir, 'cherry')
# Directory with our validation infected-strawberry pictures
validation_infectedstrawberry_dir = os.path.join(validation_dir, 'infectedstrawberry')

# Peek at a few filenames to sanity-check the directory contents.
train_cherry_fnames = os.listdir(train_cherry_dir)
print(train_cherry_fnames[:10])
train_infectedstrawberry_fnames = os.listdir(train_infectedstrawberry_dir)
train_infectedstrawberry_fnames.sort()
print(train_infectedstrawberry_fnames[:10])

print('total training cherry images:', len(os.listdir(train_cherry_dir)))
print('total training infectedstrawberry images:',
      len(os.listdir(train_infectedstrawberry_dir)))
print('total validation cherry images:', len(os.listdir(validation_cherry_dir)))
print('total validation infectedstrawberry images:',
      len(os.listdir(validation_infectedstrawberry_dir)))
from tensorflow.keras.optimizers import SGD

# Fine-tuning: unfreeze every layer that comes AFTER 'mixed6'.
# ('mixed6' itself stays frozen — the flag flips only after it is seen.)
unfreeze = False
for layer in pre_trained_model.layers:
    if unfreeze:
        layer.trainable = True
    if layer.name == 'mixed6':
        unfreeze = True

# Re-compile with SGD at a very low learning rate (0.00001) so the
# newly unfrozen layers change slowly.
# `lr` was deprecated and later removed in TF2 Keras; use `learning_rate`.
model.compile(loss='binary_crossentropy',
              optimizer=SGD(learning_rate=0.00001, momentum=0.9),
              metrics=['acc'])
from tensorflow.keras.preprocessing.image import ImageDataGenerator

# All images will be rescaled by 1./255
train_datagen = ImageDataGenerator(rescale=1./255)
val_datagen = ImageDataGenerator(rescale=1./255)

# Flow training images from the class subfolders.
# NOTE(review): original comment said "batches of 20" but batch_size is 1.
train_generator = train_datagen.flow_from_directory(
    train_dir,               # source directory for training images
    target_size=(150, 150),  # all images resized to 150x150
    batch_size=1,
    # Since we use binary_crossentropy loss, we need binary labels
    class_mode='binary')

# Flow validation images the same way.
validation_generator = val_datagen.flow_from_directory(
    validation_dir,
    target_size=(150, 150),
    batch_size=1,
    class_mode='binary')
# `Model.fit_generator` is deprecated in TF2; `Model.fit` accepts generators.
history = model.fit(
    train_generator,
    steps_per_epoch=164,   # 164 images per epoch = batch_size (1) * steps
    epochs=15,
    validation_data=validation_generator,
    validation_steps=32,   # 32 images = batch_size (1) * steps
    verbose=2)

# Retrieve a list of accuracy results on training and validation data
# sets for each training epoch ('acc' matches metrics=['acc'] in compile).
acc = history.history['acc']
val_acc = history.history['val_acc']
# Retrieve a list of loss results on training and validation data
# sets for each training epoch
loss = history.history['loss']
val_loss = history.history['val_loss']
# Get number of epochs
epochs = range(len(acc))
# Plot training and validation accuracy per epoch
plt.plot(epochs, acc)
plt.plot(epochs, val_acc)
plt.title('Training and validation accuracy')
plt.figure()

# Plot training and validation loss per epoch
plt.plot(epochs, loss)
plt.plot(epochs, val_loss)
# The original called plt.title twice with the same text; once suffices.
plt.title('Training and validation loss')
plt.show()