Even though I mounted my Google Drive (with my dataset in it) in Google Colab, when I run my code I get this error:

FileNotFoundError: [Errno 2] No such file or directory: 'content/drive/My Drive/....

I have already mounted the drive in Colab and I can browse it from the notebook, but running the code below still raises the error.
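I mounted the drive with the standard Colab helper and confirmed I can read it (a minimal sketch of those cells, assuming the default /content/drive mount point):

from google.colab import drive
import os

# Mount Google Drive at the default Colab mount point.
drive.mount('/content/drive')

# Listing the folder works, so the mount itself is fine.
print(os.listdir('/content/drive/My Drive'))

Here is the full code: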
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense

# Build a small CNN for binary classification on 64x64 RGB images.
model = Sequential()
model.add(Conv2D(32, (3, 3), input_shape=(64, 64, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(32, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(128, activation='relu'))  # 'units' replaces the deprecated 'output_dim' argument
model.add(Dense(1, activation='sigmoid'))
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
from keras.preprocessing.image import ImageDataGenerator

# Augment the training images; only rescale the test images.
train_datagen = ImageDataGenerator(
    rescale=1./255,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True)
test_datagen = ImageDataGenerator(rescale=1./255)

# This is the call that raises the FileNotFoundError.
training_set = train_datagen.flow_from_directory(
    directory='content/drive/My Drive/Convolutional_Neural_Networks/dataset/training_set',
    target_size=(64, 64),
    batch_size=32,
    class_mode='binary')
test_set = test_datagen.flow_from_directory(
    directory='content/drive/My Drive/Convolutional_Neural_Networks/dataset/test_set',
    target_size=(64, 64),
    batch_size=32,
    class_mode='binary')
# Train. Keras 2 counts steps (batches) rather than samples:
# 8000 samples / batch size 32 = 250 steps, 1000 / 32 ≈ 32 validation steps.
model.fit_generator(
    training_set,
    steps_per_epoch=250,
    epochs=2,
    validation_data=test_set,
    validation_steps=32)
import numpy as np
from keras.preprocessing import image

# Predict a single image with the trained model.
test_image = image.load_img('sunn.jpg', target_size=(64, 64))
test_image = image.img_to_array(test_image) / 255.0  # same rescaling as the generators
test_image = np.expand_dims(test_image, axis=0)
result = model.predict(test_image)
print(training_set.class_indices)  # shows which class maps to 0 and which to 1
if result[0][0] >= 0.5:
    prediction = 'dog'
else:
    prediction = 'cat'
print(prediction)
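For reference, the dataset is laid out the way flow_from_directory expects, with one subfolder per class (the cats/dogs names below match the labels used in the prediction step):

dataset/
    training_set/
        cats/
        dogs/
    test_set/
        cats/
        dogs/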