Fruits-360 classification — training a small Keras CNN on the Kaggle Fruits-360 dataset
from pathlib import Path

import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout
from tensorflow.keras.optimizers import Adam
2024-09-11 15:18:48.720257: I tensorflow/core/util/port.cc:153] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.
2024-09-11 15:18:48.720757: I external/local_xla/xla/tsl/cuda/cudart_stub.cc:32] Could not find cuda drivers on your machine, GPU will not be used.
2024-09-11 15:18:48.733149: I external/local_xla/xla/tsl/cuda/cudart_stub.cc:32] Could not find cuda drivers on your machine, GPU will not be used.
2024-09-11 15:18:48.749215: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:485] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered
2024-09-11 15:18:48.767948: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:8454] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered
2024-09-11 15:18:48.774441: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1452] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
2024-09-11 15:18:48.796444: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
2024-09-11 15:18:50.268827: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
# Define paths
# BUG FIX: the original Windows-style literals
# (r'Documents\kaggle\...\Training') raised FileNotFoundError when the
# notebook ran on Linux (see traceback below) — backslashes are not path
# separators there. Building the paths with pathlib uses the platform's
# native separator, so on Windows the resulting strings are identical to
# the originals while Linux gets valid forward-slash paths.
# TODO(review): confirm the dataset really lives under the current working
# directory; if it lives under the user's home, anchor with Path.home().
_dataset_root = Path('Documents') / 'kaggle' / 'fruits360' / 'fruits-360_dataset' / 'fruits-360'
train_dir = str(_dataset_root / 'Training')
test_dir = str(_dataset_root / 'Test')
# Image data generators for loading and augmenting the images.
# Training images get light augmentation (shear, zoom, horizontal flip);
# test images are only rescaled to [0, 1].
train_datagen = ImageDataGenerator(
    rescale=1. / 255,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
)
test_datagen = ImageDataGenerator(rescale=1. / 255)

# Both splits are read identically: 100x100 RGB images in batches of 32
# with one-hot (categorical) labels, one class per subdirectory.
_flow_options = dict(
    target_size=(100, 100),
    batch_size=32,
    class_mode='categorical',
)
train_generator = train_datagen.flow_from_directory(train_dir, **_flow_options)
test_generator = test_datagen.flow_from_directory(test_dir, **_flow_options)
---------------------------------------------------------------------------
FileNotFoundError Traceback (most recent call last)
Cell In[3], line 11
2 train_datagen = ImageDataGenerator(
3 rescale=1./255,
4 shear_range=0.2,
5 zoom_range=0.2,
6 horizontal_flip=True
7 )
9 test_datagen = ImageDataGenerator(rescale=1./255)
---> 11 train_generator = train_datagen.flow_from_directory(
12 train_dir,
13 target_size=(100, 100),
14 batch_size=32,
15 class_mode='categorical'
16 )
18 test_generator = test_datagen.flow_from_directory(
19 test_dir,
20 target_size=(100, 100),
21 batch_size=32,
22 class_mode='categorical'
23 )
File ~/adi_student/venv/lib/python3.10/site-packages/keras/src/legacy/preprocessing/image.py:1138, in ImageDataGenerator.flow_from_directory(self, directory, target_size, color_mode, classes, class_mode, batch_size, shuffle, seed, save_to_dir, save_prefix, save_format, follow_links, subset, interpolation, keep_aspect_ratio)
1120 def flow_from_directory(
1121 self,
1122 directory,
(...)
1136 keep_aspect_ratio=False,
1137 ):
-> 1138 return DirectoryIterator(
1139 directory,
1140 self,
1141 target_size=target_size,
1142 color_mode=color_mode,
1143 keep_aspect_ratio=keep_aspect_ratio,
1144 classes=classes,
1145 class_mode=class_mode,
1146 data_format=self.data_format,
1147 batch_size=batch_size,
1148 shuffle=shuffle,
1149 seed=seed,
1150 save_to_dir=save_to_dir,
1151 save_prefix=save_prefix,
1152 save_format=save_format,
1153 follow_links=follow_links,
1154 subset=subset,
1155 interpolation=interpolation,
1156 dtype=self.dtype,
1157 )
File ~/adi_student/venv/lib/python3.10/site-packages/keras/src/legacy/preprocessing/image.py:453, in DirectoryIterator.__init__(self, directory, image_data_generator, target_size, color_mode, classes, class_mode, batch_size, shuffle, seed, data_format, save_to_dir, save_prefix, save_format, follow_links, subset, interpolation, keep_aspect_ratio, dtype)
451 if not classes:
452 classes = []
--> 453 for subdir in sorted(os.listdir(directory)):
454 if os.path.isdir(os.path.join(directory, subdir)):
455 classes.append(subdir)
FileNotFoundError: [Errno 2] No such file or directory: 'Documents\\kaggle\\fruits360\\fruits-360_dataset\\fruits-360\\Training'
# Building the CNN model: three Conv/MaxPool stages followed by a dense
# classifier head with dropout for regularization.
# Derive the class count from the generator rather than hard-coding 131;
# the training log ("Found ... 131 classes") confirms this matches.
num_classes = len(train_generator.class_indices)

model = Sequential([
    # Keras 3 warns when input_shape is passed to a layer ("Do not pass an
    # `input_shape` ... argument to a layer"); declare the input explicitly
    # with an Input object instead, which silences that UserWarning.
    tf.keras.Input(shape=(100, 100, 3)),
    Conv2D(32, (3, 3), activation='relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Conv2D(64, (3, 3), activation='relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Conv2D(128, (3, 3), activation='relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Flatten(),
    Dense(512, activation='relu'),
    Dropout(0.5),  # drop half the dense activations during training
    Dense(num_classes, activation='softmax'),
])
# Compile the model: Adam with its default learning rate, and categorical
# cross-entropy to match the one-hot labels from class_mode='categorical'.
model.compile(
    optimizer=Adam(),
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)
# Determine steps per epoch and validation steps
# Floor division drops the final partial batch from each epoch's count.
# NOTE(review): the training log shows "Your input ran out of data" at the
# start of epoch 2 — with Keras 3's PyDataset adapters, passing these counts
# to fit() can exhaust a finite generator; confirm whether they are still
# needed, since Keras can infer the epoch length from the generator itself.
steps_per_epoch = train_generator.samples // train_generator.batch_size
validation_steps = test_generator.samples // test_generator.batch_size
# Evaluate on the held-out test split and report accuracy as a percentage.
eval_results = model.evaluate(test_generator)
loss, accuracy = eval_results
print(f'Test accuracy: {accuracy * 100:.2f}%')
Found 67692 images belonging to 131 classes.
Found 22688 images belonging to 131 classes.
C:\Users\adity\AppData\Local\Programs\Python\Python312\Lib\site-packages\keras\src\layers\convolutional\base_conv.py:107: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead.
super().__init__(activity_regularizer=activity_regularizer, **kwargs)
Epoch 1/15
C:\Users\adity\AppData\Local\Programs\Python\Python312\Lib\site-packages\keras\src\trainers\data_adapters\py_dataset_adapter.py:121: UserWarning: Your `PyDataset` class should call `super().__init__(**kwargs)` in its constructor. `**kwargs` can include `workers`, `use_multiprocessing`, `max_queue_size`. Do not pass these arguments to `fit()`, as they will be ignored.
self._warn_if_super_not_called()
[1m2115/2115[0m [32m━━━━━━━━━━━━━━━━━━━━[0m[37m[0m [1m720s[0m 338ms/step - accuracy: 0.4970 - loss: 1.9714 - val_accuracy: 0.9128 - val_loss: 0.3601
Epoch 2/15
[1m 1/2115[0m [37m━━━━━━━━━━━━━━━━━━━━[0m [1m6:14[0m 177ms/step - accuracy: 0.8750 - loss: 0.2425
C:\Users\adity\AppData\Local\Programs\Python\Python312\Lib\contextlib.py:158: UserWarning: Your input ran out of data; interrupting training. Make sure that your dataset or generator can generate at least `steps_per_epoch * epochs` batches. You may need to use the `.repeat()` function when building your dataset.
self.gen.throw(value)
An error occurred: 'NoneType' object has no attribute 'items'
[1m709/709[0m [32m━━━━━━━━━━━━━━━━━━━━[0m[37m[0m [1m46s[0m 65ms/step - accuracy: 0.9185 - loss: 0.3224
Test accuracy: 91.70%