"""Transfer learning with VGG16: fine-tune a frozen ImageNet backbone on a
custom image-classification dataset laid out in class-per-subdirectory form.

Reconstructed from a scrambled draft: statements are now in execution order,
comment markers no longer swallow code, and the loss matches the generator's
class_mode ('categorical' -> categorical_crossentropy, not binary).
"""

from tensorflow.keras.applications import VGG16
from tensorflow.keras.layers import Dense, Flatten
from tensorflow.keras.models import Model
from tensorflow.keras.preprocessing.image import ImageDataGenerator

# --- Data locations (assumes data already collected and preprocessed) ---
# TODO: point these at the real dataset before running.
train_dir = 'path/to/train'
validation_dir = 'path/to/validation'

BATCH_SIZE = 32
IMAGE_SIZE = (224, 224)  # VGG16's expected input resolution
NUM_CLASSES = 2  # TODO: set to the actual number of class subdirectories

# --- Data pipelines ---
# Augment only the training set; validation gets rescaling alone so the
# metrics reflect unmodified images.
train_datagen = ImageDataGenerator(
    rescale=1. / 255,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
)
validation_datagen = ImageDataGenerator(rescale=1. / 255)

train_generator = train_datagen.flow_from_directory(
    train_dir,
    target_size=IMAGE_SIZE,
    batch_size=BATCH_SIZE,
    class_mode='categorical',
)
validation_generator = validation_datagen.flow_from_directory(
    validation_dir,
    target_size=IMAGE_SIZE,
    batch_size=BATCH_SIZE,
    class_mode='categorical',
)

# --- Model: frozen VGG16 backbone + small trainable classifier head ---
base_model = VGG16(
    weights='imagenet',
    include_top=False,  # drop ImageNet's 1000-way classifier
    input_shape=(*IMAGE_SIZE, 3),
)

# Freeze the convolutional base so only the new head trains.
for layer in base_model.layers:
    layer.trainable = False

x = Flatten()(base_model.output)
x = Dense(256, activation='relu')(x)
outputs = Dense(NUM_CLASSES, activation='softmax')(x)
model = Model(inputs=base_model.input, outputs=outputs)

# class_mode='categorical' yields one-hot labels, so the loss must be
# categorical_crossentropy (the draft's binary_crossentropy was a mismatch).
model.compile(
    optimizer='adam',
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)

# --- Training ---
history = model.fit(
    train_generator,
    steps_per_epoch=train_generator.samples // BATCH_SIZE,
    validation_data=validation_generator,
    validation_steps=validation_generator.samples // BATCH_SIZE,
    epochs=10,
)