Anandxd133 / AgroAI


Wrong directories and paths not mentioned properly. #1

Open rishabhsinghrathaur opened 1 month ago

rishabhsinghrathaur commented 1 month ago

Code with proper error-handling messages:

rishabhsinghrathaur commented 1 month ago

import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.preprocessing import image
import numpy as np
import os

def create_model():
    model = Sequential([
        Conv2D(32, (3, 3), activation='relu', input_shape=(128, 128, 3)),
        MaxPooling2D(pool_size=(2, 2)),
        Conv2D(64, (3, 3), activation='relu'),
        MaxPooling2D(pool_size=(2, 2)),
        Conv2D(128, (3, 3), activation='relu'),
        MaxPooling2D(pool_size=(2, 2)),
        Flatten(),
        Dense(512, activation='relu'),
        Dropout(0.5),
        Dense(4, activation='softmax')  # 4 classes: nitrogen, potassium, phosphorus, healthy
    ])
    return model

def train_model(train_generator, validation_generator):
    model = create_model()
    model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])

    history = model.fit(
        train_generator,
        steps_per_epoch=train_generator.samples // train_generator.batch_size,
        validation_data=validation_generator,
        validation_steps=validation_generator.samples // validation_generator.batch_size,
        epochs=25
    )

    return model, history

def predict_image(img_path, model):
    try:
        img = image.load_img(img_path, target_size=(128, 128))
        img_array = image.img_to_array(img) / 255.0
        img_array = np.expand_dims(img_array, axis=0)
        prediction = model.predict(img_array)
        # Note: relies on the module-level train_generator (created below) for the class names
        class_indices = {v: k for k, v in train_generator.class_indices.items()}
        predicted_class = class_indices[np.argmax(prediction)]
        confidence = np.max(prediction)
        return predicted_class, confidence
    except Exception as e:
        print(f"Error in prediction: {str(e)}")
        return None, None

try:
    # Check if GPU is available
    print("Num GPUs Available: ", len(tf.config.experimental.list_physical_devices('GPU')))

    # Data Augmentation
    train_datagen = ImageDataGenerator(
        rescale=1./255,
        rotation_range=20,
        width_shift_range=0.2,
        height_shift_range=0.2,
        shear_range=0.2,
        zoom_range=0.2,
        horizontal_flip=True,
        fill_mode='nearest',
        validation_split=0.2  # 20% for validation
    )

    # Check if the dataset directory exists
    dataset_path = 'Dataset'
    if not os.path.exists(dataset_path):
        raise FileNotFoundError(f"Dataset directory not found: {dataset_path}")
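    # Expected layout (an assumption, based on how flow_from_directory works and the
    # four class names used in create_model): one sub-folder per class inside the
    # dataset directory, e.g.
    #   Dataset/
    #       healthy/
    #       nitrogen/
    #       phosphorus/
    #       potassium/
    # Point dataset_path at the correct location if the images live elsewhere.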

    train_generator = train_datagen.flow_from_directory(
        dataset_path,
        target_size=(128, 128),
        batch_size=32,
        class_mode='categorical',
        subset='training'
    )

    validation_generator = train_datagen.flow_from_directory(
        dataset_path,
        target_size=(128, 128),
        batch_size=32,
        class_mode='categorical',
        subset='validation'
    )

    # Print class indices
    print("Class indices:", train_generator.class_indices)

    # Train the model
    model, history = train_model(train_generator, validation_generator)

    # Evaluate on the validation set
    val_loss, val_accuracy = model.evaluate(validation_generator)
    print(f"Validation Loss: {val_loss}")
    print(f"Validation Accuracy: {val_accuracy}")

    # Save the model
    model.save('leaf_deficiency_model.h5')
    print("Model saved successfully.")

    # Test prediction
    test_image_path = 'path_to_your_test_image.jpg'  # Replace with an actual image path
    if os.path.exists(test_image_path):
        predicted_class, confidence = predict_image(test_image_path, model)
        if predicted_class:
            print(f"Predicted class: {predicted_class}")
            print(f"Confidence: {confidence:.2f}")
    else:
        print(f"Test image not found: {test_image_path}")

except Exception as e:
    print(f"An error occurred: {str(e)}")
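For reference, once the script has saved leaf_deficiency_model.h5, the model can be reloaded for a standalone prediction without retraining. A minimal sketch, with the preprocessing matching predict_image above; the image path is hypothetical and the class-name order assumes the usual alphabetical folder ordering shown by "Class indices" during training:

    import numpy as np
    import tensorflow as tf
    from tensorflow.keras.preprocessing import image

    # Reload the model saved by the training script
    model = tf.keras.models.load_model('leaf_deficiency_model.h5')

    # Assumed class order; confirm against the printed train_generator.class_indices
    class_names = ['healthy', 'nitrogen', 'phosphorus', 'potassium']

    # Preprocess a single image the same way predict_image() does
    img = image.load_img('sample_leaf.jpg', target_size=(128, 128))  # hypothetical test image
    img_array = np.expand_dims(image.img_to_array(img) / 255.0, axis=0)

    prediction = model.predict(img_array)
    print(class_names[int(np.argmax(prediction))], float(np.max(prediction)))

This avoids re-running the whole training pipeline just to test a single image path.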