# Source note: uploaded by criteria1
# Image Classification with Keras — Code Snippet
# (Original page title in Croatian: "Klasifikacija slika pomoću Kerasa: Kodni isječak")
# Mount Google Drive inside Colab so the dataset stored there is readable.
from google.colab import drive
drive.mount('/content/drive')
import os
# Root folder of the image dataset on Drive (one sub-directory per split).
dataset_path = "/content/drive/My Drive/sampletomotodata"
train_dir = os.path.join(dataset_path, "train")
# NOTE(review): the "valid" split is used as the test set here — confirm intended.
test_dir = os.path.join(dataset_path, "valid")
print("Train Path:", train_dir)
print("Test Path:", test_dir)
import cv2
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten, Dropout
from tensorflow.keras.utils import to_categorical
import matplotlib.pyplot as plt
# Function to load images from a directory-per-class folder tree.
def load_images_from_folder(folder, img_size=(224, 224)):
    """Load an image dataset laid out as one sub-directory per class.

    Parameters
    ----------
    folder : str
        Path containing one sub-directory per class; each sub-directory
        holds that class's image files.
    img_size : tuple[int, int]
        Target (width, height) every image is resized to.

    Returns
    -------
    tuple
        ``(images, labels, class_names)`` — ``images`` is a numpy array of
        shape (N, H, W, 3) in BGR channel order (cv2 convention),
        ``labels`` is an int array of class indices, and ``class_names``
        is the sorted list of class sub-directory names.
    """
    images = []
    labels = []
    # Sort so class indices are deterministic across runs/platforms.
    class_names = sorted(os.listdir(folder))
    for class_index, class_name in enumerate(class_names):
        class_path = os.path.join(folder, class_name)
        if not os.path.isdir(class_path):
            continue  # skip stray files sitting next to the class folders
        # Sort file names too so the sample order is reproducible.
        for img_name in sorted(os.listdir(class_path)):
            img_path = os.path.join(class_path, img_name)
            img = cv2.imread(img_path)  # returns None for unreadable files
            if img is not None:
                images.append(cv2.resize(img, img_size))
                labels.append(class_index)  # label = index of the class folder
    return np.array(images), np.array(labels), class_names
# Load training and testing data from the directory trees.
X_train, y_train, class_names = load_images_from_folder(train_dir)
X_test, y_test, _ = load_images_from_folder(test_dir)

# Normalize pixel values from the 0-255 byte range to 0-1 floats.
X_train = X_train.astype("float32") / 255.0
X_test = X_test.astype("float32") / 255.0

print("Training Data Shape:", X_train.shape)
print("Testing Data Shape:", X_test.shape)
print("Class Names:", class_names)

# Display the first few training images.
plt.figure(figsize=(20, 5))
for i in range(5):
    plt.subplot(1, 5, i + 1)
    # cv2 loads images in BGR order; reverse the channel axis so
    # matplotlib (which expects RGB) shows the true colors.
    plt.imshow(X_train[i][..., ::-1])
    plt.axis("off")
    plt.title(class_names[y_train[i]])
plt.show()

# One-hot encode the integer labels for categorical_crossentropy.
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
# Fixed print labels (original copy-paste said "Testing" for both).
print("Training Labels Shape:", y_train.shape)
print("Testing Labels Shape:", y_test.shape)
# Define the ANN model: flatten the image, two hidden layers, softmax output.
model = Sequential([
    # Flatten each 224x224x3 image into a 150528-dimensional vector.
    Flatten(input_shape=(224, 224, 3)),
    Dense(512, activation='relu'),  # fully connected layer, ReLU activation
    Dropout(0.2),                   # dropout to reduce overfitting
    Dense(512, activation='relu'),  # second fully connected layer
    Dropout(0.2),                   # dropout to reduce overfitting
    # Output layer: one unit per class (2 classes here). Softmax — not the
    # original sigmoid — so the outputs form a probability distribution,
    # matching the categorical_crossentropy loss used at compile time.
    Dense(2, activation='softmax'),
])
# Compile the model for multi-class classification.
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])

# Train: 5 epochs, batches of 128, with 20% of training data held out for validation.
history = model.fit(X_train, y_train, epochs=5, batch_size=128, validation_split=0.2)

# Report final data shapes (print labels fixed from the original copy-paste,
# which said "Testing Data Shape" for all four).
print("Training Data Shape:", X_train.shape)
print("Training Labels Shape:", y_train.shape)
print("Testing Data Shape:", X_test.shape)
print("Testing Labels Shape:", y_test.shape)

# Evaluate the model on the held-out test set.
loss, accuracy = model.evaluate(X_test, y_test)
print(f'Test loss: {loss:.4f}, Test accuracy: {accuracy:.4f}')
# End of snippet (original page footer: "Download")