TP 3

QST 1 Tamura

import os
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial.distance import euclidean
import cv2

# Function to compute Tamura Coarseness
def tamura_coarseness(img):
    kernel_sizes = [1, 2, 4, 8, 16]
    h, w = img.shape
    average_grays = []

    # Compute the average gray levels for each kernel size
    for k in kernel_sizes:
        avg_gray = cv2.blur(img, (k, k))
        average_grays.append(avg_gray)

    # Calculate the differences for each kernel size
    E_h = [np.abs(np.roll(avg, k, axis=1) - avg) for avg, k in zip(average_grays, kernel_sizes)]
    E_v = [np.abs(np.roll(avg, k, axis=0) - avg) for avg, k in zip(average_grays, kernel_sizes)]

    # Select the maximum difference
    Sbest = np.zeros_like(img)
    for Eh, Ev in zip(E_h, E_v):
        Sbest = np.maximum(Sbest, np.maximum(Eh, Ev))
    return np.mean(Sbest)

# Function to compute Tamura Contrast
def tamura_contrast(img):
    std_dev = np.std(img)
    mean = np.mean(img)
    contrast = std_dev / (mean + 1e-10)  # Avoid division by zero
    return contrast
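
# Optional sketch (my addition, not part of the TP code): Tamura's original
# contrast definition divides the standard deviation by the fourth root of
# the kurtosis of the gray-level distribution; the simplified std/mean
# variant above is what this TP actually uses.
def tamura_contrast_classic(img, n=0.25):
    mean = np.mean(img)
    var = np.var(img)
    mu4 = np.mean((img - mean) ** 4)      # fourth central moment
    kurtosis = mu4 / (var ** 2 + 1e-10)   # alpha_4 = mu4 / sigma^4
    return np.sqrt(var) / (kurtosis ** n + 1e-10)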

# Function to compute Tamura Directionality
def tamura_directionality(img):
    sobel_x = cv2.Sobel(img, cv2.CV_64F, 1, 0, ksize=3)
    sobel_y = cv2.Sobel(img, cv2.CV_64F, 0, 1, ksize=3)
    magnitude, angle = cv2.cartToPolar(sobel_x, sobel_y, angleInDegrees=True)
    hist, _ = np.histogram(angle, bins=16, range=(0, 180))
    hist = hist / hist.sum()
    directionality = np.sum(hist**2)
    return directionality

# Function to compute Linearity
def tamura_linearity(img):
    sobel_x = cv2.Sobel(img, cv2.CV_64F, 1, 0, ksize=3)
    sobel_y = cv2.Sobel(img, cv2.CV_64F, 0, 1, ksize=3)
    _, angle = cv2.cartToPolar(sobel_x, sobel_y, angleInDegrees=True)

    # Compute the orientation histogram
    hist, bin_edges = np.histogram(angle, bins=16, range=(0, 180))
    hist = hist / hist.sum()  # Normalize the histogram to probabilities

    # Calculate linearity as the concentration of the histogram:
    # the maximum bin value indicates how concentrated the orientations are
    max_bin = np.max(hist)
    linearity = max_bin
    return linearity

# Function to compute Tamura Regularity
def tamura_regularity(img):
    # Calculate coarseness, contrast, and directionality
    coarseness = tamura_coarseness(img)
    contrast = tamura_contrast(img)
    directionality = tamura_directionality(img)

    # Compute regularity
    return 1 / (1 + coarseness + contrast + (1 - directionality))

# Function to compute Roughness
def tamura_roughness(img):
    laplacian = cv2.Laplacian(img, cv2.CV_64F)
    roughness = np.var(laplacian)
    return roughness

# Directory containing images
directory_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/"
image_query_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg"

# List of file paths of images
image_paths = [os.path.join(directory_path, f) for f in os.listdir(directory_path)
               if f.endswith((".jpg", ".jpeg", ".png"))]

# Load the query image and compute its Tamura features
query_image = Image.open(image_query_path).convert("L")
query_array = np.array(query_image)
query_array = query_array / 255.0
query_features = [
    tamura_roughness(query_array),
    tamura_linearity(query_array),
    tamura_regularity(query_array),
    tamura_coarseness(query_array),
    tamura_contrast(query_array),
    tamura_directionality(query_array)
]

# Lists to store similarity scores
similarities = []
sc = []

# Loop through each image to calculate Tamura features and compute similarity
for image_path in image_paths:
    image = Image.open(image_path).convert("L")
    image_array = np.array(image)
    image_array = image_array / 255.0
    # Compute Tamura features for the current image
    features = [
        tamura_roughness(image_array),
        tamura_linearity(image_array),
        tamura_regularity(image_array),
        tamura_coarseness(image_array),
        tamura_contrast(image_array),
        tamura_directionality(image_array)
    ]

    # Compute the Euclidean distance as a measure of similarity
    score = euclidean(query_features, features)
    similarities.append((image_path, score, features))
    sc.append(features)

# print(sc)
# Sort images by similarity score (lower is more similar)
similarities.sort(key=lambda x: x[1])

# Display results
top_k = 10
print("Query Image:", image_query_path)
print("Query Features:", query_features)

plt.figure(figsize=(20, 10))
for i in range(min(top_k, len(similarities))):
    image_path, score, features = similarities[i]
    print(f"{i+1}. {image_path} - Score: {score:.4f} -\n {features}")
    image = Image.open(image_path)
    plt.subplot(2, top_k, i + 1)
    plt.imshow(image, cmap='gray')
    plt.title(f"Rank {i+1}\nScore: {score:.2f}")
    plt.axis('off')

plt.tight_layout()
plt.show()

Query Image: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg
Query Features: [0.2042472324846362, 0.08704549031953408, 0.38272818490661975, 0.2136791578305313, 0.46301451107949937, 0.063873292893078]
1. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg - Score: 0.0000 - [0.2042472324846362, 0.08704549031953408, 0.38272818490661975, 0.2136791578305313, 0.46301451107949937, 0.063873292893078]
2. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2933.jpg - Score: 0.0417 - [0.2156007445613762, 0.08743430482560917, 0.3910163004097895, 0.18307023515888288, 0.4384137928414785, 0.06404588807630002]
3. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2773.jpg - Score: 0.0529 - [0.22432103255877833, 0.0858856962425008, 0.3742055276384279, 0.22699516520780674, 0.5093002358311689, 0.06396715855978066]
4. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2956.jpg - Score: 0.0543 - [0.16408157372484533, 0.08709954233409611, 0.38809223486085515, 0.213710284205594, 0.4268201215379499, 0.06382334960124418]
5. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2940.jpg - Score: 0.0544 - [0.25501912750220673, 0.08258044064035505, 0.3865194434404967, 0.19808677972531785, 0.45296269477887396, 0.06385750532723508]
6. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2769.jpg - Score: 0.0550 - [0.23233048633191014, 0.0798180962835189, 0.37556983511402914, 0.21721418542799606, 0.5090711709746084, 0.06366470238659795]
7. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2764.jpg - Score: 0.0575 - [0.25921643171058245, 0.09131832797427653, 0.38486607623744357, 0.21582464081010008, 0.44702537181507174, 0.0645435841234065]
8. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2970.jpg - Score: 0.0704 - [0.14323099471466663, 0.08333333333333333, 0.3794891999081363, 0.20263748418271932, 0.4960830775863336, 0.06359945623542501]
9. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2780.jpg - Score: 0.0737 - [0.24279998886808793, 0.08921302578018996, 0.37255041804767897, 0.274680908053529, 0.4737589873741103, 0.06423944481367677]
10. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2921.jpg - Score: 0.0773 - [0.27095297157444953, 0.08074857043839889, 0.3787662880825163, 0.25077144995111805, 0.45296530187328865, 0.06358626466662509]
QST 2 Haralick

import os
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial.distance import euclidean
import mahotas as mh

# Function to compute Haralick features
def compute_haralick_features(image):
    # Compute the GLCM and extract Haralick features
    features = mh.features.haralick(image).mean(axis=0)  # Average across angles
    return features

# Directory containing images
directory_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/"
image_query_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg"

# List of file paths of images
image_paths = [os.path.join(directory_path, f) for f in os.listdir(directory_path)
               if f.endswith((".jpg", ".jpeg", ".png"))]

# Load the query image and compute its Haralick features
query_image = Image.open(image_query_path).convert("L")
query_array = np.array(query_image)
query_features = compute_haralick_features(query_array)

# Haralick feature names for descriptions
feature_names = [
    "Angular Second Moment (ASM)",
    "Contrast",
    "Correlation",
    "Sum of Squares: Variance",
    "Inverse Difference Moment (IDM)",
    "Sum Average",
    "Sum Variance",
    "Sum Entropy",
    "Entropy",
    "Difference Variance",
    "Difference Entropy",
    "Information Measure of Correlation 1",
    "Information Measure of Correlation 2"
]

# List to store similarity scores
similarities = []

# Loop through each image to calculate Haralick features and compute similarity
for image_path in image_paths:
    image = Image.open(image_path).convert("L")
    image_array = np.array(image)
    # Compute Haralick features for the current image
    features = compute_haralick_features(image_array)

    # Compute the Euclidean distance as a measure of similarity
    score = euclidean(query_features, features)
    similarities.append((image_path, score, features))

# Sort images by similarity score (lower is more similar)
similarities.sort(key=lambda x: x[1])

# Display results
top_k = 10
print("Query Image:", image_query_path)
print("Query Features:")
for name, value in zip(feature_names, query_features):
    print(f"  {name}: {value:.4f}")

plt.figure(figsize=(20, 10))
for i in range(min(top_k, len(similarities))):
    image_path, score, features = similarities[i]
    # print(f"\nRank {i+1}: {image_path}")
    # print(f"  Similarity Score: {score:.4f} {'(High)' if score > 1 else '(Low)'}")
    # print("  Features:")
    # for name, value in zip(feature_names, features):
    #     print(f"    {name}: {value:.4f}")

    image = Image.open(image_path)
    plt.subplot(2, top_k, i + 1)
    plt.imshow(image, cmap='gray')
    plt.title(f"Rank {i+1}\nScore: {score:.2f}")
    plt.axis('off')

plt.tight_layout()
plt.show()

Query Image: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg
Query Features:
Angular Second Moment (ASM): 0.0001
Contrast: 1785.6292
Correlation: 0.4951
Sum of Squares: Variance: 1768.4201
Inverse Difference Moment (IDM): 0.0520
Sum Average: 181.4227
Sum Variance: 5288.0512
Sum Entropy: 8.0582
Entropy: 13.1889
Difference Variance: 0.0001
Difference Entropy: 6.2715
Information Measure of Correlation 1: -0.1749
Information Measure of Correlation 2: 0.9591
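
Note that these raw Haralick features span several orders of magnitude (ASM is about 0.0001 while Contrast is about 1785), so the plain Euclidean distance above is dominated almost entirely by the large-scale features such as Contrast and Sum Variance. A minimal sketch of per-feature min-max normalization over the database (my addition, not part of the assignment script):

# Sketch (assumption: illustrative only): rescale each Haralick feature to
# [0, 1] across the whole database so no single feature dominates the
# Euclidean distance.
all_features = np.array([
    compute_haralick_features(np.array(Image.open(p).convert("L")))
    for p in image_paths
])
f_min, f_max = all_features.min(axis=0), all_features.max(axis=0)
normalized = (all_features - f_min) / (f_max - f_min + 1e-10)
query_norm = (query_features - f_min) / (f_max - f_min + 1e-10)
scores = [euclidean(query_norm, row) for row in normalized]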

# Haralick test 2: GLCM properties with scikit-image
import os
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
from skimage.feature import graycomatrix, graycoprops

# Function to compute essential GLCM properties
def compute_glcm_features(image, levels=32, distance=1, angle=0):
    """
    Compute GLCM features from the image.
    - levels: Number of gray levels for reducing resolution.
    - distance: Pixel distance for GLCM calculation.
    - angle: Angle for GLCM (e.g., 0°, 45°, 90°, 135° in radians).
    """
    # Reduce the gray levels for faster computation
    reduced_image = (image / (256 // levels)).astype(np.uint8)

    # Compute the GLCM
    glcm = graycomatrix(reduced_image, [distance], [angle],
                        levels=levels, symmetric=True, normed=True)

    # Extract the essential properties
    features = {
        "Contrast": graycoprops(glcm, "contrast")[0, 0],
        "Dissimilarity": graycoprops(glcm, "dissimilarity")[0, 0],
        "Homogeneity": graycoprops(glcm, "homogeneity")[0, 0],
        "Energy": graycoprops(glcm, "energy")[0, 0],
        "Correlation": graycoprops(glcm, "correlation")[0, 0]
    }
    return features

# Euclidean distance for feature comparison
def compute_similarity(features1, features2):
    return np.sqrt(sum((features1[key] - features2[key]) ** 2 for key in features1))

# Directory and image paths
directory_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/"
image_query_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg"

# Get the list of images
image_paths = [os.path.join(directory_path, f) for f in os.listdir(directory_path)
               if f.endswith((".jpg", ".jpeg", ".png"))]

# Load the query image and compute its features
query_image = Image.open(image_query_path).convert("L")
query_array = np.array(query_image)
query_features = compute_glcm_features(query_array)

# Compare the query image with the other images
similarities = []
for image_path in image_paths:
    image = Image.open(image_path).convert("L")
    image_array = np.array(image)
    features = compute_glcm_features(image_array)
    distance = compute_similarity(query_features, features)
    similarities.append((image_path, distance))

# Sort images by similarity (lower distance = more similar)
similarities.sort(key=lambda x: x[1])

# Display results
top_k = 10
plt.figure(figsize=(25, 5))
print("Query Image:", image_query_path)
for i in range(min(top_k, len(similarities))):
    image_path, score = similarities[i]
    print(f"Rank {i+1}: {image_path} | Similarity Score: {score:.4f}")

    image = Image.open(image_path)
    plt.subplot(1, top_k, i + 1)
    plt.imshow(image, cmap='gray')
    plt.title(f"Rank {i+1}\nScore: {score:.2f}")
    plt.axis('off')

plt.tight_layout()
plt.show()
Query Image: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg
Rank 1: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg | Similarity Score: 0.0000
Rank 2: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2660.jpg | Similarity Score: 0.2593
Rank 3: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2729.jpg | Similarity Score: 0.4857
Rank 4: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2665.jpg | Similarity Score: 0.6311
Rank 5: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2763.jpg | Similarity Score: 0.6503
Rank 6: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/3029.jpg | Similarity Score: 0.8215
Rank 7: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2841.jpg | Similarity Score: 0.9531
Rank 8: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2924.jpg | Similarity Score: 1.1248
Rank 9: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2978.jpg | Similarity Score: 1.1737
Rank 10: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2901.jpg | Similarity Score: 1.3823
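
Unlike the mahotas version, this GLCM uses a single angle (0 rad), so the descriptor is sensitive to texture orientation. A minimal sketch of averaging the properties over the four standard angles, assuming the same levels and distance defaults as above (my addition, not part of the assignment script):

# Sketch (my addition): GLCM properties averaged over the four standard
# angles, which makes the descriptor less orientation-sensitive.
def compute_glcm_features_multi(image, levels=32, distance=1):
    reduced = (image / (256 // levels)).astype(np.uint8)
    angles = [0, np.pi / 4, np.pi / 2, 3 * np.pi / 4]
    glcm = graycomatrix(reduced, [distance], angles,
                        levels=levels, symmetric=True, normed=True)
    props = ["contrast", "dissimilarity", "homogeneity", "energy", "correlation"]
    # graycoprops returns an array of shape (n_distances, n_angles);
    # average each property over the angles.
    return {p.capitalize(): graycoprops(glcm, p)[0].mean() for p in props}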

QST 3 Gabor

import os
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial.distance import euclidean
import cv2

# Function to apply Gabor filters and extract features
def gabor_features(img, frequencies=[0.1, 0.2, 0.3],
                   angles=[0, np.pi/4, np.pi/2, 3*np.pi/4]):
    """
    Extracts Gabor filter responses as features.
    - frequencies: List of spatial frequencies.
    - angles: List of orientations in radians.
    """
    img = img.astype(np.float32)
    features = []
    for theta in angles:
        for freq in frequencies:
            # Create the Gabor kernel
            kernel = cv2.getGaborKernel(ksize=(15, 15), sigma=4.0, theta=theta,
                                        lambd=1/freq, gamma=0.5, psi=0)
            # Filter the image
            filtered = cv2.filter2D(img, cv2.CV_32F, kernel)
            # Use the mean and standard deviation of the filtered response as features
            features.append(np.mean(filtered))
            features.append(np.std(filtered))
    return features
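
# Optional sketch (my addition, not part of the TP code): visualize the
# 4x3 bank of Gabor kernels used above to sanity-check their orientation
# and frequency before running the retrieval.
def show_gabor_bank(frequencies=[0.1, 0.2, 0.3],
                    angles=[0, np.pi/4, np.pi/2, 3*np.pi/4]):
    fig, axs = plt.subplots(len(angles), len(frequencies), figsize=(9, 12))
    for r, theta in enumerate(angles):
        for c, freq in enumerate(frequencies):
            k = cv2.getGaborKernel(ksize=(15, 15), sigma=4.0, theta=theta,
                                   lambd=1/freq, gamma=0.5, psi=0)
            axs[r, c].imshow(k, cmap='gray')
            axs[r, c].set_title(f"theta={theta:.2f}, f={freq}")
            axs[r, c].axis('off')
    plt.show()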

# Directory containing images
directory_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/"
image_query_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg"

# List of file paths of images
image_paths = [os.path.join(directory_path, f) for f in os.listdir(directory_path)
               if f.endswith((".jpg", ".jpeg", ".png"))]

# Load the query image and compute its Gabor features
query_image = Image.open(image_query_path).convert("L")
query_array = np.array(query_image)
query_array = query_array / 255.0  # Normalize the image
query_features = gabor_features(query_array)

# List to store similarity scores
similarities = []

# Loop through each image to calculate Gabor features and compute similarity
for image_path in image_paths:
    image = Image.open(image_path).convert("L")
    image_array = np.array(image)
    image_array = image_array / 255.0  # Normalize the image
    # Compute Gabor features for the current image
    features = gabor_features(image_array)
    # Compute the Euclidean distance as a measure of similarity
    score = euclidean(query_features, features)
    similarities.append((image_path, score))

# Sort images by similarity score (lower is more similar)
similarities.sort(key=lambda x: x[1])

# Display results
top_k = 10
print("Query Image:", image_query_path)
print("Query Features:", query_features)

plt.figure(figsize=(20, 10))
for i in range(min(top_k, len(similarities))):
    image_path, score = similarities[i]
    print(f"{i+1}. {image_path} - Score: {score:.4f}")
    image = Image.open(image_path)
    plt.subplot(2, top_k, i + 1)
    plt.imshow(image, cmap='gray')
    plt.title(f"Rank {i+1}\nScore: {score:.2f}")
    plt.axis('off')

plt.tight_layout()
plt.show()

Query Image: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg
Query Features: [0.43383968, 2.471432, 0.42397752, 1.2523853, 0.94840676, 0.9164261, 5.8681455, 2.6911514, 0.4296119, 1.4688164, 0.19163783, 0.94556856, 0.42563984, 2.4043221, 0.426339, 1.2238321, 0.9486763, 0.81046677, 5.8727756, 2.3874512, 0.42904243, 1.2405515, 0.19105272, 0.7822297]
1. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg - Score: 0.0000
2. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2773.jpg - Score: 1.0311
3. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2970.jpg - Score: 1.1091
4. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2752.jpg - Score: 1.1172
5. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2950.jpg - Score: 1.1482
6. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2674.jpg - Score: 1.1586
7. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2764.jpg - Score: 1.2439
8. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2921.jpg - Score: 1.2910
9. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2677.jpg - Score: 1.5172
10. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2769.jpg - Score: 1.5681

QST 4 Wavelet filter

import os
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial.distance import euclidean
import pywt  # PyWavelets library for wavelet decomposition

# Function to compute wavelet features
def wavelet_features(img, wavelet="haar", level=2):
    """
    Extracts features using the Discrete Wavelet Transform (DWT).
    - img: Input grayscale image (numpy array).
    - wavelet: Type of wavelet to use (e.g., 'haar', 'db1').
    - level: Number of decomposition levels.
    """
    # Perform the wavelet decomposition
    coeffs = pywt.wavedec2(img, wavelet, level=level)
    features = []

    # The first element in coeffs is the approximation coefficients (cA)
    cA = coeffs[0]

    # Iterate over the remaining coefficients (detail coefficients)
    for i in range(1, len(coeffs)):
        cH, cV, cD = coeffs[i]
        features.extend([
            np.mean(cH), np.std(cH),  # Horizontal details
            np.mean(cV), np.std(cV),  # Vertical details
            np.mean(cD), np.std(cD)   # Diagonal details
        ])
    return features
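
# Optional sketch (my addition, not part of the TP code): pywt.wavedec2
# returns [cA_n, (cH_n, cV_n, cD_n), ..., (cH_1, cV_1, cD_1)], so with
# level=2 the loop above yields 2 levels x 3 detail bands x 2 statistics
# = 12 features. A quick check of the coefficient shapes:
def show_wavedec2_structure(shape=(128, 128), wavelet="haar", level=2):
    dummy = np.random.rand(*shape)
    coeffs = pywt.wavedec2(dummy, wavelet, level=level)
    print("cA shape:", coeffs[0].shape)  # coarsest approximation
    for lvl, (cH, cV, cD) in enumerate(coeffs[1:], start=1):
        print(f"detail set {lvl}:", cH.shape, cV.shape, cD.shape)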

# Directory containing images
directory_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/"
image_query_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg"

# List of file paths of images
image_paths = [os.path.join(directory_path, f) for f in os.listdir(directory_path)
               if f.endswith((".jpg", ".jpeg", ".png"))]

# Load the query image and compute its wavelet features
query_image = Image.open(image_query_path).convert("L")
query_array = np.array(query_image) / 255.0  # Normalize to [0, 1]
query_features = wavelet_features(query_array)

# List to store similarity scores
similarities = []

# Loop through each image to calculate wavelet features and compute similarity
for image_path in image_paths:
    image = Image.open(image_path).convert("L")
    image_array = np.array(image) / 255.0  # Normalize to [0, 1]
    # Compute wavelet features for the current image
    features = wavelet_features(image_array)
    # Compute the Euclidean distance as a measure of similarity
    score = euclidean(query_features, features)
    similarities.append((image_path, score))

# Sort images by similarity score (lower is more similar)
similarities.sort(key=lambda x: x[1])

# Display results
top_k = 10
print("Query Image:", image_query_path)
print("Query Features:", query_features)

plt.figure(figsize=(20, 10))
for i in range(min(top_k, len(similarities))):
    image_path, score = similarities[i]
    print(f"{i+1}. {image_path} - Score: {score:.4f}")
    image = Image.open(image_path)
    plt.subplot(2, top_k, i + 1)
    plt.imshow(image, cmap='gray')
    plt.title(f"Rank {i+1}\nScore: {score:.2f}")
    plt.axis('off')

plt.tight_layout()
plt.show()

Query Image: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg
Query Features: [0.0006382761437908491, 0.21011849450618655, 0.006168300653594774, 0.2071044732132275, 0.004368361928104583, 0.1582652088619426, -0.0011169832516339866, 0.11945170411699244, 0.001996527777777778, 0.12163539637068252, 0.0010289011437908497, 0.09673697343232661]
1. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg - Score: 0.0000
2. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2665.jpg - Score: 0.0259
3. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2773.jpg - Score: 0.0273
4. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2769.jpg - Score: 0.0344
5. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2970.jpg - Score: 0.0375
6. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/3014.jpg - Score: 0.0381
7. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2956.jpg - Score: 0.0382
8. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2660.jpg - Score: 0.0413
9. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2762.jpg - Score: 0.0417
10. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2772.jpg - Score: 0.0436

Compare and comment on the results


import os
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial.distance import euclidean
import mahotas as mh
import cv2
import pywt

# Function to compute Tamura features (reuses the tamura_* functions defined in QST 1)
def tamura_features(img):
    return [tamura_roughness(img), tamura_linearity(img), tamura_regularity(img),
            tamura_coarseness(img), tamura_contrast(img), tamura_directionality(img)]

# Function to compute Haralick features
def haralick_features(img):
    img = (img * 255).astype(np.uint8)
    return mh.features.haralick(img).mean(axis=0)

# Function to compute Gabor features
def gabor_features(img, frequencies=[0.1, 0.2, 0.3],
                   angles=[0, np.pi/4, np.pi/2, 3*np.pi/4]):
    features = []
    for theta in angles:
        for freq in frequencies:
            kernel = cv2.getGaborKernel(ksize=(15, 15), sigma=4.0, theta=theta,
                                        lambd=1/freq, gamma=0.5, psi=0)
            filtered = cv2.filter2D(img, cv2.CV_32F, kernel)
            features.append(np.mean(filtered))
            features.append(np.std(filtered))
    return features

# Function to compute Wavelet features
def wavelet_features(img, wavelet="haar", level=2):
    coeffs = pywt.wavedec2(img, wavelet, level=level)
    features = []
    cA = coeffs[0]
    for i in range(1, len(coeffs)):
        cH, cV, cD = coeffs[i]
        features.extend([np.mean(cH), np.std(cH), np.mean(cV), np.std(cV),
                         np.mean(cD), np.std(cD)])
    return features

# Directory containing images
directory_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/"
image_query_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg"

# List of file paths of images
image_paths = [os.path.join(directory_path, f) for f in os.listdir(directory_path)
               if f.endswith((".jpg", ".jpeg", ".png"))]

# Load the query image and compute all features
query_image = Image.open(image_query_path).convert("L")
query_array = np.array(query_image) / 255.0  # Normalize

# Compute the query features for each method
query_tamura = tamura_features(query_array)
query_haralick = haralick_features(query_array)
query_gabor = gabor_features(query_array)
query_wavelet = wavelet_features(query_array)

# Lists to store similarity scores for all methods
similarities_tamura = []
similarities_haralick = []
similarities_gabor = []
similarities_wavelet = []

# Loop through each image to calculate features and compute similarity
for image_path in image_paths:
    image = Image.open(image_path).convert("L")
    image_array = np.array(image) / 255.0  # Normalize

    # Compute the features for the current image
    features_tamura = tamura_features(image_array)
    features_haralick = haralick_features(image_array)
    features_gabor = gabor_features(image_array)
    features_wavelet = wavelet_features(image_array)

    # Compute the Euclidean distance for each feature set
    score_tamura = euclidean(query_tamura, features_tamura)
    score_haralick = euclidean(query_haralick, features_haralick)
    score_gabor = euclidean(query_gabor, features_gabor)
    score_wavelet = euclidean(query_wavelet, features_wavelet)

    similarities_tamura.append((image_path, score_tamura, features_tamura))
    similarities_haralick.append((image_path, score_haralick, features_haralick))
    similarities_gabor.append((image_path, score_gabor, features_gabor))
    similarities_wavelet.append((image_path, score_wavelet, features_wavelet))

# Sort images by similarity score (lower is more similar) for each method
similarities_tamura.sort(key=lambda x: x[1])
similarities_haralick.sort(key=lambda x: x[1])
similarities_gabor.sort(key=lambda x: x[1])
similarities_wavelet.sort(key=lambda x: x[1])

# Display the results for all methods together
top_k = 10
fig, axes = plt.subplots(4, top_k, figsize=(20, 15))

# Tamura features display
for i in range(min(top_k, len(similarities_tamura))):
    image_path, score, features = similarities_tamura[i]
    image = Image.open(image_path)
    axes[0, i].imshow(image, cmap='gray')
    axes[0, i].set_title(f"Rank {i+1}\nScore: {score:.2f}")
    axes[0, i].axis('off')

# Label the Tamura row (note: this overwrites the Rank 1 title of that row)
axes[0, 0].set_title("Tamura Features", fontsize=14)

# Haralick features display
for i in range(min(top_k, len(similarities_haralick))):
    image_path, score, features = similarities_haralick[i]
    image = Image.open(image_path)
    axes[1, i].imshow(image, cmap='gray')
    axes[1, i].set_title(f"Rank {i+1}\nScore: {score:.2f}")
    axes[1, i].axis('off')

# Label the Haralick row (same caveat as above)
axes[1, 0].set_title("Haralick Features", fontsize=14)

# Gabor features display
for i in range(min(top_k, len(similarities_gabor))):
    image_path, score, features = similarities_gabor[i]
    image = Image.open(image_path)
    axes[2, i].imshow(image, cmap='gray')
    axes[2, i].set_title(f"Rank {i+1}\nScore: {score:.2f}")
    axes[2, i].axis('off')

# Label the Gabor row (same caveat as above)
axes[2, 0].set_title("Gabor Features", fontsize=14)

# Wavelet features display
for i in range(min(top_k, len(similarities_wavelet))):
    image_path, score, features = similarities_wavelet[i]
    image = Image.open(image_path)
    axes[3, i].imshow(image, cmap='gray')
    axes[3, i].set_title(f"Rank {i+1}\nScore: {score:.2f}")
    axes[3, i].axis('off')

# Label the Wavelet row (same caveat as above)
axes[3, 0].set_title("Wavelet Features", fontsize=14)

plt.tight_layout()
plt.show()
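
One simple way to compare the four methods quantitatively is to measure how much their top-10 result sets agree; a minimal sketch reusing the sorted lists computed above (my addition, not part of the TP):

# Sketch (my addition): size of the intersection of the top-k result sets
# for each pair of methods; a larger overlap means the two descriptors
# rank the database similarly.
methods = {
    "Tamura": similarities_tamura,
    "Haralick": similarities_haralick,
    "Gabor": similarities_gabor,
    "Wavelet": similarities_wavelet,
}
top_sets = {name: {entry[0] for entry in sims[:top_k]}
            for name, sims in methods.items()}
names = list(top_sets)
for i in range(len(names)):
    for j in range(i + 1, len(names)):
        common = len(top_sets[names[i]] & top_sets[names[j]])
        print(f"{names[i]} vs {names[j]}: {common}/{top_k} images in common")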

QST 1 HSV: Tamura

import os
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial.distance import euclidean
import cv2

# Function to compute Tamura Coarseness
def tamura_coarseness(img):
    kernel_sizes = [1, 2, 4, 8, 16]
    h, w = img.shape
    average_grays = []

    # Compute the average gray levels for each kernel size
    for k in kernel_sizes:
        avg_gray = cv2.blur(img, (k, k))
        average_grays.append(avg_gray)

    # Calculate the differences for each kernel size
    E_h = [np.abs(np.roll(avg, k, axis=1) - avg) for avg, k in zip(average_grays, kernel_sizes)]
    E_v = [np.abs(np.roll(avg, k, axis=0) - avg) for avg, k in zip(average_grays, kernel_sizes)]

    # Select the maximum difference
    Sbest = np.zeros_like(img)
    for Eh, Ev in zip(E_h, E_v):
        Sbest = np.maximum(Sbest, np.maximum(Eh, Ev))
    return np.mean(Sbest)

# Function to compute Tamura Contrast
def tamura_contrast(img):
    std_dev = np.std(img)
    mean = np.mean(img)
    contrast = std_dev / (mean + 1e-10)  # Avoid division by zero
    return contrast

# Function to compute Tamura Directionality
def tamura_directionality(img):
    sobel_x = cv2.Sobel(img, cv2.CV_64F, 1, 0, ksize=3)
    sobel_y = cv2.Sobel(img, cv2.CV_64F, 0, 1, ksize=3)
    magnitude, angle = cv2.cartToPolar(sobel_x, sobel_y, angleInDegrees=True)
    hist, _ = np.histogram(angle, bins=16, range=(0, 180))
    hist = hist / hist.sum()
    directionality = np.sum(hist**2)
    return directionality

# Function to compute Linearity
def tamura_linearity(img):
    sobel_x = cv2.Sobel(img, cv2.CV_64F, 1, 0, ksize=3)
    sobel_y = cv2.Sobel(img, cv2.CV_64F, 0, 1, ksize=3)
    _, angle = cv2.cartToPolar(sobel_x, sobel_y, angleInDegrees=True)

    # Compute the orientation histogram
    hist, _ = np.histogram(angle, bins=16, range=(0, 180))
    hist = hist / hist.sum()  # Normalize the histogram to probabilities

    # Calculate linearity as the concentration of the histogram:
    # the maximum bin value indicates how concentrated the orientations are
    max_bin = np.max(hist)
    linearity = max_bin
    return linearity

# Function to compute Tamura Regularity
def tamura_regularity(img):
    coarseness = tamura_coarseness(img)
    contrast = tamura_contrast(img)
    directionality = tamura_directionality(img)
    return 1 / (1 + coarseness + contrast + (1 - directionality))

# Function to compute Roughness
def tamura_roughness(img):
    laplacian = cv2.Laplacian(img, cv2.CV_64F)
    roughness = np.var(laplacian)
    return roughness

# Directory containing images
directory_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/"
image_query_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg"

# List of file paths of images
image_paths = [os.path.join(directory_path, f) for f in os.listdir(directory_path)
               if f.endswith((".jpg", ".jpeg", ".png"))]

# Load the query image as HSV and compute Tamura features
query_image = Image.open(image_query_path).convert("HSV")
query_array = np.array(query_image) / 255.0  # Normalize to [0, 1]
v_channel = query_array[:, :, 2]  # Use the Value channel for the Tamura features

query_features = [
    tamura_roughness(v_channel),
    tamura_linearity(v_channel),
    tamura_regularity(v_channel),
    tamura_coarseness(v_channel),
    tamura_contrast(v_channel),
    tamura_directionality(v_channel),
]

# List to store similarity scores
similarities = []

# Loop through each image to calculate Tamura features and compute similarity
for image_path in image_paths:
    image = Image.open(image_path).convert("HSV")
    image_array = np.array(image) / 255.0  # Normalize to [0, 1]
    v_channel = image_array[:, :, 2]  # Use the Value channel for the Tamura features

    # Compute Tamura features for the current image
    features = [
        tamura_roughness(v_channel),
        tamura_linearity(v_channel),
        tamura_regularity(v_channel),
        tamura_coarseness(v_channel),
        tamura_contrast(v_channel),
        tamura_directionality(v_channel),
    ]

    # Compute the Euclidean distance as a measure of similarity
    score = euclidean(query_features, features)
    similarities.append((image_path, score, features))

# Sort images by similarity score (lower is more similar)
similarities.sort(key=lambda x: x[1])

# Display results
top_k = 10
print("Query Image:", image_query_path)
print("Query Features:", query_features)

plt.figure(figsize=(20, 10))
for i in range(min(top_k, len(similarities))):
    image_path, score, features = similarities[i]
    print(f"{i+1}. {image_path} - Score: {score:.4f} - Features: {features}")
    image = Image.open(image_path)
    plt.subplot(2, top_k, i + 1)
    plt.imshow(image)
    plt.title(f"Rank {i+1}\nScore: {score:.2f}")
    plt.axis('off')

plt.tight_layout()
plt.show()

Query Image: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg
Query Features: [0.21697062987316532, 0.08335964635301547, 0.39826945470765673, 0.25879584543066086, 0.315731173847722, 0.06366411432868299]
1. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg - Score: 0.0000 - Features: [0.21697062987316532, 0.08335964635301547, 0.39826945470765673, 0.25879584543066086, 0.315731173847722, 0.06366411432868299]
2. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2772.jpg - Score: 0.0206 - Features: [0.20017890566442056, 0.08868201861832435, 0.40047556621334107, 0.255503554749333, 0.30590470171474704, 0.06437701569349912]
3. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2660.jpg - Score: 0.0298 - Features: [0.1968942478678911, 0.09338582677165354, 0.3990168169652637, 0.24310637330697252, 0.3274429729436923, 0.06438931117862234]
4. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2856.jpg - Score: 0.0349 - Features: [0.2269784173317083, 0.09409155710438777, 0.40548104025250126, 0.24217669418434692, 0.2898794894706387, 0.0658496271726412]
5. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/3033.jpg - Score: 0.0367 - Features: [0.23333701334442067, 0.09731703444991269, 0.39414120423213633, 0.2878796895345052, 0.31755241569097775, 0.06827032345188133]
6. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2665.jpg - Score: 0.0464 - Features: [0.20966157971099042, 0.09139523578026251, 0.3970991581464031, 0.2315154056923062, 0.351564645620133, 0.06481734668258315]
7. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2846.jpg - Score: 0.0514 - Features: [0.25779008949045823, 0.08266748617086662, 0.40177245067602063, 0.26706344211436106, 0.28574502775030103, 0.06383741593250651]
8. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2781.jpg - Score: 0.0533 - Features: [0.22633470005978687, 0.08045622688039457, 0.4034622719122222, 0.27579914043152254, 0.2664885250529124, 0.06374117026824444]
9. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/3014.jpg - Score: 0.0536 - Features: [0.2585930391623752, 0.08276301130033424, 0.4023533823443306, 0.22605849122689445, 0.32279061380945145, 0.06347171321860177]
10. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2688.jpg - Score: 0.0550 - Features: [0.21040211931389805, 0.1003225806451613, 0.408904646835786, 0.2102837070140963, 0.3008907641351197, 0.06561654526534859]
Haralick HSV

import os
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial.distance import euclidean
import mahotas as mh

# Function to compute Haralick features
def compute_haralick_features(image):
    """
    Computes Haralick features from the input image.
    :param image: Grayscale (2D) numpy array.
    :return: 13-dimensional vector of Haralick features.
    """
    features = mh.features.haralick(image).mean(axis=0)  # Average across angles
    return features

# Directory containing images
directory_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/"
image_query_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg"

# List of file paths of images
image_paths = [os.path.join(directory_path, f) for f in os.listdir(directory_path)
               if f.endswith((".jpg", ".jpeg", ".png"))]

# Load the query image as HSV and compute Haralick features on the Value (V) channel
query_image = Image.open(image_query_path).convert("HSV")
query_array = np.array(query_image)  # Convert to a numpy array
v_channel = query_array[:, :, 2]  # Extract the Value (V) channel
query_features = compute_haralick_features(v_channel)

# Haralick feature names for descriptions
feature_names = [
    "Angular Second Moment (ASM)",
    "Contrast",
    "Correlation",
    "Sum of Squares: Variance",
    "Inverse Difference Moment (IDM)",
    "Sum Average",
    "Sum Variance",
    "Sum Entropy",
    "Entropy",
    "Difference Variance",
    "Difference Entropy",
    "Information Measure of Correlation 1",
    "Information Measure of Correlation 2"
]

# List to store similarity scores
similarities = []

# Loop through each image to calculate Haralick features and compute similarity
for image_path in image_paths:
    image = Image.open(image_path).convert("HSV")
    image_array = np.array(image)
    v_channel = image_array[:, :, 2]  # Extract the Value (V) channel

    # Compute Haralick features for the current image
    features = compute_haralick_features(v_channel)

    # Compute the Euclidean distance as a measure of similarity
    score = euclidean(query_features, features)
    similarities.append((image_path, score, features))

# Sort images by similarity score (lower is more similar)
similarities.sort(key=lambda x: x[1])

# Display results
top_k = 10
print("Query Image:", image_query_path)
print("Query Features:")
for name, value in zip(feature_names, query_features):
    print(f"  {name}: {value:.4f}")

plt.figure(figsize=(20, 10))
for i in range(min(top_k, len(similarities))):
    image_path, score, features = similarities[i]
    # print(f"\nRank {i+1}: {image_path}")
    # print(f"  Similarity Score: {score:.4f}")
    # print("  Features:")
    # for name, value in zip(feature_names, features):
    #     print(f"    {name}: {value:.4f}")

    image = Image.open(image_path)
    plt.subplot(2, top_k, i + 1)
    plt.imshow(image, cmap='gray')
    plt.title(f"Rank {i+1}\nScore: {score:.2f}")
    plt.axis('off')

plt.tight_layout()
plt.show()
Query Image: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg
Query Features:
Angular Second Moment (ASM): 0.0001
Contrast: 2108.8377
Correlation: 0.5862
Sum of Squares: Variance: 2547.8373
Inverse Difference Moment (IDM): 0.0360
Sum Average: 319.2288
Sum Variance: 8082.5116
Sum Entropy: 8.3205
Entropy: 13.5561
Difference Variance: 0.0000
Difference Entropy: 6.5003
Information Measure of Correlation 1: -0.1915
Information Measure of Correlation 2: 0.9712

Gabor HSV

import os
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial.distance import euclidean
import cv2

# Function to apply Gabor filters and extract features
def gabor_features(img, frequencies=[0.1, 0.2, 0.3],
                   angles=[0, np.pi/4, np.pi/2, 3*np.pi/4]):
    """
    Extracts Gabor filter responses as features.
    - frequencies: List of spatial frequencies.
    - angles: List of orientations in radians.
    """
    img = img.astype(np.float32)
    features = []
    for theta in angles:
        for freq in frequencies:
            # Create the Gabor kernel
            kernel = cv2.getGaborKernel(ksize=(15, 15), sigma=4.0, theta=theta,
                                        lambd=1/freq, gamma=0.5, psi=0)
            # Filter the image
            filtered = cv2.filter2D(img, cv2.CV_32F, kernel)
            # Use the mean and standard deviation of the filtered response as features
            features.append(np.mean(filtered))
            features.append(np.std(filtered))
    return features

# Directory containing images
directory_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/"
image_query_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg"

# List of file paths of images
image_paths = [os.path.join(directory_path, f) for f in os.listdir(directory_path)
               if f.endswith((".jpg", ".jpeg", ".png"))]

# Load the query image and compute Gabor features
# Note: unlike the other HSV scripts, the Value channel is not extracted here;
# cv2.filter2D is applied to all three HSV channels, and the mean/std are
# taken over the full 3-channel response.
query_image = Image.open(image_query_path).convert("HSV")
query_array = np.array(query_image)
query_array = query_array / 255.0  # Normalize the image
query_features = gabor_features(query_array)

# List to store similarity scores
similarities = []

# Loop through each image to calculate Gabor features and compute similarity
for image_path in image_paths:
    image = Image.open(image_path).convert("HSV")
    image_array = np.array(image)
    image_array = image_array / 255.0  # Normalize the image
    # Compute Gabor features for the current image
    features = gabor_features(image_array)
    # Compute the Euclidean distance as a measure of similarity
    score = euclidean(query_features, features)
    similarities.append((image_path, score))

# Sort images by similarity score (lower is more similar)
similarities.sort(key=lambda x: x[1])

# Display results
top_k = 10
print("Query Image:", image_query_path)
print("Query Features:", query_features)

plt.figure(figsize=(20, 10))
for i in range(min(top_k, len(similarities))):
    image_path, score = similarities[i]
    print(f"{i+1}. {image_path} - Score: {score:.4f}")
    image = Image.open(image_path)
    plt.subplot(2, top_k, i + 1)
    plt.imshow(image, cmap='gray')
    plt.title(f"Rank {i+1}\nScore: {score:.2f}")
    plt.axis('off')

plt.tight_layout()
plt.show()

Query Image: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg
Query Features: [0.7737094, 5.1270514, 0.78098327, 2.0772831, 1.7718228, 1.333809, 10.92961, 5.2317104, 0.7993963, 2.1531837, 0.35620195, 1.2123643, 0.80945396, 4.8307414, 0.7836735, 2.23209, 1.7645326, 1.3384725, 10.917528, 5.1473303, 0.79890674, 2.0213199, 0.35669255, 1.1969672]
1. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg - Score: 0.0000
2. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2846.jpg - Score: 2.3883
3. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2951.jpg - Score: 2.5517
4. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2948.jpg - Score: 2.6817
5. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2966.jpg - Score: 2.9718
6. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2956.jpg - Score: 3.0721
7. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2940.jpg - Score: 3.5847
8. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2753.jpg - Score: 3.5937
9. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2954.jpg - Score: 3.8134
10. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2973.jpg - Score: 3.9012

Wavelet filter HSV

import os
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial.distance import euclidean
import pywt  # PyWavelets library for wavelet decomposition

# Function to compute wavelet features
def wavelet_features(img, wavelet="haar", level=2):
    """
    Extracts features using the Discrete Wavelet Transform (DWT).
    :param img: Input grayscale image (numpy array).
    :param wavelet: Type of wavelet to use (e.g., 'haar', 'db1').
    :param level: Number of decomposition levels.
    :return: List of wavelet features (mean and std for each detail coefficient).
    """
    # Perform the wavelet decomposition
    coeffs = pywt.wavedec2(img, wavelet, level=level)
    features = []

    # The first element in coeffs is the approximation coefficients (cA)
    cA = coeffs[0]

    # Iterate over the remaining coefficients (detail coefficients)
    for i in range(1, len(coeffs)):
        cH, cV, cD = coeffs[i]
        features.extend([
            np.mean(cH), np.std(cH),  # Horizontal details
            np.mean(cV), np.std(cV),  # Vertical details
            np.mean(cD), np.std(cD)   # Diagonal details
        ])
    return features

# Directory containing images
directory_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/"
image_query_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg"

# List of file paths of images
image_paths = [os.path.join(directory_path, f) for f in os.listdir(directory_path)
               if f.endswith((".jpg", ".jpeg", ".png"))]

# Load the query image as HSV and compute wavelet features on the Value (V) channel
query_image = Image.open(image_query_path).convert("HSV")
query_array = np.array(query_image)
v_channel = query_array[:, :, 2] / 255.0  # Normalize the Value (V) channel to [0, 1]
query_features = wavelet_features(v_channel)

# List to store similarity scores
similarities = []

# Loop through each image to calculate wavelet features and compute similarity
for image_path in image_paths:
    image = Image.open(image_path).convert("HSV")
    image_array = np.array(image)
    v_channel = image_array[:, :, 2] / 255.0  # Normalize the Value (V) channel to [0, 1]
    # Compute wavelet features for the current image
    features = wavelet_features(v_channel)
    # Compute the Euclidean distance as a measure of similarity
    score = euclidean(query_features, features)
    similarities.append((image_path, score))

# Sort images by similarity score (lower is more similar)
similarities.sort(key=lambda x: x[1])

# Display results
top_k = 10
print("Query Image:", image_query_path)
print("Query Features:", query_features)

plt.figure(figsize=(20, 10))
for i in range(min(top_k, len(similarities))):
    image_path, score = similarities[i]
    print(f"{i+1}. {image_path} - Score: {score:.4f}")
    image = Image.open(image_path)
    plt.subplot(2, top_k, i + 1)
    plt.imshow(image, cmap='gray')
    plt.title(f"Rank {i+1}\nScore: {score:.2f}")
    plt.axis('off')

plt.tight_layout()
plt.show()

Query Image: C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg
Query Features: [0.006320210375817002, 0.26132273127489825, 0.0024752348856209023, 0.24986578463526757, 0.0015127144607843118, 0.17555870497344098, 0.0001959507761437906, 0.1307076992958428, 0.0020073784722222225, 0.1317003533824071, 0.0007321027369281041, 0.09705769359264763]
1. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg - Score: 0.0000
2. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2770.jpg - Score: 0.0234
3. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2940.jpg - Score: 0.0352
4. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2670.jpg - Score: 0.0404
5. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2772.jpg - Score: 0.0421
6. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2846.jpg - Score: 0.0433
7. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2954.jpg - Score: 0.0439
8. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2967.jpg - Score: 0.0481
9. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2970.jpg - Score: 0.0514
10. C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2660.jpg - Score: 0.0523

Compare and comment on the results (HSV)


import os
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial.distance import euclidean
import mahotas as mh
import cv2
import pywt

# Function to compute Tamura features (reuses the tamura_* functions defined above)
def tamura_features(img):
    return [tamura_roughness(img), tamura_linearity(img), tamura_regularity(img),
            tamura_coarseness(img), tamura_contrast(img), tamura_directionality(img)]

# Function to compute Haralick features
def haralick_features(img):
    img = (img * 255).astype(np.uint8)
    return mh.features.haralick(img).mean(axis=0)

# Function to compute Gabor features
def gabor_features(img, frequencies=[0.1, 0.2, 0.3],
                   angles=[0, np.pi/4, np.pi/2, 3*np.pi/4]):
    features = []
    for theta in angles:
        for freq in frequencies:
            kernel = cv2.getGaborKernel(ksize=(15, 15), sigma=4.0, theta=theta,
                                        lambd=1/freq, gamma=0.5, psi=0)
            filtered = cv2.filter2D(img, cv2.CV_32F, kernel)
            features.append(np.mean(filtered))
            features.append(np.std(filtered))
    return features

# Function to compute Wavelet features
def wavelet_features(img, wavelet="haar", level=2):
    coeffs = pywt.wavedec2(img, wavelet, level=level)
    features = []
    cA = coeffs[0]
    for i in range(1, len(coeffs)):
        cH, cV, cD = coeffs[i]
        features.extend([np.mean(cH), np.std(cH), np.mean(cV), np.std(cV),
                         np.mean(cD), np.std(cD)])
    return features

# Directory containing images
directory_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/"
image_query_path = "C:/Users/Ahmed/Desktop/Master 2/irdm/tp3/base d'images N°5/2774.jpg"

# List of file paths of images
image_paths = [os.path.join(directory_path, f) for f in os.listdir(directory_path)
               if f.endswith((".jpg", ".jpeg", ".png"))]

# Load the query image and extract only the 'Value' channel (third channel in HSV)
query_image = Image.open(image_query_path).convert("HSV")
query_array = np.array(query_image)[:, :, 2] / 255.0  # Use the Value channel

# Compute the query features for each method
query_tamura = tamura_features(query_array)
query_haralick = haralick_features(query_array)
query_gabor = gabor_features(query_array)
query_wavelet = wavelet_features(query_array)

# Lists to store similarity scores for all methods
similarities_tamura = []
similarities_haralick = []
similarities_gabor = []
similarities_wavelet = []

# Similarly for the other images, extract the 'Value' channel
for image_path in image_paths:
    image = Image.open(image_path).convert("HSV")
    image_array = np.array(image)[:, :, 2] / 255.0  # Use the Value channel

    # Compute the features for the current image
    features_tamura = tamura_features(image_array)
    features_haralick = haralick_features(image_array)
    features_gabor = gabor_features(image_array)
    features_wavelet = wavelet_features(image_array)

    # Compute the Euclidean distance for each feature set
    score_tamura = euclidean(query_tamura, features_tamura)
    score_haralick = euclidean(query_haralick, features_haralick)
    score_gabor = euclidean(query_gabor, features_gabor)
    score_wavelet = euclidean(query_wavelet, features_wavelet)

    similarities_tamura.append((image_path, score_tamura, features_tamura))
    similarities_haralick.append((image_path, score_haralick, features_haralick))
    similarities_gabor.append((image_path, score_gabor, features_gabor))
    similarities_wavelet.append((image_path, score_wavelet, features_wavelet))

# Sort images by similarity score (lower is more similar) for each method
similarities_tamura.sort(key=lambda x: x[1])
similarities_haralick.sort(key=lambda x: x[1])
similarities_gabor.sort(key=lambda x: x[1])
similarities_wavelet.sort(key=lambda x: x[1])

# Display the results for all methods together
top_k = 10
fig, axes = plt.subplots(4, top_k, figsize=(20, 15))

# Tamura features display
for i in range(min(top_k, len(similarities_tamura))):
    image_path, score, features = similarities_tamura[i]
    image = Image.open(image_path)
    axes[0, i].imshow(image, cmap='gray')
    axes[0, i].set_title(f"Rank {i+1}\nScore: {score:.2f}")
    axes[0, i].axis('off')

# Label the Tamura row (note: this overwrites the Rank 1 title of that row)
axes[0, 0].set_title("Tamura Features", fontsize=14)

# Haralick features display
for i in range(min(top_k, len(similarities_haralick))):
    image_path, score, features = similarities_haralick[i]
    image = Image.open(image_path)
    axes[1, i].imshow(image, cmap='gray')
    axes[1, i].set_title(f"Rank {i+1}\nScore: {score:.2f}")
    axes[1, i].axis('off')

# Label the Haralick row (same caveat as above)
axes[1, 0].set_title("Haralick Features", fontsize=14)

# Gabor features display
for i in range(min(top_k, len(similarities_gabor))):
    image_path, score, features = similarities_gabor[i]
    image = Image.open(image_path)
    axes[2, i].imshow(image, cmap='gray')
    axes[2, i].set_title(f"Rank {i+1}\nScore: {score:.2f}")
    axes[2, i].axis('off')

# Label the Gabor row (same caveat as above)
axes[2, 0].set_title("Gabor Features", fontsize=14)

# Wavelet features display
for i in range(min(top_k, len(similarities_wavelet))):
    image_path, score, features = similarities_wavelet[i]
    image = Image.open(image_path)
    axes[3, i].imshow(image, cmap='gray')
    axes[3, i].set_title(f"Rank {i+1}\nScore: {score:.2f}")
    axes[3, i].axis('off')

# Label the Wavelet row (same caveat as above)
axes[3, 0].set_title("Wavelet Features", fontsize=14)

plt.tight_layout()
plt.show()
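
A likely reason the HSV rankings differ from the grayscale ones: PIL's "L" conversion is a luma-weighted average (about 0.299R + 0.587G + 0.114B), while the HSV Value channel is max(R, G, B), so the two pipelines analyze genuinely different texture inputs. A quick sketch to visualize that difference on the query image (my addition, not part of the TP):

# Sketch (my addition): compare the grayscale (luma) and HSV Value
# channels of the query image to see why the two pipelines disagree.
rgb = Image.open(image_query_path).convert("RGB")
luma = np.array(rgb.convert("L"), dtype=np.float32)
value = np.array(rgb.convert("HSV"))[:, :, 2].astype(np.float32)
diff = np.abs(luma - value)
print("mean |L - V| =", diff.mean())
plt.figure(figsize=(12, 4))
for idx, (title, im) in enumerate([("L (luma)", luma), ("V (value)", value), ("|L - V|", diff)]):
    plt.subplot(1, 3, idx + 1)
    plt.imshow(im, cmap='gray')
    plt.title(title)
    plt.axis('off')
plt.show()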
