0% found this document useful (0 votes)
47 views5 pages

AHP Python Code

The document describes a process for performing a pairwise comparison and calculating weights for various criteria using a matrix. It includes steps for normalizing the matrix, calculating the consistency index and ratio, and determining if the comparisons are consistent. Additionally, it outlines a method for reading raster data, reclassifying values, and applying weights to generate a weighted overlay, which is then saved as a new raster file.

Uploaded by

Aparajita Ghosh
Copyright
© All Rights Reserved
We take content rights seriously. If you suspect this is your content, claim it here.
Available Formats
Download as DOCX, PDF, TXT or read online on Scribd
0% found this document useful (0 votes)
47 views5 pages

AHP Python Code

The document describes a process for performing a pairwise comparison and calculating weights for various criteria using a matrix. It includes steps for normalizing the matrix, calculating the consistency index and ratio, and determining if the comparisons are consistent. Additionally, it outlines a method for reading raster data, reclassifying values, and applying weights to generate a weighted overlay, which is then saved as a new raster file.

Uploaded by

Aparajita Ghosh
Copyright
© All Rights Reserved
We take content rights seriously. If you suspect this is your content, claim it here.
Available Formats
Download as DOCX, PDF, TXT or read online on Scribd
You are on page 1/ 5

import numpy as np

# --- AHP pairwise comparison and consistency check --------------------------
# Rows/columns follow a single criterion ordering (presumably PISR, MAAT, LST,
# TWI, LULC, Slope, Aspect, matching the raster section below -- TODO confirm).
# NOTE(review): entry [4][2] is 1/3 while its mirror [2][4] is 2; a strictly
# reciprocal AHP matrix would have 1/2 there -- confirm against the judgements.
pairwise_matrix = np.array([
    [1,   3,   3,   2,   5,   5,   7],
    [1/3, 1,   3,   2,   3,   3,   5],
    [1/3, 1/3, 1,   1/2, 2,   2,   3],
    [1/2, 1/2, 2,   1,   5,   5,   7],
    [1/5, 1/3, 1/3, 1/5, 1,   1/2, 5],
    [1/5, 1/3, 1/2, 1/5, 2,   1,   5],
    [1/7, 1/5, 1/3, 1/7, 1/5, 1/5, 1],
])

n = pairwise_matrix.shape[0]  # number of criteria

# Normalize each column to sum to 1, then average across each row to obtain
# the priority vector (the standard eigenvector approximation).
column_sums = pairwise_matrix.sum(axis=0)
normalized_matrix = pairwise_matrix / column_sums
weights = normalized_matrix.mean(axis=1)

# Principal-eigenvalue estimate: mean ratio of (A @ w) to w.
weighted_sum = pairwise_matrix @ weights
lambda_max = (weighted_sum / weights).mean()

# Saaty's Consistency Index and Consistency Ratio.
CI = (lambda_max - n) / (n - 1)

# Random consistency index per matrix size (Saaty's RI table).
RI_dict = {1: 0, 2: 0, 3: 0.58, 4: 0.9, 5: 1.12, 6: 1.24, 7: 1.32, 8: 1.41, 9: 1.45}
RI = RI_dict[n]

# RI is 0 for n <= 2, where the ratio is undefined; report 0 in that case.
CR = 0 if not RI else CI / RI

# --- Report the AHP results --------------------------------------------------
print("Pairwise Comparison Matrix:")
print(pairwise_matrix)

print("\nNormalized Matrix:")
print(normalized_matrix)

print("\nWeights (Priority Vector):")
for rank, w in enumerate(weights, start=1):
    print(f"Criterion {rank}: {w:.4f}")

print(f"\nλ_max: {lambda_max:.4f}")
print(f"Consistency Index (CI): {CI:.4f}")
print(f"Consistency Ratio (CR): {CR:.4f}")

# Saaty's rule of thumb: CR below 0.1 means the judgements are acceptably consistent.
if CR < 0.1:
    print("The pairwise matrix is consistent.")
else:
    print("The pairwise matrix is not consistent. Please revise the comparisons.")


import rasterio

import numpy as np

# File paths for each criterion raster layer (update these with your paths).
rasters = {
    'PISR': 'path_to_pISR.tif',
    'MAAT': 'path_to_MAAT.tif',
    'LST': 'path_to_LST.tif',
    'TWI': 'path_to_TWI.tif',
    'LULC': 'path_to_LULC.tif',
    'Slope': 'path_to_Slope.tif',
    'Aspect': 'path_to_Aspect.tif',
}  # BUG FIX: closing brace was missing, making the script a syntax error.

# AHP priority weight for each criterion.
# NOTE(review): these sum to 0.979 rather than 1.0 -- presumably rounded from
# the AHP priority vector above; confirm or renormalize if exactness matters.
weights = {
    'PISR': 0.27,
    'MAAT': 0.11,
    'LST': 0.37,
    'TWI': 0.08,
    'LULC': 0.026,
    'Slope': 0.055,
    'Aspect': 0.068,
}  # BUG FIX: closing brace was missing, making the script a syntax error.

def read_raster(raster_path):
    """Open a raster file and return (band-1 array, affine transform, CRS)."""
    with rasterio.open(raster_path) as dataset:
        return dataset.read(1), dataset.transform, dataset.crs

def reclassify_to_5_scale(raster_array):
    """Linearly rescale values from the 1-4 scale onto a 1-5 scale.

    1 is assumed to be the strongest correlation and 4 the weakest; the
    ordering is preserved. Values outside [1, 4] are clamped to the nearest
    endpoint, which is np.interp's documented behavior.
    """
    return np.interp(raster_array, [1, 4], [1, 5])

# Build the weighted overlay: read each criterion raster (band 1 only --
# the transform and CRS returned by read_raster are not needed here),
# rescale it to the 1-5 suitability scale, and scale by its AHP weight.
weighted_layers = [
    reclassify_to_5_scale(read_raster(raster_path)[0]) * weights[criterion]
    for criterion, raster_path in rasters.items()
]

# Element-wise sum across all weighted layers gives the composite overlay.
total_weighted_overlay = np.sum(weighted_layers, axis=0)


# Optionally clamp the composite overlay back onto the 1-5 suitability scale.
total_weighted_overlay = np.clip(total_weighted_overlay, 1, 5)

# Persist the result, reusing the georeferencing metadata of the first input.
output_path = 'path_to_output_weighted_overlay.tif'
with rasterio.open(rasters['PISR']) as src:
    # Copy the profile from the PISR layer, then switch to a single
    # float32 band for the overlay output.
    metadata = src.meta
    metadata.update(dtype=rasterio.float32, count=1)

    with rasterio.open(output_path, 'w', **metadata) as dst:
        dst.write(total_weighted_overlay.astype(rasterio.float32), 1)

print(f"Weighted overlay has been saved to {output_path}")

You might also like