
Aiml Lab

The document collects several AI/ML laboratory programs, each given as a runnable Python listing: 1) the A* search algorithm, which finds the shortest path between a start and a stop node by maintaining open and closed sets; 2) the AO* algorithm, which searches AND-OR graphs using heuristics; 3) the candidate elimination algorithm, which derives specific and general hypotheses from training examples; 4) the ID3 decision tree algorithm, which uses information gain to recursively build a classification tree; 5) an artificial neural network trained with backpropagation; 6) a naive Bayesian classifier; 7) the EM algorithm compared with k-means clustering; 8) the k-nearest neighbours classifier; and 9) locally weighted regression.


1. A* algorithm

def aStarAlgo(start_node, stop_node):
    open_set = {start_node}            # nodes discovered but not yet expanded
    closed_set = set()                 # nodes already expanded
    g = {start_node: 0}                # cost of the cheapest known path from start
    parents = {start_node: start_node}

    while len(open_set) > 0:
        # pick the open node with the lowest f(n) = g(n) + h(n)
        n = None
        for v in open_set:
            if n is None or g[v] + heuristic(v) < g[n] + heuristic(n):
                n = v

        if n == stop_node or Graph_nodes[n] is None:
            pass
        else:
            for (m, weight) in get_neighbors(n):
                if m not in open_set and m not in closed_set:
                    open_set.add(m)
                    parents[m] = n
                    g[m] = g[n] + weight
                else:
                    # a cheaper path to m was found: update its cost and parent
                    if g[m] > g[n] + weight:
                        g[m] = g[n] + weight
                        parents[m] = n
                        if m in closed_set:
                            closed_set.remove(m)
                            open_set.add(m)

        if n is None:
            print('Path does not exist!')
            return None

        if n == stop_node:
            # reconstruct the path by walking back through the parents
            path = []
            while parents[n] != n:
                path.append(n)
                n = parents[n]
            path.append(start_node)
            path.reverse()
            print('Path found: {}'.format(path))
            return path

        open_set.remove(n)
        closed_set.add(n)

    print('Path does not exist!')
    return None

def get_neighbors(v):
    if v in Graph_nodes:
        return Graph_nodes[v]
    else:
        return None

def heuristic(n):
    # estimated remaining distance from each node to the goal 'J'
    H_dist = {
        'A': 10, 'B': 8, 'C': 5, 'D': 7, 'E': 3,
        'F': 6, 'G': 5, 'H': 3, 'I': 1, 'J': 0
    }
    return H_dist[n]

Graph_nodes = {
    'A': [('B', 6), ('F', 3)],
    'B': [('C', 3), ('D', 2)],
    'C': [('D', 1), ('E', 5)],
    'D': [('C', 1), ('E', 8)],
    'E': [('I', 5), ('J', 5)],
    'F': [('G', 1), ('H', 7)],
    'G': [('I', 3)],
    'H': [('I', 2)],
    'I': [('E', 5), ('J', 3)],
}

aStarAlgo('A', 'J')
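Hand-tracing the sample graph above, the cheapest route from 'A' to 'J' goes through 'F', 'G' and 'I', so the call should print:

Path found: ['A', 'F', 'G', 'I', 'J']

with total cost 3 + 1 + 3 + 3 = 10.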

2. AO* algorithm

class Graph:
    def __init__(self, graph, heuristicNodeList, startNode):
        self.graph = graph
        self.H = heuristicNodeList     # heuristic value of each node
        self.start = startNode
        self.parent = {}
        self.status = {}               # -1 marks a solved node
        self.solutionGraph = {}

    def applyAOStar(self):
        self.aoStar(self.start, False)

    def getNeighbors(self, v):
        return self.graph.get(v, '')

    def getStatus(self, v):
        return self.status.get(v, 0)

    def setStatus(self, v, val):
        self.status[v] = val

    def getHeuristicNodeValue(self, n):
        return self.H.get(n, 0)

    def setHeuristicNodeValue(self, n, value):
        self.H[n] = value

    def printSolution(self):
        print("FOR GRAPH SOLUTION, TRAVERSE THE GRAPH FROM THE START NODE:", self.start)
        print("------------------------------------------------------------")
        print(self.solutionGraph)
        print("------------------------------------------------------------")

    def computeMinimumCostChildNodes(self, v):
        # return the cheapest AND arc out of v: its cost and its child nodes
        minimumCost = 0
        costToChildNodeListDict = {minimumCost: []}
        flag = True
        for nodeInfoTupleList in self.getNeighbors(v):
            cost = 0
            nodeList = []
            for c, weight in nodeInfoTupleList:
                cost = cost + self.getHeuristicNodeValue(c) + weight
                nodeList.append(c)
            if flag:                       # first arc seen
                minimumCost = cost
                costToChildNodeListDict[minimumCost] = nodeList
                flag = False
            elif minimumCost > cost:       # cheaper arc found
                minimumCost = cost
                costToChildNodeListDict[minimumCost] = nodeList
        return minimumCost, costToChildNodeListDict[minimumCost]

    def aoStar(self, v, backTracking):
        print("HEURISTIC VALUES :", self.H)
        print("SOLUTION GRAPH   :", self.solutionGraph)
        print("PROCESSING NODE  :", v)
        print("--------------------------------------------------------------------")

        if self.getStatus(v) >= 0:         # node not yet solved
            minimumCost, childNodeList = self.computeMinimumCostChildNodes(v)
            self.setHeuristicNodeValue(v, minimumCost)
            self.setStatus(v, len(childNodeList))

            # v is solved only if every child on its cheapest arc is solved
            solved = True
            for childNode in childNodeList:
                self.parent[childNode] = v
                if self.getStatus(childNode) != -1:
                    solved = False

            if solved:
                self.setStatus(v, -1)
                self.solutionGraph[v] = childNodeList

            if v != self.start:            # propagate the revised cost upwards
                self.aoStar(self.parent[v], True)

            if not backTracking:           # expand the children of v
                for childNode in childNodeList:
                    self.setStatus(childNode, 0)
                    self.aoStar(childNode, False)
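In the adjacency lists below, each inner list is one AND arc (all of its nodes must be solved together) and the outer list holds the OR alternatives. For example:

'A': [[('B', 1), ('C', 1)], [('D', 1)]]

reads as "solve A via (B AND C), or via D alone"; every edge weight here is 1.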

h1 = {'A': 1, 'B': 6, 'C': 2, 'D': 12, 'E': 2, 'F': 1, 'G': 5, 'H': 7, 'I': 7, 'J': 1, 'T': 3}
graph1 = {
    'A': [[('B', 1), ('C', 1)], [('D', 1)]],
    'B': [[('G', 1)], [('H', 1)]],
    'C': [[('J', 1)]],
    'D': [[('E', 1), ('F', 1)]],
    'G': [[('I', 1)]]
}
G1 = Graph(graph1, h1, 'A')
G1.applyAOStar()
G1.printSolution()

h2 = {'A': 1, 'B': 6, 'C': 12, 'D': 10, 'E': 4, 'F': 4, 'G': 5, 'H': 7}
graph2 = {
    'A': [[('B', 1), ('C', 1)], [('D', 1)]],
    'B': [[('G', 1)], [('H', 1)]],
    'D': [[('E', 1), ('F', 1)]]
}
G2 = Graph(graph2, h2, 'A')
G2.applyAOStar()
G2.printSolution()

3. Candidate elimination

import csv

with open("trainingexamples.csv") as f:
    csv_file = csv.reader(f)
    data = list(csv_file)

# initialise the specific hypothesis S from the first example,
# and the general hypothesis G as the all-'?' boundary
specific = data[0][:-1]
general = [['?' for i in range(len(specific))] for j in range(len(specific))]

for step, i in enumerate(data, start=1):
    if i[-1] == "Yes":        # positive example: generalise S
        for j in range(len(specific)):
            if i[j] != specific[j]:
                specific[j] = "?"
                general[j][j] = "?"
    elif i[-1] == "No":       # negative example: specialise G
        for j in range(len(specific)):
            if i[j] != specific[j]:
                general[j][j] = specific[j]
            else:
                general[j][j] = "?"

    print("\nStep " + str(step) + " of Candidate Elimination Algorithm")
    print(specific)
    print(general)

# keep only the rows of G that still constrain some attribute
gh = []
for i in general:
    for j in i:
        if j != '?':
            gh.append(i)
            break

print("\nFinal Specific hypothesis:\n", specific)
print("\nFinal General hypothesis:\n", gh)

4. ID3 algorithm

import pandas as pd
from pprint import pprint
from sklearn.feature_selection import mutual_info_classif
from collections import Counter

def id3(df, target_attribute, attribute_names, default_class=None):
    cnt = Counter(x for x in df[target_attribute])
    if len(cnt) == 1:                        # all examples share one class: leaf
        return next(iter(cnt))
    elif df.empty or (not attribute_names):  # nothing left to split on
        return default_class
    else:
        # pick the attribute with the highest information gain
        gainz = mutual_info_classif(df[attribute_names], df[target_attribute],
                                    discrete_features=True)
        index_of_max = gainz.tolist().index(max(gainz))
        best_attr = attribute_names[index_of_max]
        tree = {best_attr: {}}
        remaining_attribute_names = [i for i in attribute_names if i != best_attr]

        # recurse over each value of the chosen attribute
        for attr_val, data_subset in df.groupby(best_attr):
            subtree = id3(data_subset, target_attribute,
                          remaining_attribute_names, default_class)
            tree[best_attr][attr_val] = subtree
        return tree

df = pd.read_csv("p-tennis.csv")

attribute_names = df.columns.tolist()
print("List of attribute names:", attribute_names)
attribute_names.remove("PlayTennis")

# encode the string-valued columns as integer codes
for colname in df.select_dtypes("object"):
    df[colname], _ = df[colname].factorize()
print(df)

tree = id3(df, "PlayTennis", attribute_names)
print("The tree structure:")
pprint(tree)
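Both this program and the naive Bayes program below read p-tennis.csv. Judging from the column names used there, a plausible first few rows (an assumed layout based on Mitchell's PlayTennis data, not taken from the original) are:

Outlook,Temperature,Humidity,Windy,PlayTennis
Sunny,Hot,High,Weak,No
Sunny,Hot,High,Strong,No
Overcast,Hot,High,Weak,Yes
Rainy,Mild,High,Weak,Yes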

5. Artificial neural network (backpropagation)

import numpy as np

# training data: two input features, one target, scaled to [0, 1]
X = np.array(([2, 9], [1, 5], [3, 6]), dtype=float)
y = np.array(([92], [86], [89]), dtype=float)
X = X / np.amax(X, axis=0)
y = y / 100

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

def derivatives_sigmoid(x):
    # x is the sigmoid *output*, so the derivative is x * (1 - x)
    return x * (1 - x)

epoch = 7000
lr = 0.1
inputlayer_neurons = 2
hiddenlayer_neurons = 3
output_neurons = 1

# random initial weights and biases
wh = np.random.uniform(size=(inputlayer_neurons, hiddenlayer_neurons))
bh = np.random.uniform(size=(1, hiddenlayer_neurons))
wout = np.random.uniform(size=(hiddenlayer_neurons, output_neurons))
bout = np.random.uniform(size=(1, output_neurons))

for i in range(epoch):
    # forward pass
    hinp = np.dot(X, wh) + bh
    hlayer_act = sigmoid(hinp)
    outinp = np.dot(hlayer_act, wout) + bout
    output = sigmoid(outinp)

    # backward pass: propagate the error through the network
    EO = y - output
    d_output = EO * derivatives_sigmoid(output)
    EH = d_output.dot(wout.T)
    d_hiddenlayer = EH * derivatives_sigmoid(hlayer_act)

    # weight updates; the original listing never updated the biases,
    # so the two bias updates below are an added fix to train them too
    wout += hlayer_act.T.dot(d_output) * lr
    wh += X.T.dot(d_hiddenlayer) * lr
    bout += np.sum(d_output, axis=0, keepdims=True) * lr
    bh += np.sum(d_hiddenlayer, axis=0, keepdims=True) * lr

print("Input:\n" + str(X))
print("Actual Output:\n" + str(y))
print("Predicted Output:\n", output)

6. Naive Bayesian classifier

import pandas as pd
from sklearn.preprocessing import LabelEncoder
from sklearn.naive_bayes import GaussianNB
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

data = pd.read_csv('p-tennis.csv')
print("The first 5 values of the data are:\n", data.head())

X = data.iloc[:, :-1]
print("\nThe first 5 values of the train data are:\n", X.head())

y = data.iloc[:, -1]
print("\nThe first 5 values of the train output are:\n", y.head())

# encode each categorical column as integer codes
le_outlook = LabelEncoder()
X.Outlook = le_outlook.fit_transform(X.Outlook)

le_Temperature = LabelEncoder()
X.Temperature = le_Temperature.fit_transform(X.Temperature)

le_Humidity = LabelEncoder()
X.Humidity = le_Humidity.fit_transform(X.Humidity)

le_Windy = LabelEncoder()
X.Windy = le_Windy.fit_transform(X.Windy)

print("\nNow the train data is:\n", X.head())

le_PlayTennis = LabelEncoder()
y = le_PlayTennis.fit_transform(y)
print("\nNow the train output is:\n", y)

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20)

classifier = GaussianNB()
classifier.fit(X_train, y_train)

print("Accuracy is:", accuracy_score(y_test, classifier.predict(X_test)))

7. EM algorithm

from sklearn import datasets
from sklearn import metrics
from sklearn.cluster import KMeans
from sklearn.model_selection import train_test_split

iris = datasets.load_iris()
X_train, X_test, y_train, y_test = train_test_split(iris.data, iris.target)

# k-means clustering is unsupervised: only the features are used for fitting
model = KMeans(n_clusters=3)
model.fit(X_train)
print('K-Means:', metrics.accuracy_score(y_test, model.predict(X_test)))

from sklearn.mixture import GaussianMixture

# EM: fit a 3-component Gaussian mixture to the same data
model2 = GaussianMixture(n_components=3)
model2.fit(X_train)
print('EM Algorithm:', metrics.accuracy_score(y_test, model2.predict(X_test)))
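Cluster IDs from KMeans and GaussianMixture have no fixed correspondence to the iris class labels, so the raw accuracy_score above can look poor even for a good clustering. One common fix, sketched here as an addition to the listing, maps each cluster to the majority true class among its training members:

import numpy as np

# learn the cluster -> class mapping on the training split
train_clusters = model2.predict(X_train)
mapping = {c: np.bincount(y_train[train_clusters == c]).argmax()
           for c in np.unique(train_clusters)}
test_pred = np.array([mapping[c] for c in model2.predict(X_test)])
print('EM accuracy after remapping:', metrics.accuracy_score(y_test, test_pred))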

8. KNN

from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn import datasets

iris = datasets.load_iris()
print("Iris Data set loaded...")

x_train, x_test, y_train, y_test = train_test_split(iris.data, iris.target,
                                                    test_size=0.1)
for i in range(len(iris.target_names)):
    print("Label", i, "-", str(iris.target_names[i]))

classifier = KNeighborsClassifier(n_neighbors=5)
classifier.fit(x_train, y_train)
y_pred = classifier.predict(x_test)

print("Results of Classification using K-NN with K=5")
for r in range(0, len(x_test)):
    print(" Sample:", str(x_test[r]),
          " Actual-label:", str(y_test[r]),
          " Predicted-label:", str(y_pred[r]))

print("Classification Accuracy:", classifier.score(x_test, y_test))

9. Locally weighted regression

import numpy as np
import matplotlib.pyplot as plt

# synthetic data: a noisy nonlinear curve
x = np.linspace(-5, 5, 1000)
y = np.log(np.abs((x ** 2) - 1) + 0.5)
x = x + np.random.normal(scale=0.05, size=1000)
plt.scatter(x, y, alpha=0.3)

def radial_kernel(x0, x, tau):
    # Gaussian weights: points near the query x0 count more
    return np.exp(np.sum((x - x0) ** 2, axis=1) / (-2 * tau ** 2))

def local_regression(x0, x, y, tau):
    # weighted least-squares fit around the query point x0
    x0 = np.r_[1, x0]                 # prepend the bias term
    x = np.c_[np.ones(len(x)), x]
    xw = x.T * radial_kernel(x0, x, tau)
    beta = np.linalg.pinv(xw @ x) @ xw @ y
    return x0 @ beta

def plot_lr(tau):
    domain = np.linspace(-5, 5, num=500)
    pred = [local_regression(x0, x, y, tau) for x0 in domain]
    plt.scatter(x, y, alpha=0.3)
    plt.plot(domain, pred, color="red")
    return plt

plot_lr(1).show()
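tau is the kernel bandwidth: smaller values weight only very nearby points, giving a wigglier, more local fit, while larger values approach an ordinary global linear fit. A quick comparison using two extra calls (not in the original):

plot_lr(0.1).show()
plot_lr(10).show()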
