
DEPTH FIRST SEARCH

MATLAB CODE

s = [1 1 1 1 2 2 2 2 2 2 2 2 2 2 15 15 15 15 15];
t = [3 5 4 2 14 6 11 12 13 10 7 9 8 15 16 17 19 18 20];
G = graph(s,t);
plot(G)
v = dfsearch(G,17)

OUTPUT

v =

    17
    15
     2
     1
     3
     4
     5
     6
     7
     8
     9
    10
    11
    12
    13
    14
    16
    18
    19
    20

[Figure: plot(G) of the 20-node graph]
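For comparison, the same depth-first traversal can be written in plain Python. This is a minimal sketch, not part of the original record; the helper names build_adj and dfs are our own, and neighbors are visited in ascending index order to mirror MATLAB's dfsearch.

# Minimal DFS sketch (our addition), built from the s/t edge lists above.
def build_adj(s, t):
    adj = {}
    for a, b in zip(s, t):            # undirected graph: record both directions
        adj.setdefault(a, []).append(b)
        adj.setdefault(b, []).append(a)
    return adj

def dfs(adj, start):
    visited, order = set(), []
    def visit(u):
        visited.add(u)
        order.append(u)
        for v in sorted(adj[u]):      # ascending index order, like dfsearch
            if v not in visited:
                visit(v)
    visit(start)
    return order

s = [1,1,1,1,2,2,2,2,2,2,2,2,2,2,15,15,15,15,15]
t = [3,5,4,2,14,6,11,12,13,10,7,9,8,15,16,17,19,18,20]
print(dfs(build_adj(s, t), 17))       # [17, 15, 2, 1, 3, 4, 5, ...]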
ENSEMBLING TECHNIQUES

STACKING

PYTHON CODE

from numpy import mean
from numpy import std
from sklearn.datasets import make_classification
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import RepeatedStratifiedKFold
from sklearn.linear_model import LogisticRegression
from sklearn.neighbors import KNeighborsClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.svm import SVC
from sklearn.naive_bayes import GaussianNB
from matplotlib import pyplot

# Build a synthetic binary classification dataset
def get_dataset():
    X, y = make_classification(n_samples=1000, n_features=20, n_informative=15,
                               n_redundant=5, random_state=1)
    return X, y

# The standalone base models to compare
def get_models():
    models = dict()
    models['lr'] = LogisticRegression()
    models['knn'] = KNeighborsClassifier()
    models['cart'] = DecisionTreeClassifier()
    models['svm'] = SVC()
    models['bayes'] = GaussianNB()
    return models
# Evaluate a model with repeated stratified 10-fold cross-validation
def evaluate_model(model, X, y):
    cv = RepeatedStratifiedKFold(n_splits=10, n_repeats=3, random_state=1)
    scores = cross_val_score(model, X, y, scoring='accuracy', cv=cv, n_jobs=-1,
                             error_score='raise')
    return scores

X, y = get_dataset()
models = get_models()
results, names = list(), list()
for name, model in models.items():
    scores = evaluate_model(model, X, y)
    results.append(scores)
    names.append(name)
    print('>%s %.3f (%.3f)' % (name, mean(scores), std(scores)))
pyplot.boxplot(results, labels=names, showmeans=True)
pyplot.show()

OUTPUT

>lr 0.866 (0.029)
>knn 0.931 (0.025)
>cart 0.825 (0.043)
>svm 0.957 (0.020)
>bayes 0.833 (0.031)

[Figure: box-and-whisker plot of cross-validation accuracy for lr, knn, cart, svm and bayes]
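The listing above only benchmarks the standalone base models; the stacking step itself is not shown in the scan. A minimal sketch of that step, assuming scikit-learn's StackingClassifier over the same level-0 models with a logistic-regression meta-learner (the final_estimator and cv=5 choices are our assumptions, not from the original):

# Hedged sketch (our addition): stack the base models defined above.
from sklearn.ensemble import StackingClassifier

def get_stacking():
    level0 = [('lr', LogisticRegression()),
              ('knn', KNeighborsClassifier()),
              ('cart', DecisionTreeClassifier()),
              ('svm', SVC()),
              ('bayes', GaussianNB())]
    level1 = LogisticRegression()     # meta-learner fit on out-of-fold predictions
    return StackingClassifier(estimators=level0, final_estimator=level1, cv=5)

scores = evaluate_model(get_stacking(), X, y)
print('>stacking %.3f (%.3f)' % (mean(scores), std(scores)))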


DECISION TREE

MATLAB CODE

load fisheriris
inds = ~strcmp(species,'setosa');
X = meas(inds,3:4);
y = species(inds);
train_feat  = [meas(1:40,:); meas(51:90,:); meas(101:140,:)];
train_label = [species(1:40,:); species(51:90,:); species(101:140,:)]
test_feat   = [meas(41:50,:); meas(91:100,:); meas(141:150,:)];
test_label  = [species(41:50,:); species(91:100,:); species(141:150,:)]

train_feat  = [meas(51:90,:); meas(101:140,:)];
train_label = [species(51:90,:); species(101:140,:)]
test_feat   = [meas(91:100,:); meas(141:150,:)];
test_label  = [species(91:100,:); species(141:150,:)]

ctreeModel = fitctree(train_feat, train_label);
predicted_op = predict(ctreeModel, test_feat);

OUTPUT

test_label =

  20x1 cell array

    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'virginica' }
    {'virginica' }
    {'virginica' }
    {'virginica' }
    {'virginica' }
    {'virginica' }
    {'virginica' }
    {'virginica' }
    {'virginica' }
    {'virginica' }
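The same split-and-classify experiment can be reproduced in Python. A minimal sketch, assuming scikit-learn's bundled iris dataset stands in for MATLAB's fisheriris; the index ranges mirror the MATLAB script, but the dataset source and helper names are our assumptions:

# Hedged sketch (our addition): fitctree/predict equivalent in scikit-learn.
import numpy as np
from sklearn.datasets import load_iris
from sklearn.tree import DecisionTreeClassifier

iris = load_iris()
X, y = iris.data, iris.target
# versicolor occupies rows 50-99, virginica rows 100-149 (setosa excluded, as above)
train_idx = np.r_[50:90, 100:140]
test_idx  = np.r_[90:100, 140:150]

tree = DecisionTreeClassifier().fit(X[train_idx], y[train_idx])
predicted_op = tree.predict(X[test_idx])
print(predicted_op)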
SUPPORT VECTOR MACHINE

MATLAB CODE

load fisheriris
inds = ~strcmp(species,'setosa');
X = meas(inds,3:4);
y = species(inds);
train_feat  = [meas(1:40,:); meas(51:90,:); meas(101:140,:)];
train_label = [species(1:40,:); species(51:90,:); species(101:140,:)]
test_feat   = [meas(41:50,:); meas(91:100,:); meas(141:150,:)];
test_label  = [species(41:50,:); species(91:100,:); species(141:150,:)]

train_feat  = [meas(51:90,:); meas(101:140,:)];
train_label = [species(51:90,:); species(101:140,:)]
test_feat   = [meas(91:100,:); meas(141:150,:)];
test_label  = [species(91:100,:); species(141:150,:)]

SVMModel = fitcsvm(train_feat, train_label);
predicted_op = predict(SVMModel, test_feat);

OUTPUT

test_label =

  20x1 cell array

    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'versicolor'}
    {'virginica' }
    {'virginica' }
    {'virginica' }
    {'virginica' }
    {'virginica' }
    {'virginica' }
    {'virginica' }
    {'virginica' }
    {'virginica' }
    {'virginica' }

BAYESIAN REGRESSION

PYTHON CODE

from sklearn.datasets import load_boston
from sklearn.model_selection import train_test_split
from sklearn.metrics import r2_score
from sklearn.linear_model import BayesianRidge

dataset = load_boston()
X, y = dataset.data, dataset.target
X_train, X_test, y_train, y_test = train_test_split(X, y,
                                      test_size=0.15, random_state=42)

model = BayesianRidge()
model.fit(X_train, y_train)
prediction = model.predict(X_test)

print(f"Test Set r2 score : {r2_score(y_test, prediction)}")

OUTPUT

[Output: Test Set r2 score — value illegible in the scan]
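Note that load_boston was removed from scikit-learn in version 1.2, so on current installs the same pipeline needs a different dataset. A minimal substitution, assuming fetch_california_housing (this swap is ours, not in the original program):

# load_boston was removed in scikit-learn 1.2; fetch_california_housing is a
# drop-in regression dataset for the same pipeline (substitution is ours).
from sklearn.datasets import fetch_california_housing

dataset = fetch_california_housing()
X, y = dataset.data, dataset.target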
BAYESIAN NETWORKS

PYTHON CODE

import numpy as np
import pandas as pd
import csv
from pgmpy.estimators import MaximumLikelihoodEstimator
from pgmpy.models import BayesianModel
from pgmpy.inference import VariableElimination

# Load the heart-disease dataset and mark '?' entries as missing
heartDisease = pd.read_csv('heart.csv')
heartDisease = heartDisease.replace('?', np.nan)

print('Sample instances from the dataset are given below')
print(heartDisease.head())

print('\n Attributes and datatypes')
print(heartDisease.dtypes)

# Network structure: risk factors point to heartdisease, which points to test results
model = BayesianModel([('age','heartdisease'), ('gender','heartdisease'),
                       ('exang','heartdisease'), ('cp','heartdisease'),
                       ('heartdisease','restecg'), ('heartdisease','chol')])
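The scan cuts off before the fitting and inference steps that produce the output below. A minimal completion following the usual pattern for this program (the evidence values restecg=1 and cp=2 are our assumptions):

# Hedged completion (our addition): fit CPDs by maximum likelihood, then
# query the network with variable elimination. Evidence values are assumed.
model.fit(heartDisease, estimator=MaximumLikelihoodEstimator)

infer = VariableElimination(model)
print('\n 1. Probability of heartdisease given evidence restecg')
q1 = infer.query(variables=['heartdisease'], evidence={'restecg': 1})
print(q1)
print('\n 2. Probability of heartdisease given evidence cp')
q2 = infer.query(variables=['heartdisease'], evidence={'cp': 2})
print(q2)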

OUTPUT

Learning CPD using Maximum likelihood estimators
Inferring with Variable Elimination

[Output: probability tables P(heartdisease | evidence); the numeric values are illegible in the scan]

MULTIPLE LINEAR REGRESSION

MATLAB CODE (MATLAB Drive > multiple.m)

x1 = [1; 2; 3; 4];
x2 = [4; 5; 8; 2];
y  = [1; 6; 8; 12];
n = length(x1);
% element-wise products needed for the normal equations
x1sqr = x1 .* x1;
x2sqr = x2 .* x2;
x1y  = x1 .* y;
x2y  = x2 .* y;
x1x2 = x1 .* x2;
sum_x1 = sum(x1);
sum_x2 = sum(x2);
sum_y  = sum(y);
sum_x1sqr = sum(x1sqr);
sum_x2sqr = sum(x2sqr);
sum_x1y = sum(x1y);
sum_x2y = sum(x2y);
sum_x1x2 = sum(x1x2);
y_bar  = sum_y/n;
x1_bar = sum_x1/n;
x2_bar = sum_x2/n;
x1sqr_bar = sum_x1sqr/n;
x2sqr_bar = sum_x2sqr/n;
x1y_bar  = sum_x1y/n;
x2y_bar  = sum_x2y/n;
x1x2_bar = sum_x1x2/n;
% corrected (mean-centred) sums of squares and cross-products
sum_x1wholesqr = sum_x1.^2;
sumr_x1sqr = sum_x1sqr - (sum_x1wholesqr/n);
sum_x2wholesqr = sum_x2.^2;
sumr_x2sqr = sum_x2sqr - (sum_x2wholesqr/n);
sumr_x1y  = sum_x1y  - ((sum_x1*sum_y)/n);
sumr_x2y  = sum_x2y  - ((sum_x2*sum_y)/n);
sumr_x1x2 = sum_x1x2 - ((sum_x1*sum_x2)/n);
%% find b1
nr1 = (sumr_x2sqr*sumr_x1y) - (sumr_x1x2*sumr_x2y);
dr  = (sumr_x1sqr*sumr_x2sqr) - ((sumr_x1x2).^2);
b1 = nr1/dr;
%% find b2
nr2 = (sumr_x1sqr*sumr_x2y) - (sumr_x1x2*sumr_x1y);
b2 = nr2/dr;
%% find b0
b0 = y_bar - (b1*x1_bar) - (b2*x2_bar);
ypred = b0 + (b1*x1) + (b2*x2);
scatter3(x1,x2,y,'filled');
hold on;
plot3(x1,x2,y,'r','linewidth',2);
plot3(x1,x2,ypred,'g','linewidth',2);
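The coefficient computations above are the closed-form least-squares solution for two predictors. In the notation of the script, with S denoting the mean-centred sums held in the sumr_ variables (e.g. $S_{x_1 x_1} = \sum x_1^2 - (\sum x_1)^2/n$):

\begin{aligned}
b_1 &= \frac{S_{x_2 x_2}\, S_{x_1 y} - S_{x_1 x_2}\, S_{x_2 y}}
            {S_{x_1 x_1}\, S_{x_2 x_2} - S_{x_1 x_2}^{\,2}}, \\
b_2 &= \frac{S_{x_1 x_1}\, S_{x_2 y} - S_{x_1 x_2}\, S_{x_1 y}}
            {S_{x_1 x_1}\, S_{x_2 x_2} - S_{x_1 x_2}^{\,2}}, \\
b_0 &= \bar{y} - b_1 \bar{x}_1 - b_2 \bar{x}_2.
\end{aligned}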
BREADTH FIRST SEARCH

MATLAB CODE (MATLAB Drive > bfs.m)

s = [1 2 3 3 3 3 4 5 6 7 8 9 9 9 10];
t = [7 6 1 5 6 8 2 4 4 3 7 1 6 8 2];
G = digraph(s,t);
plot(G)
T = bfsearch(G, 1, 'allevents', 'Restart', true)
OUTPUT

[Figure: plot of the digraph G]

T =

  38x4 table

         Event         Node      Edge      EdgeIndex
    ______________    _____    ________    _________

    startnode           1      NaN NaN        NaN
    discovernode        1      NaN NaN        NaN
    edgetonew         NaN        1   7          1
    discovernode        7      NaN NaN        NaN
    finishnode          1      NaN NaN        NaN
    edgetonew         NaN        7   3         10
    discovernode        3      NaN NaN        NaN
    finishnode          7      NaN NaN        NaN
    edgetofinished    NaN        3   1          3
    edgetonew         NaN        3   5          4
    discovernode        5      NaN NaN        NaN
    edgetonew         NaN        3   6          5
    discovernode        6      NaN NaN        NaN
    edgetonew         NaN        3   8          6
    ...
    discovernode        2      NaN NaN        NaN
    finishnode          4      NaN NaN        NaN
    edgetofinished    NaN        2   6          2
    finishnode          2      NaN NaN        NaN
    startnode           9      NaN NaN        NaN
    discovernode        9      NaN NaN        NaN
    edgetofinished    NaN        9   1         12
    edgetofinished    NaN        9   6         13
    edgetofinished    NaN        9   8         14
    finishnode          9      NaN NaN        NaN
    startnode          10      NaN NaN        NaN
    discovernode       10      NaN NaN        NaN
    edgetofinished    NaN       10   2         15
    finishnode         10      NaN NaN        NaN
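A minimal breadth-first traversal of the same digraph in plain Python, using collections.deque. This sketch is our addition; the helper name bfs is illustrative, and neighbors are dequeued in ascending index order to mirror MATLAB:

# Hedged sketch (our addition): BFS over the digraph from the s/t lists above.
from collections import deque

def bfs(adj, start):
    visited, order = {start}, [start]
    queue = deque([start])
    while queue:
        u = queue.popleft()
        for v in sorted(adj.get(u, [])):   # ascending index order, like MATLAB
            if v not in visited:
                visited.add(v)
                order.append(v)
                queue.append(v)
    return order

s = [1, 2, 3, 3, 3, 3, 4, 5, 6, 7, 8, 9, 9, 9, 10]
t = [7, 6, 1, 5, 6, 8, 2, 4, 4, 3, 7, 1, 6, 8, 2]
adj = {}
for a, b in zip(s, t):      # directed edges a -> b
    adj.setdefault(a, []).append(b)
print(bfs(adj, 1))          # [1, 7, 3, 5, 6, 8, 4, 2]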
SUPPORT VECTOR MACHINE

PYTHON CODE

import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm, datasets

iris = datasets.load_iris()
X = iris.data[:, :2]          # use only sepal length and sepal width
y = iris.target
C = 1.0

svc = svm.SVC(kernel='linear', C=C).fit(X, y)

# Build a mesh over the feature space for plotting the decision regions
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
h = (x_max - x_min) / 100
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))

plt.subplot(1, 1, 1)
Z = svc.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
plt.contourf(xx, yy, Z, cmap=plt.cm.Paired, alpha=0.8)
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired)
plt.xlabel('Sepal length')
plt.ylabel('Sepal width')
plt.xlim(xx.min(), xx.max())
plt.title('SVC with linear kernel')
plt.show()

OUTPUT

[Figure: 'SVC with linear kernel' — decision regions plotted over sepal length vs. sepal width]
K-MEANS CLUSTERING

PYTHON CODE
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans

x = [4, 5, 10, 4, 3, 11, 14, 6, 10, 12]
y = [21, 19, 24, 17, 16, 25, 24, 22, 21, 21]
plt.scatter(x, y)
plt.show()

data = list(zip(x, y))

# Elbow method: record inertia for k = 1..10
inertias = []
for i in range(1, 11):
    kmeans = KMeans(n_clusters=i)
    kmeans.fit(data)
    inertias.append(kmeans.inertia_)

plt.plot(range(1, 11), inertias, marker='o')
plt.title('Elbow method')
plt.xlabel('Number of clusters')
plt.ylabel('Inertia')
plt.show()

# Fit the final model with the elbow's k = 2 and colour points by cluster
kmeans = KMeans(n_clusters=2)
kmeans.fit(data)
plt.scatter(x, y, c=kmeans.labels_)
plt.show()

OUTPUT

[Figure: scatter plot of the raw data]
[Figure: 'Elbow method' — inertia vs. number of clusters]
[Figure: scatter plot coloured by the two cluster labels]
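Reading k off the elbow plot is subjective. A hedged alternative sketch (our addition, not part of the original program) that scores each candidate k with silhouette_score and picks the best:

# Hedged sketch (our addition): choose k by maximising mean silhouette score.
from sklearn.metrics import silhouette_score

scores = {}
for k in range(2, 11):                 # silhouette needs at least 2 clusters
    labels = KMeans(n_clusters=k).fit_predict(data)
    scores[k] = silhouette_score(data, labels)
best_k = max(scores, key=scores.get)
print('best k:', best_k)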

ENSEMBLING TECHNIQUES

BAGGING

PYTHON CODE
from numpy import mean
from numpy import std
from sklearn.datasets import make_classification
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import RepeatedStratifiedKFold
from sklearn.ensemble import BaggingClassifier

# Synthetic binary classification dataset
X, y = make_classification(n_samples=1000, n_features=20, n_informative=15,
                           n_redundant=5, random_state=5)

# Bagged decision trees with default settings, scored by repeated
# stratified 10-fold cross-validation
model = BaggingClassifier()
cv = RepeatedStratifiedKFold(n_splits=10, n_repeats=3, random_state=1)
n_scores = cross_val_score(model, X, y, scoring='accuracy', cv=cv, n_jobs=-1,
                           error_score='raise')
print('Accuracy: %.3f (%.3f)' % (mean(n_scores), std(n_scores)))

OUTPUT

Accuracy: 0.864 (0.038)
