Aiml
Aiml
MATLAB CODE
% Depth-first search demo.
% NOTE(review): assumes edge lists s and t were defined earlier
% (that part of the listing was lost in the scan).
G = graph(s,t);
plot(G)
% Vector of node IDs in the order DFS visits them, starting from node 17.
v = dfsearch(G,17)
OUTPUT
v=
17
15
2
1
3
4
5
6
7
8
10
11
12
13
_I
14
16
18
19
20
3.5
25
15
OS'----'----'----'----'----'---'----'---'
-2 0 2 4 6 8 10 12 14
ENSEMBLING TECHNIQUES
STACKING
PYTHON CODING
def get_dataset():
    """Generate the synthetic binary-classification dataset for the demo.

    Returns:
        (X, y): feature matrix and label vector from make_classification.

    NOTE(review): the make_classification call was partially lost in the
    scan; only ``n_redundant=5, random_state=1`` is visible.  The other
    arguments follow the standard stacking tutorial — confirm against the
    source document.
    """
    X, y = make_classification(n_samples=1000, n_features=20,
                               n_informative=15, n_redundant=5,
                               random_state=1)
    return X, y
def get_models():
    """Return the base classifiers to compare, keyed by short name.

    Returns:
        dict[str, estimator]: unfitted scikit-learn classifiers.
    """
    # OCR repair: original scan read dict() as diet(), and mangled the
    # bracket/quote pairs and several class names below.
    models = dict()
    models['lr'] = LogisticRegression()
    models['knn'] = KNeighborsClassifier()
    models['cart'] = DecisionTreeClassifier()
    models['svm'] = SVC()
    models['bayes'] = GaussianNB()
    return models
_J
>knn 0.931 (0.025)
1.00
0.95
0.90 ~ 0 ~
0
0 .85 0
0.80
0.75
0.70
0
7
def evaluate_model(model, X, y):
    """Score a model with repeated stratified k-fold cross-validation.

    Args:
        model: unfitted scikit-learn estimator.
        X, y: feature matrix and label vector.

    Returns:
        Array of per-fold accuracy scores.

    NOTE(review): the cross-validation call was lost in the scan; only
    ``error_score='raise'`` survives.  Reconstructed in the standard
    tutorial form — confirm against the source document.
    """
    cv = RepeatedStratifiedKFold(n_splits=10, n_repeats=3, random_state=1)
    scores = cross_val_score(model, X, y, scoring='accuracy', cv=cv,
                             n_jobs=-1, error_score='raise')
    return scores

# Compare every base model on the same dataset.
# NOTE(review): the for-loop header and the summary print were lost in
# the scan; reconstructed from the '>knn 0.931 (0.025)' output visible in
# the document — confirm.
X, y = get_dataset()
models = get_models()
results, names = [], []
for name, model in models.items():
    scores = evaluate_model(model, X, y)
    results.append(scores)
    names.append(name)
    print('>%s %.3f (%.3f)' % (name, mean(scores), std(scores)))
pyplot.show()
OUTPUT
test_label =
20x1 cell array
Bayesian regression:
1
# Bayesian-regression example fragment.
# NOTE(review): most of this listing (dataset loading, model fitting, the
# prediction step, and the trailing arguments of train_test_split) was
# lost in the scan; only the lines below survive — confirm against the
# source document.
X, y = dataset.data, dataset.target
X_train, X_test, y_train, y_test = train_test_split(X, y)
print(f"Test Set r2 score : {r2_score(y_test, prediction)}")
Output:
Bayesian networks:
import numpy as np
import pandas as pd
import csv
from pgmpy.estimators import MaximumLikelihoodEstimator
from pgmpy.models import BayesianModel
from pgmpy.inference import VariableElimination

# Load the heart-disease dataset and normalise '?' placeholders to NaN.
heartDisease = pd.read_csv('heart.csv')
heartDisease = heartDisease.replace('?', np.nan)

print('Sample Instances from the dataset are given below')
print(heartDisease.head())
print('\n Attributes and datatypes')
print(heartDisease.dtypes)

# Network structure: age, gender, exang and cp are parents of
# heartdisease, which in turn influences restecg and chol.
model = BayesianModel([('age', 'heartdisease'),
                       ('gender', 'heartdisease'),
                       ('exang', 'heartdisease'),
                       ('cp', 'heartdisease'),
                       ('heartdisease', 'restecg'),
                       ('heartdisease', 'chol')])
I 11unl1lnu1 ~IUUtdl.a.,..t) I
I be1rUl.1cuec01 0.:0:2 I
I tt1n:t1111itl11 0.0000 I
I bl1:t411tu1 UI o.io:s 1
-------------I
I l:clrt:h~~t141 I O.HI!
----♦------·-
I tun4UtUt ;:l:1 (hu-..4uu..u1 I
I hlt:WtlUtfOI O.lC!O I
·-·--·--------
I 0.1:st I
htart:UltlltC!I
I hta:t:11J~l21
I tt1:tl112cue1J1 o.u,· I
I 1a1rw2eue111 0.:12! I
6:05 PM GI tl/J $ ·· @ "' .,,11 C!D
% Breadth-first search over a small directed graph.
s = [1 2 3 3 3 3 4 5 6 7 8 9 9 9 10];
t = [7 6 1 5 6 8 2 4 4 3 7 1 6 8 2];
G = digraph(s, t);
plot(G)
% Log every BFS event from node 1; 'Restart' true re-seeds the search so
% nodes unreachable from 1 are still discovered.
% OCR repair: the option name is 'allevents' (one word) in MATLAB.
T = bfsearch(G, 1, 'allevents', 'Restart', true)
6
• 10
•6 .. 4
,I
,9
'
I/
·•- -
l
~
"'
- • 3 -- 1
\ /1 ~
\I
~7
SUPPORT VECTOR MACHINE
l
PYTHON CODE
# SVM decision-boundary example fragment.
# NOTE(review): the meshgrid construction (xx, yy) and the SVC fit were
# lost in the scan — confirm against the source document.
iris = datasets.load_iris()
X = iris.data[:, :2]  # first two features only, so the boundary is plottable
y = iris.target
C = 1.0  # SVM regularisation parameter
Z = svc.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
plt.xlim(xx.min(), xx.max())
plt.title('SVC with linear kernel')
plt.show()
OUTPUT
-.
SVC with linear kernel
5.0 11
4.5
I,
~-0
5 3.5
;i '1
;r: I
i 3.0
I'
2. 5
2.0
L5
LO
6
5ep,1l l1!nQth
K - MEANS CLUSTER
l
PYTHON CODE
import matplotlib.pyplot as plt

# NOTE(review): the x list, the `data = list(zip(x, y))` line and the
# elbow plt.plot call were lost in the scan; reconstructed from the
# standard elbow-method example (the scatter output's 4..14 x-axis
# matches) — confirm against the source document.
x = [4, 5, 10, 4, 3, 11, 14, 6, 10, 12]
y = [21, 19, 24, 17, 16, 25, 24, 22, 21, 21]
plt.scatter(x, y)
plt.show()

from sklearn.cluster import KMeans

data = list(zip(x, y))
inertias = []
# Fit k-means for k = 1..10 and record each model's inertia for the
# elbow plot.
for i in range(1, 11):
    kmeans = KMeans(n_clusters=i)
    kmeans.fit(data)
    inertias.append(kmeans.inertia_)
plt.plot(range(1, 11), inertias, marker='o')
plt.title('Elbow method')
plt.xlabel('Number of clusters')
plt.ylabel('Inertia')
plt.show()

# The elbow suggests k = 2; colour each point by its cluster label.
kmeans = KMeans(n_clusters=2)
kmeans.fit(data)
plt.scatter(x, y, c=kmeans.labels_)
plt.show()
OUTPUT
l r
L
24
' •
22
•
• .. •
20
•
18
•
15 • .
4 6 I 10 12 14
J l
•
24 · • •
22 •
• • •
,o
•
18
•
16 • . .
• 6 e 10 u t•
Elbow method
Numl>ff or cluSle,s
7
ENSEMBLING TECHNIQUES
BAGGING
from numpy import mean

# Bagging-ensemble example fragment.
# NOTE(review): the dataset construction and the cross_val_score call
# were lost in the scan; only the model and CV definitions survive —
# confirm against the source document.
model = BaggingClassifier()
cv = RepeatedStratifiedKFold(n_splits=10, n_repeats=3, random_state=1)
OUTPUT
Accuracy 0.864 (0.038)