# DL Lab PRGS — Deep Learning lab programs (recovered from PDF extraction)
import numpy as np
import pandas as pd

from matplotlib import pyplot as plt
from matplotlib.colors import ListedColormap
# -- page 1 (extraction artifact) --
# NOTE(review): tail of a truncated fit() method — the enclosing `def` line
# and the epoch loop that defines `error` were lost in PDF extraction.
# It records the epoch's misclassification count and returns self (fluent API).
self.errors.append(error)
return self
# Net Input method for summing the given matrix inputs and their corresponding weights.
def net_input(self, x):
    """Return the net input w·x + b, where self.weights[0] is the bias.

    Fix: the original `defnet_input` (missing space, a PDF-extraction
    artifact) was a syntax error.
    """
    return np.dot(x, self.weights[1:]) + self.weights[0]
# -- page 2 (extraction artifact) --
# Body of a decision-region plotting helper. NOTE(review): the enclosing
# `def plot_decision_regions(X, y, classifier, resolution=...)` line was lost
# in PDF extraction — X, y, classifier and resolution come from it; confirm
# against the original lab sheet.
# One marker/colour per class; trim the colormap to the number of classes.
markers = ('s', 'x', 'o', '^', 'v')
colors = ('red', 'blue', 'lightgreen', 'gray', 'cyan')
cmap = ListedColormap(colors[:len(np.unique(y))])
# Plot the decision surface over a grid padding the data range by 1 unit.
x1_min, x1_max = X[:, 0].min() - 1, X[:, 0].max() + 1
x2_min, x2_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx1, xx2 = np.meshgrid(np.arange(x1_min, x1_max, resolution),
                       np.arange(x2_min, x2_max, resolution))
# Classify every grid point, then reshape back to the grid for contourf.
Z = classifier.predict(np.array([xx1.ravel(), xx2.ravel()]).T)
Z = Z.reshape(xx1.shape)
plt.contourf(xx1, xx2, Z, alpha=0.4, cmap=cmap)
plt.xlim(xx1.min(), xx1.max())
plt.ylim(xx2.min(), xx2.max())
# Plot class samples (fix: original `foridx` lacked the space after `for`,
# a syntax error; loop-body indentation restored).
for idx, cl in enumerate(np.unique(y)):
    plt.scatter(x=X[y == cl, 0], y=X[y == cl, 1],
                alpha=0.8, c=cmap(idx),
                marker=markers[idx], label=cl)
# Task 2: activation functions
# SIGMOID FUNCTION
import numpy as np


def sig(x):
    """Logistic sigmoid 1 / (1 + e^-x): maps any real x into (0, 1)."""
    return 1 / (1 + np.exp(-x))


if __name__ == "__main__":
    # Plot the curve over [-100, 100). pyplot is imported lazily here
    # (the original `frommatplotlib` line was a syntax error) so that
    # `sig` stays importable even without matplotlib installed.
    from matplotlib import pyplot

    ip = [x for x in range(-100, 100)]
    op = [sig(x) for x in ip]
    pyplot.plot(ip, op)
    pyplot.show()
# RELU FUNCTION
def rectified(x):
    """Rectified linear unit: max(0, x) — zero for negatives, identity otherwise."""
    return max(0.0, x)


if __name__ == "__main__":
    # Lazy import (original `frommatplotlib` line was a syntax error).
    from matplotlib import pyplot

    ip = [x for x in range(-100, 100)]
    op = [rectified(x) for x in ip]
    pyplot.plot(ip, op)
    pyplot.show()
# TANH FUNCTION
from math import exp


def mytanh(x):
    """Hyperbolic tangent: (e^x - e^-x) / (e^x + e^-x).

    Fix: the original `exp(x)-exp(-x)/exp(x)+exp(-x)` had no parentheses,
    so operator precedence computed exp(x) - e^(-2x) + e^(-x) — not tanh.
    """
    return (exp(x) - exp(-x)) / (exp(x) + exp(-x))


if __name__ == "__main__":
    # Lazy import (original `frommatplotlib` line was a syntax error).
    from matplotlib import pyplot

    ip = [x for x in range(-100, 100)]
    op = [mytanh(x) for x in ip]
    pyplot.plot(ip, op)
    pyplot.show()
# SOFTMAX FUNCTION
import numpy as np


def softmax(x):
    """Return the softmax of x: positive weights that sum to 1.

    Fixes: the original `defsoftmax` (missing space) was a syntax error.
    The max of x is subtracted before exponentiating — the standard
    numerically stable form; it leaves the result unchanged mathematically
    but avoids overflow for large inputs.
    """
    e = np.exp(x - np.max(x))
    return e / np.sum(e)


if __name__ == "__main__":
    # Lazy import (original `frommatplotlib` line was a syntax error).
    from matplotlib import pyplot

    ip = [12.0, 13.0, 14.0]
    op = softmax(ip)
    print(op)
    pyplot.plot(ip, op)
    pyplot.show()
    print(np.sum(op))