Practical 5
IMPORTING LIBRARIES
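The import cell itself is not included in this export; a minimal sketch of the libraries the later cells rely on (the np and plt names and the '3d' projection), assuming nothing beyond what those cells actually call, would be:

# Assumed imports, inferred from the np./plt. calls used in the cells below
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection on older Matplotlib versions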
In [3]: #3D scatter of the raw data: feature-1, feature-2 and the target value
plt.axes(projection='3d')
plt.plot(Data_Set[:,0], Data_Set[:,1], Data_Set[:,2], '.')
Out[6]: array([[0.41848923],
[0.32338244],
[0.43007395]])
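The input cell producing Out[6] is not shown; the 3x1 array appears to be the weight vector W before training (compare it with FINAL THETAS further down). Assuming a simple random initialisation, a sketch would be:

# Assumed initialisation producing a 3x1 weight vector like Out[6];
# W is taken to hold the intercept term and the two feature weights
W = np.random.rand(3, 1)
W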
EVALUATING HYPOTHESIS (predicted y)
In [7]: #hypothesis: predicted y for every training example, h = W.T @ x_Train
h = np.dot(W.T, x_Train)
h
In [8]: #SE = squared error = array of the squared error of every individual data-point
SE = ((h - y_Train) ** 2)
print(SE)
SUM OF SQUARED ERROR
Out[9]: 4917023281209.861
Out[10]: 70243189731.56944
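The input cells behind Out[9] and Out[10] are not shown; assuming they aggregate the per-point squared errors printed above, a minimal sketch would be (the MSE reading of Out[10] is only one plausible interpretation):

# Assumed aggregation of the per-point squared errors SE
SSE = np.sum(SE)           # total sum of squared errors (compare Out[9])
MSE = SSE / y_Train.size   # mean squared error, one plausible source of Out[10]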
FINAL THETAS
Out[14]: array([[159.96456673],
[159.86945994],
[159.97615145]])
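The training loop that takes W from the initial values in Out[6] to the final thetas above is not included in this export. A minimal batch gradient-descent sketch consistent with the hypothesis and loss cells above, where alpha and n_iters are illustrative values rather than the notebook's actual settings, would be:

# Assumed training loop: batch gradient descent on the squared-error cost;
# alpha and n_iters are illustrative, not taken from the notebook
alpha = 0.0001
n_iters = 1000
m = x_Train.shape[1]                    # number of training examples (features stacked as columns)
for _ in range(n_iters):
    h = np.dot(W.T, x_Train)            # predictions, shape (1, m)
    loss = h - y_Train                  # residuals
    grad = np.dot(x_Train, loss.T) / m  # gradient of the mean squared-error cost w.r.t. W
    W = W - alpha * grad                # parameter update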
VALIDATION
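The validation cell is not shown in this export; assuming a held-out split with names x_Val / y_Val analogous to x_Test / y_Test below, a sketch mirroring the testing cell would be:

# Assumed validation check, mirroring the testing cell below;
# x_Val / y_Val are assumed names for the held-out validation split
h_Val = np.dot(W.T, x_Val)   # predictions on the validation features
loss_Val = h_Val - y_Val     # validation residuals
loss_Val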
VALIDATION GRAPH
TESTING
In [18]: #Testing: predictions and residuals on the held-out test split
h_Test = np.dot(W.T, x_Test)
loss_Test = h_Test - y_Test
loss_Test
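To compare against the training error above, the test residuals can be reduced to a single number; a minimal sketch (the variable name SSE_Test is illustrative):

# Assumed summary of the test residuals, analogous to the training sum of squared errors
SSE_Test = np.sum(loss_Test ** 2)   # total squared error on the test split
print(SSE_Test)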
TESTING GRAPH
In [20]: #Data points
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.plot(Data_Set[:,0], Data_Set[:,1], Data_Set[:,2], '.', color='green')
plt.title('Data-Points')
plt.xlabel('X1(Feature-1)')
plt.ylabel('X2(Feature-2)')
#Final Hyper-plane learned by the model
a = W[1]
b = -1
c = W[2]
d = -(W[0])
x = np.linspace(-100, 100, 10)
y = np.linspace(-100, 100, 10)
X, Y = np.meshgrid(x, y)
Z = (d + a*X + b*Y) / c
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
surf = ax.plot_surface(X, Z, Y)
ax.view_init(30, 160)
plt.title('Final Hyper-Plane')
plt.xlabel('X1(Feature-1)')
plt.ylabel('X2(Feature-2)')