Softmax Regression from Scratch
Imports
%matplotlib inline
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import torch
import torch.nn.functional as F

# Run computations on the GPU if one is available;
# DEVICE is used throughout the code below
DEVICE = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
Preparing a dataset
##########################
### DATASET
##########################

# Load the Iris dataset (the file path is an assumption; adjust to your copy)
df = pd.read_csv('iris.data', header=None)
df.columns = ['x1', 'x2', 'x3', 'x4', 'y']

# Map the string class labels to integer targets
d = {'Iris-setosa': 0, 'Iris-versicolor': 1, 'Iris-virginica': 2}
df['y'] = df['y'].map(d)

# Keep two features so the decision regions can be plotted in 2D later
# (the feature choice is an assumption)
X = torch.tensor(df[['x3', 'x4']].values, dtype=torch.float32)
y = torch.tensor(df['y'].values, dtype=torch.long)

# Shuffle, then split 80%/20% into training and test sets
torch.manual_seed(123)
shuffle_idx = torch.randperm(y.size(0), dtype=torch.long)
X, y = X[shuffle_idx], y[shuffle_idx]
percent80 = int(shuffle_idx.size(0)*0.8)
X_train, X_test = X[:percent80], X[percent80:]
y_train, y_test = y[:percent80], y[percent80:]
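Low-level implementation with manual gradients

Given the net inputs $z \in \mathbb{R}^h$ for the $h$ classes, the softmax function converts them into a proper probability distribution:

$$\text{softmax}(z)_j = \frac{\exp(z_j)}{\sum_{k=1}^{h} \exp(z_k)}$$

The implementation below computes this row-wise for a whole minibatch at once (the transposes let the row-wise sums broadcast correctly):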
def softmax(z):
    # Row-wise softmax for a minibatch of net inputs z (shape: n x h)
    return (torch.exp(z.t()) / torch.sum(torch.exp(z), dim=1)).t()
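A direct translation of the formula like the one above can overflow for large net inputs, since torch.exp grows quickly. A common remedy, sketched below (this variant is not part of the original code), subtracts the row-wise maximum before exponentiating; the result is unchanged because softmax is invariant to adding a constant to every net input:

def softmax_stable(z):
    # Subtracting the row-wise max leaves the output unchanged
    # but keeps torch.exp from overflowing
    z = z - z.max(dim=1, keepdim=True).values
    e = torch.exp(z)
    return e / e.sum(dim=1, keepdim=True)

# Sanity check against PyTorch's built-in softmax
z = torch.tensor([[1., 2., 3.], [1000., 1001., 1002.]])
print(torch.allclose(softmax_stable(z), F.softmax(z, dim=1)))  # True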
class SoftmaxRegression1():
    def __init__(self, num_features, num_classes):
        self.num_features = num_features
        self.num_classes = num_classes
        self.weights = torch.zeros(num_classes, num_features,  # h x m
                                   dtype=torch.float32, device=DEVICE)
        self.bias = torch.zeros(num_classes, dtype=torch.float32, device=DEVICE)  # h
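Only the constructor survives in this copy of the notebook. The sketch below is one plausible reconstruction of the remaining methods, assuming the standard manual-gradient recipe for softmax regression: a forward pass through the softmax defined above, one-hot encoding of the labels, the cross-entropy gradient with respect to the weights and bias, and plain gradient descent. The method names (forward, backward, predict, train) and the default learning rate are assumptions; the definitions are indented to sit inside the class body above.

    # --- plausible reconstruction of the missing methods (assumptions) ---

    def forward(self, x):
        # Net inputs (n x h), then softmax to class probabilities
        logits = torch.mm(x, self.weights.t()) + self.bias
        return softmax(logits)

    def backward(self, x, probas, y_onehot):
        # Gradient of the cross-entropy loss w.r.t. weights and bias
        grad_loss = -(y_onehot - probas)            # n x h
        grad_weights = torch.mm(grad_loss.t(), x)   # h x m
        grad_bias = torch.sum(grad_loss, dim=0)     # h
        return grad_weights, grad_bias

    def predict(self, x):
        return torch.argmax(self.forward(x), dim=1)

    def train(self, x, y, num_epochs, learning_rate=0.1):
        for e in range(num_epochs):
            probas = self.forward(x)
            y_onehot = F.one_hot(y, num_classes=self.num_classes).float()
            grad_w, grad_b = self.backward(x, probas, y_onehot)
            # Average the gradients over the n training examples
            self.weights -= learning_rate * grad_w / y.size(0)
            self.bias -= learning_rate * grad_b / y.size(0)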
X_train, y_train = X_train.to(DEVICE), y_train.to(DEVICE)
X_test, y_test = X_test.to(DEVICE), y_test.to(DEVICE)

model1 = SoftmaxRegression1(num_features=X_train.size(1), num_classes=3)
model1.train(X_train, y_train, num_epochs=50)  # train() as sketched above

print('\nModel parameters:')
print('  Weights: %s' % model1.weights)
print('  Bias: %s' % model1.bias)
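A quick accuracy check makes the later comparison with the nn.Module version more concrete; this sketch relies on the predict method assumed above:

train_acc = (model1.predict(X_train) == y_train).float().mean()
test_acc = (model1.predict(X_test) == y_test).float().mean()
print('Training accuracy: %.2f%%' % (train_acc * 100))
print('Test accuracy: %.2f%%' % (test_acc * 100))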
from matplotlib.colors import ListedColormap
# Decision regions over a grid spanning the 2D feature space
# (the grid construction and colormap are reconstructed)
cmap = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF'])
xx1, xx2 = np.meshgrid(np.arange(X[:, 0].min().item() - 1, X[:, 0].max().item() + 1, 0.02),
                       np.arange(X[:, 1].min().item() - 1, X[:, 1].max().item() + 1, 0.02))
grid = torch.tensor(np.c_[xx1.ravel(), xx2.ravel()], dtype=torch.float32, device=DEVICE)
Z = model1.predict(grid).cpu().numpy().reshape(xx1.shape)
plt.contourf(xx1, xx2, Z, alpha=0.4, cmap=cmap)
plt.xlim(xx1.min(), xx1.max())
plt.ylim(xx2.min(), xx2.max())
plt.tight_layout()
plt.show()
High-level implementation using the nn.Module API
class SoftmaxRegression2(torch.nn.Module):
    def __init__(self, num_features, num_classes):
        super().__init__()
        self.linear = torch.nn.Linear(num_features, num_classes)
        # Zero-initialize to match the from-scratch model
        self.linear.weight.detach().zero_()
        self.linear.bias.detach().zero_()
        # Note: the trailing underscore means
        # "in-place operation" in the context of PyTorch

    def forward(self, x):
        return self.linear(x)  # return logits; softmax is applied inside the loss
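The forward method returns raw logits rather than probabilities because F.cross_entropy, used in the training loop below, applies log-softmax internally; applying softmax in forward as well would distort the loss. The following standalone check (not part of the original notebook) illustrates the equivalence:

logits = torch.tensor([[2.0, 1.0, 0.1]])
target = torch.tensor([0])

# F.cross_entropy is log-softmax followed by negative log-likelihood
loss1 = F.cross_entropy(logits, target)
loss2 = F.nll_loss(F.log_softmax(logits, dim=1), target)
print(torch.allclose(loss1, loss2))  # True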
X_train, y_train = X_train.to(DEVICE), y_train.to(DEVICE)
X_test, y_test = X_test.to(DEVICE), y_test.to(DEVICE)

model2 = SoftmaxRegression2(num_features=X_train.size(1), num_classes=3).to(DEVICE)
optimizer = torch.optim.SGD(model2.parameters(), lr=0.1)  # learning rate is an assumption

num_epochs = 50
for epoch in range(num_epochs):
    logits = model2(X_train)
    loss = F.cross_entropy(logits, y_train)  # log-softmax + NLL in one call
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

print('\nModel parameters:')
print('  Weights: %s' % model2.linear.weight)
print('  Bias: %s' % model2.linear.bias)
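Mirroring the evaluation of the from-scratch model, the following sketch computes the accuracies with gradient tracking disabled via torch.no_grad():

with torch.no_grad():
    train_acc = (model2(X_train).argmax(dim=1) == y_train).float().mean()
    test_acc = (model2(X_test).argmax(dim=1) == y_test).float().mean()
print('Training accuracy: %.2f%%' % (train_acc * 100))
print('Test accuracy: %.2f%%' % (test_acc * 100))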
# Decision regions for the nn.Module model: same plotting code as above,
# with model2(grid).argmax(dim=1) in place of model1.predict(grid)
plt.tight_layout()
plt.show()