MLP with PyTorch: Softmax and Cross-Entropy Loss
%load_ext watermark
%watermark -a 'Sebastian Raschka' -v -p torch
torch: 1.9.0a0+d819a21
Imports
import matplotlib.pyplot as plt
import pandas as pd
import torch
%matplotlib inline
import time
import numpy as np
from torchvision import datasets
from torchvision import transforms
from torch.utils.data import DataLoader
import torch.nn.functional as F
import torch
RANDOM_SEED = 1    # seed for reproducible weight initialization
BATCH_SIZE = 100   # minibatch size used by both data loaders
NUM_EPOCHS = 100   # number of full passes over the training set
# Train on the first CUDA GPU when available, otherwise fall back to CPU.
DEVICE = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
##########################
### MNIST DATASET
##########################

# NOTE(review): the original cell referenced `train_dataset` in the
# DataLoader below without ever defining it; the training split is
# constructed here (with download=True so a fresh run can fetch the data).
train_dataset = datasets.MNIST(root='data',
                               train=True,
                               transform=transforms.ToTensor(),
                               download=True)

test_dataset = datasets.MNIST(root='data',
                              train=False,
                              transform=transforms.ToTensor())

# Shuffle training batches each epoch; keep the test order deterministic.
train_loader = DataLoader(dataset=train_dataset,
                          batch_size=BATCH_SIZE,
                          shuffle=True)

test_loader = DataLoader(dataset=test_dataset,
                         batch_size=BATCH_SIZE,
                         shuffle=False)
Model
class MLP(torch.nn.Module):
    """Two-layer multilayer perceptron producing class logits.

    NOTE(review): the original class body was truncated (only the
    ``self.num_classes`` assignment survived); this reconstruction matches
    the ``MLP(num_features=..., num_hidden=..., num_classes=...)`` call
    used to instantiate the model below.

    Args:
        num_features: size of each flattened input vector (28*28 for MNIST).
        num_hidden: number of units in the single hidden layer.
        num_classes: number of output classes.
    """

    def __init__(self, num_features, num_hidden, num_classes):
        super().__init__()
        self.num_classes = num_classes

        # Hidden layer.
        self.linear_1 = torch.nn.Linear(num_features, num_hidden)

        # Output layer returns raw logits; the softmax is applied inside
        # the cross-entropy loss during training, not in the model.
        self.linear_2 = torch.nn.Linear(num_hidden, num_classes)

    def forward(self, x):
        """Map a (batch, num_features) tensor to (batch, num_classes) logits."""
        out = self.linear_1(x)
        out = torch.sigmoid(out)
        logits = self.linear_2(out)
        return logits
#################################
### Model Initialization
#################################

# Fix the RNG state so the random weight initialization is reproducible.
torch.manual_seed(RANDOM_SEED)

# 784 input pixels -> 100 hidden units -> 10 class logits,
# placed on the selected device.
model = MLP(num_features=28 * 28,
            num_hidden=100,
            num_classes=10).to(DEVICE)
#################################
### Training
#################################

# NOTE(review): the original cell was truncated — it called
# `cost.backward()` without a forward pass, defined no optimizer, and
# never appended to the cost lists. A standard SGD cross-entropy loop is
# reconstructed here.
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

start_time = time.time()
minibatch_cost = []  # loss per minibatch (fine-grained plot below)
epoch_cost = []      # mean loss per epoch (coarse plot below)

for epoch in range(NUM_EPOCHS):
    model.train()
    epoch_total = 0.
    for batch_idx, (features, targets) in enumerate(train_loader):
        # Flatten 28x28 images to 784-dim vectors and move to the device.
        features = features.view(-1, 28 * 28).to(DEVICE)
        targets = targets.to(DEVICE)

        ### FORWARD AND BACK PROP
        logits = model(features)
        # F.cross_entropy applies log-softmax internally, so the model
        # only needs to output raw logits.
        cost = F.cross_entropy(logits, targets)
        optimizer.zero_grad()
        cost.backward()

        ### UPDATE MODEL PARAMETERS
        optimizer.step()

        ### LOGGING
        minibatch_cost.append(cost.item())
        epoch_total += cost.item()
        if not batch_idx % 50:
            print(f'Epoch: {epoch+1:03d}/{NUM_EPOCHS:03d} | '
                  f'Batch {batch_idx:03d}/{len(train_loader):03d} | '
                  f'Cost: {cost:.4f}')

    epoch_cost.append(epoch_total / len(train_loader))
    print(f'Time elapsed: {(time.time() - start_time)/60:.2f} min')

print(f'Total Training Time: {(time.time() - start_time)/60:.2f} min')
# Plot the training loss at two resolutions: per minibatch and per epoch.
for costs, unit in ((minibatch_cost, 'Minibatch'), (epoch_cost, 'Epoch')):
    plt.plot(range(len(costs)), costs)
    plt.ylabel('Cross Entropy')
    plt.xlabel(unit)
    plt.show()