RNN Text Generation
Python Code:
class TextDataset(Dataset):
    """Sliding-window dataset over an encoded token sequence.

    Each sample is a pair ``(input_seq, target)`` where ``input_seq`` is a
    window of ``sequence_length`` consecutive token ids and ``target`` is the
    token id that immediately follows the window (next-token prediction).

    Args:
        encoded_text: Sequence of integer token ids.
        sequence_length: Length of each input window.
    """

    def __init__(self, encoded_text, sequence_length):
        self.data = []
        # One sample per valid window start; the last window must still have
        # one following token available as the target.
        for i in range(len(encoded_text) - sequence_length):
            input_seq = encoded_text[i:i + sequence_length]
            target = encoded_text[i + sequence_length]
            self.data.append((input_seq, target))

    def __len__(self):
        return len(self.data)

    def __getitem__(self, idx):
        # Required by torch.utils.data.Dataset: without this, indexing and
        # DataLoader iteration raise NotImplementedError.
        input_seq, target = self.data[idx]
        return torch.tensor(input_seq, dtype=torch.long), torch.tensor(target, dtype=torch.long)
# Hyperparameters
embedding_dim = 64       # size of the learned word-embedding vectors
hidden_dim = 128         # size of the RNN hidden state
learning_rate = 0.01     # optimizer step size
epochs = 20              # number of full passes over the dataset
# Forward pass
# NOTE(review): fragment — the enclosing training loop and the definitions of
# `model`, `inputs`, `targets`, `hidden`, `criterion`, `optimizer`, and
# `total_loss` are not visible here; presumably this sits inside a per-batch loop.
outputs, hidden = model(inputs, hidden)
loss = criterion(outputs, targets)
# Backward pass
optimizer.zero_grad()  # clear gradients accumulated from the previous step
loss.backward()
optimizer.step()
total_loss += loss.item()  # .item() detaches the scalar for logging
# NOTE(review): fragment of a generate_text function — the def line, the code
# that builds `input_seq`, appends to `generated`, and advances `input_tensor`
# between steps is not visible here. As shown, `generated` is returned but
# never populated and `input_tensor` never changes — confirm against the
# full source.
hidden = model.init_hidden(1)  # batch size of 1 for single-sequence sampling
input_tensor = torch.tensor(input_seq).unsqueeze(0) # Add batch dimension
for _ in range(num_words):
    with torch.no_grad():  # inference only — no gradient tracking needed
        output, hidden = model(input_tensor, hidden)
    # Greedy decoding: pick the highest-probability next token.
    next_word_idx = torch.argmax(output, dim=1).item()
    next_word = idx_to_word[next_word_idx]
return generated
# --- Text generation demo ---
# Seed word and the number of tokens to sample from the trained model.
start_sequence = "Vijay"
num_words = 20

generated_text = generate_text(model, start_sequence, num_words)

print("\nGenerated Text:")
print(generated_text)