
assignment-7

November 12, 2024
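
This notebook builds a word-level sequence-to-sequence translation model in Keras: an LSTM encoder reads an English sentence, its final hidden and cell states seed an LSTM decoder trained with teacher forcing to emit the French target, and separate inference models then decode greedily, one token at a time. The run below trains on four toy sentence pairs and ends in a KeyError during decoding; the cause and a fix sketch follow the traceback.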

[2]: import numpy as np

from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, LSTM, Dense, Embedding
from tensorflow.keras.utils import to_categorical

# Sample input and target texts (replace with actual data)
input_texts = [
    "Hello, how are you?",
    "I am fine.",
    "What is your name?",
    "Nice to meet you."
]

target_texts = [
    "<start> Bonjour, comment allez-vous ? <end>",
    "<start> Je vais bien. <end>",
    "<start> Quel est votre nom ? <end>",
    "<start> Enchanté de vous rencontrer. <end>"
]

# Hyperparameters
max_encoder_seq_length = 10  # Max length of input sentences
max_decoder_seq_length = 15  # Max length of target sentences
embedding_dim = 256          # Dimension of word embeddings

# Tokenizer for source (input) language
source_tokenizer = Tokenizer()
source_tokenizer.fit_on_texts(input_texts)
input_sequences = source_tokenizer.texts_to_sequences(input_texts)
input_sequences = pad_sequences(input_sequences, maxlen=max_encoder_seq_length, padding='post')

# Tokenizer for target language
target_tokenizer = Tokenizer()
target_tokenizer.fit_on_texts(target_texts)
target_sequences = target_tokenizer.texts_to_sequences(target_texts)
target_sequences = pad_sequences(target_sequences, maxlen=max_decoder_seq_length, padding='post')

# Vocabulary sizes (+1 because Tokenizer indices start at 1; 0 is the pad)
num_encoder_tokens = len(source_tokenizer.word_index) + 1
num_decoder_tokens = len(target_tokenizer.word_index) + 1

# One-hot encode the target sequences for training: position t-1 holds the
# token at position t, i.e. the targets are the decoder inputs shifted left by one
decoder_target_data = np.zeros((len(target_texts), max_decoder_seq_length, num_decoder_tokens), dtype='float32')

for i, target_sequence in enumerate(target_sequences):
    for t, token in enumerate(target_sequence):
        if t > 0:  # Offset by 1 so the start token is never a target
            decoder_target_data[i, t - 1, token] = 1.0

# Encoder: the final LSTM states summarize the source sentence
encoder_inputs = Input(shape=(None,))
encoder_embedding = Embedding(input_dim=num_encoder_tokens, output_dim=embedding_dim)(encoder_inputs)
encoder_lstm = LSTM(256, return_state=True)
encoder_outputs, state_h, state_c = encoder_lstm(encoder_embedding)
encoder_states = [state_h, state_c]

# Decoder: initialized with the encoder states, trained with teacher forcing
decoder_inputs = Input(shape=(None,))
decoder_embedding = Embedding(input_dim=num_decoder_tokens, output_dim=embedding_dim)(decoder_inputs)
decoder_lstm = LSTM(256, return_sequences=True, return_state=True)
decoder_outputs, _, _ = decoder_lstm(decoder_embedding, initial_state=encoder_states)
decoder_dense = Dense(num_decoder_tokens, activation='softmax')
decoder_outputs = decoder_dense(decoder_outputs)

# Seq2Seq Model
model = Model([encoder_inputs, decoder_inputs], decoder_outputs)

# Compile model
model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])

# Train the model
model.fit([input_sequences, target_sequences], decoder_target_data,
          batch_size=64, epochs=100, validation_split=0.2)

# Inference encoder model: maps a source sequence to the initial decoder states
encoder_model = Model(encoder_inputs, encoder_states)

# Inference decoder model: runs one step at a time, fed its own previous states
decoder_state_input_h = Input(shape=(256,))
decoder_state_input_c = Input(shape=(256,))
decoder_states_inputs = [decoder_state_input_h, decoder_state_input_c]

decoder_lstm_outputs, state_h, state_c = decoder_lstm(decoder_embedding, initial_state=decoder_states_inputs)
decoder_states = [state_h, state_c]
decoder_outputs = decoder_dense(decoder_lstm_outputs)
decoder_model = Model([decoder_inputs] + decoder_states_inputs,
                      [decoder_outputs] + decoder_states)

# Function to generate translations by greedy (argmax) decoding
def decode_sequence(input_seq):
    states_value = encoder_model.predict(input_seq)
    target_seq = np.zeros((1, 1))
    target_seq[0, 0] = target_tokenizer.word_index['<start>']

    stop_condition = False
    decoded_sentence = ''

    while not stop_condition:
        output_tokens, h, c = decoder_model.predict([target_seq] + states_value)
        sampled_token_index = np.argmax(output_tokens[0, -1, :])
        sampled_word = target_tokenizer.index_word.get(sampled_token_index, '')
        decoded_sentence += ' ' + sampled_word

        if sampled_word == '<end>' or len(decoded_sentence) > max_decoder_seq_length:
            stop_condition = True

        target_seq = np.zeros((1, 1))
        target_seq[0, 0] = sampled_token_index
        states_value = [h, c]

    return decoded_sentence

# Testing the translation
for seq_index in range(4):  # Test the first 4 sequences
    input_seq = input_sequences[seq_index: seq_index + 1]
    decoded_sentence = decode_sequence(input_seq)
    print(f'Input sentence: {input_texts[seq_index]}')
    print(f'Translated sentence: {decoded_sentence}')

Epoch 1/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 4s 4s/step - accuracy: 0.0222 - loss: 2.6450 - val_accuracy: 0.6667 - val_loss: 2.3993
Epoch 2/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 97ms/step - accuracy: 0.6889 - loss: 2.3795 - val_accuracy: 0.6667 - val_loss: 1.9941
Epoch 3/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 89ms/step - accuracy: 0.6889 - loss: 1.9384 - val_accuracy: 0.6667 - val_loss: 1.2881
Epoch 4/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 89ms/step - accuracy: 0.6889 - loss: 1.1903 - val_accuracy: 0.6667 - val_loss: 1.1691
Epoch 5/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 100ms/step - accuracy: 0.6889 - loss: 1.0648 - val_accuracy: 0.6667 - val_loss: 1.3231
Epoch 6/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 100ms/step - accuracy: 0.6889 - loss: 1.2017 - val_accuracy: 0.6667 - val_loss: 1.1595
Epoch 7/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 110ms/step - accuracy: 0.6889 - loss: 1.0225 - val_accuracy: 0.6667 - val_loss: 1.2279
Epoch 8/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 107ms/step - accuracy: 0.6889 - loss: 1.0801 - val_accuracy: 0.6667 - val_loss: 1.1622
Epoch 9/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 98ms/step - accuracy: 0.6889 - loss: 0.9866 - val_accuracy: 0.6667 - val_loss: 1.1964
Epoch 10/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 96ms/step - accuracy: 0.6889 - loss: 1.0059 - val_accuracy: 0.6667 - val_loss: 1.1714
Epoch 11/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 116ms/step - accuracy: 0.6889 - loss: 0.9399 - val_accuracy: 0.6667 - val_loss: 1.1965
Epoch 12/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 92ms/step - accuracy: 0.6889 - loss: 0.9434 - val_accuracy: 0.6667 - val_loss: 1.1623
Epoch 13/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 97ms/step - accuracy: 0.6889 - loss: 0.8510 - val_accuracy: 0.6667 - val_loss: 1.2486
Epoch 14/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 101ms/step - accuracy: 0.6889 - loss: 0.9102 - val_accuracy: 0.7333 - val_loss: 1.1853
Epoch 15/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 92ms/step - accuracy: 0.7556 - loss: 0.8254 - val_accuracy: 0.6667 - val_loss: 1.3453
Epoch 16/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 97ms/step - accuracy: 0.6889 - loss: 0.9184 - val_accuracy: 0.6667 - val_loss: 1.1761
Epoch 17/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 85ms/step - accuracy: 0.7556 - loss: 0.7786 - val_accuracy: 0.6667 - val_loss: 1.2725
Epoch 18/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 99ms/step - accuracy: 0.7333 - loss: 0.8162 - val_accuracy: 0.7333 - val_loss: 1.2035
Epoch 19/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 95ms/step - accuracy: 0.8000 - loss: 0.7327 - val_accuracy: 0.6667 - val_loss: 1.3547
Epoch 20/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 85ms/step - accuracy: 0.7111 - loss: 0.8241 - val_accuracy: 0.7333 - val_loss: 1.2381
Epoch 21/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 95ms/step - accuracy: 0.8000 - loss: 0.7237 - val_accuracy: 0.6667 - val_loss: 1.3536
Epoch 22/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 92ms/step - accuracy: 0.7111 - loss: 0.7678 - val_accuracy: 0.7333 - val_loss: 1.2607
Epoch 23/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 90ms/step - accuracy: 0.8222 - loss: 0.6882 - val_accuracy: 0.6667 - val_loss: 1.3632
Epoch 24/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 105ms/step - accuracy: 0.8000 - loss: 0.7293 - val_accuracy: 0.7333 - val_loss: 1.2966
Epoch 25/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 158ms/step - accuracy: 0.8000 - loss: 0.6621 - val_accuracy: 0.6667 - val_loss: 1.4517
Epoch 26/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 88ms/step - accuracy: 0.7778 - loss: 0.7497 - val_accuracy: 0.7333 - val_loss: 1.3304
Epoch 27/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 83ms/step - accuracy: 0.8222 - loss: 0.6567 - val_accuracy: 0.6667 - val_loss: 1.4482
Epoch 28/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 93ms/step - accuracy: 0.7778 - loss: 0.7018 - val_accuracy: 0.7333 - val_loss: 1.3510
Epoch 29/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 116ms/step - accuracy: 0.8222 - loss: 0.6265 - val_accuracy: 0.6667 - val_loss: 1.4525
Epoch 30/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 91ms/step - accuracy: 0.8000 - loss: 0.6654 - val_accuracy: 0.7333 - val_loss: 1.3833
Epoch 31/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 85ms/step - accuracy: 0.8444 - loss: 0.6017 - val_accuracy: 0.6667 - val_loss: 1.5132
Epoch 32/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 95ms/step - accuracy: 0.8000 - loss: 0.6702 - val_accuracy: 0.7333 - val_loss: 1.4155
Epoch 33/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 89ms/step - accuracy: 0.8444 - loss: 0.5959 - val_accuracy: 0.6667 - val_loss: 1.5386
Epoch 34/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 90ms/step - accuracy: 0.8000 - loss: 0.6530 - val_accuracy: 0.7333 - val_loss: 1.4364
Epoch 35/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 88ms/step - accuracy: 0.8667 - loss: 0.5790 - val_accuracy: 0.6667 - val_loss: 1.5447
Epoch 36/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 186ms/step - accuracy: 0.8000 - loss: 0.6240 - val_accuracy: 0.7333 - val_loss: 1.4616
Epoch 37/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 93ms/step - accuracy: 0.8667 - loss: 0.5576 - val_accuracy: 0.6667 - val_loss: 1.5727
Epoch 38/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 91ms/step - accuracy: 0.8000 - loss: 0.6112 - val_accuracy: 0.7333 - val_loss: 1.4880
Epoch 39/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 92ms/step - accuracy: 0.8667 - loss: 0.5447 - val_accuracy: 0.6667 - val_loss: 1.5989
Epoch 40/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 119ms/step - accuracy: 0.8000 - loss: 0.5987 - val_accuracy: 0.7333 - val_loss: 1.5120
Epoch 41/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 84ms/step - accuracy: 0.8667 - loss: 0.5335 - val_accuracy: 0.6667 - val_loss: 1.6181
Epoch 42/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 107ms/step - accuracy: 0.8000 - loss: 0.5828 - val_accuracy: 0.7333 - val_loss: 1.5348
Epoch 43/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 100ms/step - accuracy: 0.8889 - loss: 0.5197 - val_accuracy: 0.6667 - val_loss: 1.6386
Epoch 44/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 83ms/step - accuracy: 0.8222 - loss: 0.5692 - val_accuracy: 0.7333 - val_loss: 1.5574
Epoch 45/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 90ms/step - accuracy: 0.8889 - loss: 0.5060 - val_accuracy: 0.6667 - val_loss: 1.6585
Epoch 46/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 94ms/step - accuracy: 0.8444 - loss: 0.5559 - val_accuracy: 0.7333 - val_loss: 1.5796
Epoch 47/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 93ms/step - accuracy: 0.8889 - loss: 0.4937 - val_accuracy: 0.6667 - val_loss: 1.6756
Epoch 48/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 96ms/step - accuracy: 0.8667 - loss: 0.5404 - val_accuracy: 0.7333 - val_loss: 1.6017
Epoch 49/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 120ms/step - accuracy: 0.8889 - loss: 0.4814 - val_accuracy: 0.6667 - val_loss: 1.6956
Epoch 50/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 97ms/step - accuracy: 0.8667 - loss: 0.5279 - val_accuracy: 0.7333 - val_loss: 1.6238
Epoch 51/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 83ms/step - accuracy: 0.8889 - loss: 0.4690 - val_accuracy: 0.6667 - val_loss: 1.7168
Epoch 52/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 92ms/step - accuracy: 0.8667 - loss: 0.5178 - val_accuracy: 0.7333 - val_loss: 1.6455
Epoch 53/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 89ms/step - accuracy: 0.9111 - loss: 0.4581 - val_accuracy: 0.6667 - val_loss: 1.7324
Epoch 54/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 87ms/step - accuracy: 0.8889 - loss: 0.5028 - val_accuracy: 0.7333 - val_loss: 1.6666
Epoch 55/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 86ms/step - accuracy: 0.9333 - loss: 0.4466 - val_accuracy: 0.6667 - val_loss: 1.7492
Epoch 56/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 113ms/step - accuracy: 0.9333 - loss: 0.4888 - val_accuracy: 0.7333 - val_loss: 1.6877
Epoch 57/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 88ms/step - accuracy: 0.9333 - loss: 0.4333 - val_accuracy: 0.6667 - val_loss: 1.7720
Epoch 58/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 89ms/step - accuracy: 0.9333 - loss: 0.4814 - val_accuracy: 0.7333 - val_loss: 1.7091
Epoch 59/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 96ms/step - accuracy: 0.9556 - loss: 0.4217 - val_accuracy: 0.6667 - val_loss: 1.7860
Epoch 60/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 79ms/step - accuracy: 0.9111 - loss: 0.4660 - val_accuracy: 0.7333 - val_loss: 1.7301
Epoch 61/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 78ms/step - accuracy: 0.9778 - loss: 0.4121 - val_accuracy: 0.6667 - val_loss: 1.7981
Epoch 62/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 85ms/step - accuracy: 0.9111 - loss: 0.4474 - val_accuracy: 0.7333 - val_loss: 1.7508
Epoch 63/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 188ms/step - accuracy: 0.9778 - loss: 0.3983 - val_accuracy: 0.6667 - val_loss: 1.8282
Epoch 64/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 99ms/step - accuracy: 0.9111 - loss: 0.4478 - val_accuracy: 0.7333 - val_loss: 1.7716
Epoch 65/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 88ms/step - accuracy: 0.9778 - loss: 0.3848 - val_accuracy: 0.6667 - val_loss: 1.8443
Epoch 66/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 85ms/step - accuracy: 0.9111 - loss: 0.4379 - val_accuracy: 0.7333 - val_loss: 1.7917
Epoch 67/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 76ms/step - accuracy: 0.9778 - loss: 0.3802 - val_accuracy: 0.6667 - val_loss: 1.8370
Epoch 68/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 80ms/step - accuracy: 0.9111 - loss: 0.3989 - val_accuracy: 0.7333 - val_loss: 1.8133
Epoch 69/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 119ms/step - accuracy: 0.9778 - loss: 0.3669 - val_accuracy: 0.6667 - val_loss: 1.8613
Epoch 70/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 89ms/step - accuracy: 0.9111 - loss: 0.3896 - val_accuracy: 0.7333 - val_loss: 1.8313
Epoch 71/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 84ms/step - accuracy: 0.9556 - loss: 0.3449 - val_accuracy: 0.6667 - val_loss: 1.9529
Epoch 72/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 83ms/step - accuracy: 0.9111 - loss: 0.4615 - val_accuracy: 0.7333 - val_loss: 1.8520
Epoch 73/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 83ms/step - accuracy: 0.9556 - loss: 0.3478 - val_accuracy: 0.6667 - val_loss: 1.8818
Epoch 74/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 94ms/step - accuracy: 0.9111 - loss: 0.3624 - val_accuracy: 0.7333 - val_loss: 1.8711
Epoch 75/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 116ms/step - accuracy: 0.9556 - loss: 0.3388 - val_accuracy: 0.6667 - val_loss: 1.8886
Epoch 76/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 100ms/step - accuracy: 0.9111 - loss: 0.3344 - val_accuracy: 0.7333 - val_loss: 1.8884
Epoch 77/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 80ms/step - accuracy: 0.9556 - loss: 0.3127 - val_accuracy: 0.7333 - val_loss: 1.9118
Epoch 78/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 66ms/step - accuracy: 0.9111 - loss: 0.3160 - val_accuracy: 0.7333 - val_loss: 1.9095
Epoch 79/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 83ms/step - accuracy: 0.9556 - loss: 0.2934 - val_accuracy: 0.6667 - val_loss: 1.9631
Epoch 80/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 107ms/step - accuracy: 0.9111 - loss: 0.3328 - val_accuracy: 0.7333 - val_loss: 1.9353
Epoch 81/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 82ms/step - accuracy: 0.9556 - loss: 0.2861 - val_accuracy: 0.6667 - val_loss: 2.0600
Epoch 82/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 186ms/step - accuracy: 0.9333 - loss: 0.4247 - val_accuracy: 0.7333 - val_loss: 1.9564
Epoch 83/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 77ms/step - accuracy: 0.9556 - loss: 0.3260 - val_accuracy: 0.6667 - val_loss: 1.9843
Epoch 84/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 101ms/step - accuracy: 0.9333 - loss: 0.3400 - val_accuracy: 0.7333 - val_loss: 1.9483
Epoch 85/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 110ms/step - accuracy: 0.9556 - loss: 0.2867 - val_accuracy: 0.7333 - val_loss: 1.9480
Epoch 86/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 83ms/step - accuracy: 0.9778 - loss: 0.2728 - val_accuracy: 0.7333 - val_loss: 1.9561
Epoch 87/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 83ms/step - accuracy: 0.9556 - loss: 0.2493 - val_accuracy: 0.7333 - val_loss: 1.9710
Epoch 88/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 92ms/step - accuracy: 0.9556 - loss: 0.2444 - val_accuracy: 0.7333 - val_loss: 1.9790
Epoch 89/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 77ms/step - accuracy: 0.9556 - loss: 0.2322 - val_accuracy: 0.7333 - val_loss: 1.9958
Epoch 90/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 126ms/step - accuracy: 0.9556 - loss: 0.2317 - val_accuracy: 0.7333 - val_loss: 1.9999
Epoch 91/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 84ms/step - accuracy: 0.9556 - loss: 0.2161 - val_accuracy: 0.7333 - val_loss: 2.0321
Epoch 92/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 79ms/step - accuracy: 0.9778 - loss: 0.2395 - val_accuracy: 0.7333 - val_loss: 2.0224
Epoch 93/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 86ms/step - accuracy: 0.9556 - loss: 0.2001 - val_accuracy: 0.6667 - val_loss: 2.1264
Epoch 94/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 113ms/step - accuracy: 1.0000 - loss: 0.3454 - val_accuracy: 0.7333 - val_loss: 2.0443
Epoch 95/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 84ms/step - accuracy: 0.9556 - loss: 0.2320 - val_accuracy: 0.6667 - val_loss: 2.0787
Epoch 96/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 81ms/step - accuracy: 0.9333 - loss: 0.2522 - val_accuracy: 0.7333 - val_loss: 2.0682
Epoch 97/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 104ms/step - accuracy: 0.9556 - loss: 0.2448 - val_accuracy: 0.6667 - val_loss: 2.0979
Epoch 98/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 109ms/step - accuracy: 0.9333 - loss: 0.2592 - val_accuracy: 0.7333 - val_loss: 2.0606
Epoch 99/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 114ms/step - accuracy: 0.9556 - loss: 0.2108 - val_accuracy: 0.7333 - val_loss: 2.0659
Epoch 100/100
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 83ms/step - accuracy: 0.9778 - loss: 0.2031 - val_accuracy: 0.7333 - val_loss: 2.0635
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 230ms/step

---------------------------------------------------------------------------
KeyError                                  Traceback (most recent call last)
Cell In[2], line 119
    117 for seq_index in range(4):  # Test the first 4 sequences
    118     input_seq = input_sequences[seq_index: seq_index + 1]
--> 119     decoded_sentence = decode_sequence(input_seq)
    120     print(f'Input sentence: {input_texts[seq_index]}')
    121     print(f'Translated sentence: {decoded_sentence}')

Cell In[2], line 94, in decode_sequence(input_seq)
     92 states_value = encoder_model.predict(input_seq)
     93 target_seq = np.zeros((1, 1))
---> 94 target_seq[0, 0] = target_tokenizer.word_index['<start>']
     96 stop_condition = False
     97 decoded_sentence = ''

KeyError: '<start>'
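
Why this fails: Keras's `Tokenizer` applies a default `filters` string that strips most punctuation, including `<` and `>`, so the markers `<start>` and `<end>` are indexed as plain `start` and `end`, and the lookup `target_tokenizer.word_index['<start>']` raises `KeyError`. Below is a minimal fix sketch, assuming the cell above is re-run from the top after the change (the target vocabulary indices change, so the model must be retrained); the rewritten `decode_sequence` reuses the `encoder_model`, `decoder_model`, and hyperparameter names defined above, and also repairs the stop condition, which in the original compares a character count (`len(decoded_sentence)`) against a token limit.

[ ]: # Fix sketch: rebuild the target tokenizer with the default filter string
# minus the angle brackets, so '<start>'/'<end>' survive tokenization.
target_tokenizer = Tokenizer(filters='!"#$%&()*+,-./:;=?@[\\]^_`{|}~\t\n')
target_tokenizer.fit_on_texts(target_texts)
assert '<start>' in target_tokenizer.word_index  # markers are now in the vocab

# Greedy decoding that stops on <end> or on a *token* count, not characters.
def decode_sequence(input_seq):
    states_value = encoder_model.predict(input_seq)
    target_seq = np.zeros((1, 1))
    target_seq[0, 0] = target_tokenizer.word_index['<start>']

    decoded_words = []
    while True:
        output_tokens, h, c = decoder_model.predict([target_seq] + states_value)
        sampled_token_index = int(np.argmax(output_tokens[0, -1, :]))
        sampled_word = target_tokenizer.index_word.get(sampled_token_index, '')
        if sampled_word == '<end>' or len(decoded_words) >= max_decoder_seq_length:
            break
        decoded_words.append(sampled_word)
        # Feed the sampled token and updated states back in for the next step
        target_seq = np.zeros((1, 1))
        target_seq[0, 0] = sampled_token_index
        states_value = [h, c]

    return ' '.join(decoded_words)

If retraining is undesirable, an alternative is to keep the default filters and look up the stripped forms instead, i.e. `target_tokenizer.word_index['start']` and a stop check against `'end'`, since those are the tokens the fitted tokenizer actually stores.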

[ ]:
