%matplotlib inline
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim

torch.manual_seed(1)  # fix the random seed so the embedding initialization is reproducible
# Map each word in the vocabulary to a unique integer index
word_to_ix = {"hello": 0, "world": 1}
embeds = nn.Embedding(2, 5)  # 2 words in vocab, 5 dimensional embeddings
# nn.Embedding expects indices as a LongTensor; look up the vector for "hello"
lookup_tensor = torch.tensor([word_to_ix["hello"]], dtype=torch.long)
hello_embed = embeds(lookup_tensor)
print(hello_embed)
tensor([[ 0.6614, 0.2669, 0.0617, 0.6213, -0.4519]], grad_fn=<EmbeddingBackward>)
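
The same embedding layer can look up several indices at once, returning one row per index. The short sketch below (the names lookup_all and all_embeds are illustrative, not part of the original notebook) retrieves the vectors for both words in the vocabulary:

# Look up multiple indices in a single call; the result has one row per index
lookup_all = torch.tensor([word_to_ix["hello"], word_to_ix["world"]], dtype=torch.long)
all_embeds = embeds(lookup_all)
print(all_embeds.shape)  # torch.Size([2, 5]) -- one 5-dimensional vector per word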