import jovian
jovian.commit()
[jovian] Saving notebook..
[jovian] Updating notebook "6c34a9c414cb4a96847b96e91f21eecb" on https://jvn.io
[jovian] Uploading notebook..
[jovian] Capturing environment..
[jovian] Committed successfully! https://jvn.io/rubensilver/6c34a9c414cb4a96847b96e91f21eecb
%matplotlib inline
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim

torch.manual_seed(1)  # fix the random seed so the embedding initialization is reproducible
<torch._C.Generator at 0x7f9565d28bd0>
word_to_ix = {"hello": 0, "world": 1}
embeds = nn.Embedding(2, 5)  # 2 words in vocab, 5 dimensional embeddings
lookup_tensor = torch.tensor([word_to_ix["hello"]], dtype=torch.long)
hello_embed = embeds(lookup_tensor)
print(hello_embed)
tensor([[ 0.6614, 0.2669, 0.0617, 0.6213, -0.4519]], grad_fn=<EmbeddingBackward>)
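As a small extension (not in the original notebook, names are illustrative), the same embedding table can also be queried with a batch of indices in a single call; the output then has shape (num_indices, embedding_dim).

# look up both vocabulary words at once
all_indices = torch.tensor([word_to_ix["hello"], word_to_ix["world"]], dtype=torch.long)
all_embeds = embeds(all_indices)
print(all_embeds.shape)  # torch.Size([2, 5])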