# %%
# code by Tae Hwan Jung @graykode
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as plt
# Build the vocabulary from the training sentences.
# NOTE(review): `sentences` (a list of strings) must be defined earlier in the
# file — it is not visible in this chunk.
word_sequence = " ".join(sentences).split()      # flat list of word tokens, in order
word_list = list(set(word_sequence))             # unique words (order arbitrary)
# Map each word to a unique integer index (fixed: `in enumerate` — the
# original `inenumerate` was a syntax error).
word_dict = {w: i for i, w in enumerate(word_list)}
voc_size = len(word_list)                        # vocabulary size
# Make skip-grams with a one-word window: for every interior position i,
# pair the target word with its immediate left and right neighbors.
# (Fixed: `in range` — the original `inrange` was a syntax error.)
skip_grams = []
for i in range(1, len(word_sequence) - 1):
    target = word_dict[word_sequence[i]]
    # Indices of the two context words surrounding the target.
    context = [word_dict[word_sequence[i - 1]], word_dict[word_sequence[i + 1]]]
    for w in context:
        skip_grams.append([target, w])  # each entry: [target_idx, context_idx]