import torch
class TokenEmbedding(torch.nn.Module):
    """Abstract base class for token embedders.

    A concrete subclass maps tokens to vectors; the lookup can be backed by
    an embedding matrix, a language model (ELMo), a neural machine
    translation model (CoVe), or hand-crafted features.

    * Args:
        vocab: Vocab (rqa.tokens.vocab) — only its length is used here,
            via get_vocab_size().
    """

    def __init__(self, vocab):
        super().__init__()
        self.vocab = vocab

    def forward(self, tokens):
        """Embedding look-up: map tokens to vectors.

        Must be overridden by subclasses; the base class raises
        NotImplementedError.
        """
        raise NotImplementedError

    def get_output_dim(self):
        """Return the dimensionality of the produced embeddings.

        Must be overridden by subclasses; the base class raises
        NotImplementedError.
        """
        raise NotImplementedError

    def get_vocab_size(self):
        """Return the number of entries in the vocabulary."""
        return len(self.vocab)