mosaicml-mpt-7b-bnb-8bit-smashed / custom_embedding.py
sharpenb's picture
ef3db1afaaa550d55129cf82f22c217f212673ccb394ead070121bf74de5efb7
c51cf91 verified
raw
history blame
292 Bytes
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor
class SharedEmbedding(nn.Embedding):
    """Embedding layer whose weight doubles as an output (unembedding) projection.

    With ``unembed=False`` this is a plain ``nn.Embedding`` lookup. With
    ``unembed=True`` the same weight matrix is applied as a linear map
    (``input @ weight.T``), tying the input embedding to the output logits
    projection so both directions share one parameter tensor.
    """

    def forward(self, input: Tensor, unembed: bool = False) -> Tensor:
        if not unembed:
            # Ordinary token-id -> embedding-vector lookup.
            return super().forward(input)
        # Tied-weight projection: hidden states back to vocabulary scores.
        return F.linear(input, self.weight)