# Mask-construction utilities (MakePadMask, sequence_mask) and tensor helpers
# (normalize, subsequent_mask) for ONNX-friendly sequence models.
from typing import Optional
import torch
import torch.nn as nn
import numpy as np
class MakePadMask(nn.Module):
    """ONNX-exportable pad-mask builder.

    Precomputes a triangular lookup table so the forward pass consists only
    of indexing and slicing, which convert cleanly to ONNX (unlike the
    original dynamic ``make_pad_mask``). Produces the same mask tensor as
    the original implementation.

    Args:
        max_seq_len: size of the precomputed table; sequences longer than
            this cannot be masked.
        flip: if True, padded positions are marked 1 (row ``l-1`` of the
            table is True at columns >= ``l``); if False, the valid
            positions are marked instead.
    """

    def __init__(self, max_seq_len: int = 512, flip: bool = True):
        super().__init__()
        if flip:
            table = torch.Tensor(1 - np.tri(max_seq_len)).type(torch.bool)
        else:
            table = torch.Tensor(np.tri(max_seq_len)).type(torch.bool)
        # Register as a (non-persistent) buffer so the table follows the
        # module across .to(device)/.cuda(); persistent=False keeps the
        # state_dict identical to the previous plain-attribute version.
        self.register_buffer("mask_pad", table, persistent=False)

    def forward(self, lengths, xs=None, length_dim=-1, maxlen=None):
        """Make mask tensor containing indices of padded part.

        This implementation creates the same mask tensor as the original
        make_pad_mask, and can be converted into ONNX format.
        Dimension length of xs should be 2 or 3.

        Args:
            lengths: 1-D LongTensor of sequence lengths (per batch item).
            maxlen: explicit mask width; otherwise taken from ``xs`` or
                ``max(lengths)``.

        Returns:
            float32 mask; 1.0 marks the padded part when ``flip=True``.

        Raises:
            ValueError: if ``length_dim == 0`` (the batch dimension).
        """
        if length_dim == 0:
            raise ValueError("length_dim cannot be 0: {}".format(length_dim))
        if xs is not None and len(xs.shape) == 3:
            # Broadcast each length over the non-batch, non-length axis so
            # fancy indexing below yields one mask row per (batch, channel).
            if length_dim == 1:
                lengths = lengths.unsqueeze(1).expand(*xs.transpose(1, 2).shape[:2])
            else:
                lengths = lengths.unsqueeze(1).expand(*xs.shape[:2])
        # Mask width: explicit maxlen > shape of xs > longest sequence.
        if maxlen is not None:
            m = maxlen
        elif xs is not None:
            m = xs.shape[-1]
        else:
            m = torch.max(lengths)
        # Row l-1 of the table already encodes the mask for length l.
        mask = self.mask_pad[lengths - 1][..., :m].type(torch.float32)
        if length_dim == 1:
            return mask.transpose(1, 2)
        else:
            return mask
class sequence_mask(nn.Module):
    """Build a validity mask: position ``j`` is on iff ``j < length``.

    NOTE(review): the constructor arguments ``max_seq_len`` and ``flip``
    are accepted for interface compatibility but are never used.
    """

    def __init__(self, max_seq_len=512, flip=True):
        super().__init__()

    def forward(self, lengths, max_seq_len=None, dtype=torch.float32, device=None):
        """Return a ``(batch, max_seq_len)`` mask of the requested dtype.

        Args:
            lengths: 1-D tensor of sequence lengths.
            max_seq_len: mask width; defaults to ``lengths.max()``.
            dtype: output dtype (default float32).
            device: optional target device for the result.
        """
        if max_seq_len is None:
            max_seq_len = lengths.max()
        # Compare each column index against every length via broadcasting.
        steps = torch.arange(0, max_seq_len, 1).to(lengths.device)
        mask = (steps < lengths.unsqueeze(dim=-1)).type(dtype)
        return mask if device is None else mask.to(device)
def normalize(
    input: torch.Tensor, p: float = 2.0, dim: int = 1, out: Optional[torch.Tensor] = None
) -> torch.Tensor:
    """L-p normalize ``input`` along ``dim``.

    Mirrors ``torch.nn.functional.normalize``: the denominator is clamped
    to a small epsilon so all-zero slices produce zeros rather than NaN.

    Args:
        input: tensor to normalize.
        p: order of the norm (default 2.0).
        dim: dimension to normalize over (default 1).
        out: optional pre-allocated output tensor.

    Returns:
        The normalized tensor (``out`` itself when ``out`` is given).
    """
    # Hoisted out of both branches; clamp_min guards division by zero
    # (same epsilon as torch.nn.functional.normalize).
    denom = input.norm(p, dim, keepdim=True).clamp_min(1e-12).expand_as(input)
    if out is None:
        return input / denom
    return torch.div(input, denom, out=out)
def subsequent_mask(size: torch.Tensor):
    """Return a (size, size) lower-triangular (causal) attention mask."""
    return torch.tril(torch.ones(size, size))
def MakePadMask_test():
    """Smoke test: print the pad mask for a single length-10 sequence."""
    lengths = torch.tensor([10]).type(torch.long)
    pad_mask = MakePadMask()(lengths)
    print(pad_mask)
# Run the MakePadMask smoke test when this file is executed as a script.
if __name__ == "__main__":
    MakePadMask_test()
|