# Build a (BATCH_SIZE, max_len) sequence-padding mask: row i gets ones in its
# first seqlen_1d[i] positions, leaving the padded tail untouched.
# NOTE(review): assumes `mask` is zero-initialized before this loop — confirm at the caller.
for i in range(BATCH_SIZE):
    s_len = seqlen_1d[i]
    mask[i, 0:s_len] = 1
# Convert the filled mask to a float32 tensor on the model's device.
# NOTE(review): torch.FloatTensor(mask) always copies; torch.as_tensor(mask,
# dtype=torch.float32) would avoid the copy if `mask` is already a numpy array —
# left as-is to preserve exact behavior.
mask = torch.FloatTensor(mask).to(self.device)