Commit 98c4d81c authored by Mayumi Ohta
Browse files

replace torch.div() with torch.floor_divide(); add as_tuple=False to tensor.nonzero() [warning in torch 1.6.0]

replace torch.div() with torch.floor_divide(); add as_tuple=False to tensor.nonzero() [warning in torch 1.6.0]
parent e0cd39d6
Loading
Loading
Loading
Loading
+1 −1
Original line number | Diff line number | Diff line
@@ -43,7 +43,7 @@ class XentLoss(nn.Module):
        # give padding probability of 0 everywhere
        smooth_dist[:, self.pad_index] = 0
        # masking out padding area (sum of probabilities for padding area = 0)
-        padding_positions = torch.nonzero(targets.data == self.pad_index)
+        padding_positions = torch.nonzero(targets.data == self.pad_index, as_tuple=False)
        # pylint: disable=len-as-condition
        if len(padding_positions) > 0:
            smooth_dist.index_fill_(0, padding_positions.squeeze(), 0.0)
+3 −3
Original line number | Diff line number | Diff line
@@ -348,14 +348,14 @@ def beam_search(
                b = batch_offset[i]
                if end_condition[i]:
                    is_finished[i].fill_(1)
-                finished_hyp = is_finished[i].nonzero().view(-1)
+                finished_hyp = is_finished[i].nonzero(as_tuple=False).view(-1)
                # store finished hypotheses for this batch
                for j in finished_hyp:
                    # Check if the prediction has more than one EOS.
                    # If it has more than one EOS, it means that the
                    # prediction should have already been added to
                    # the hypotheses, so you don't have to add them again.
-                    if (predictions[i, j, 1:] == eos_index).nonzero().numel() \
+                    if (predictions[i, j, 1:] == eos_index).nonzero(as_tuple=False).numel() \
                            < 2:
                        # ignore start_token
                        hypotheses[b].append(
@@ -370,7 +370,7 @@ def beam_search(
                            break
                        results["scores"][b].append(score)
                        results["predictions"][b].append(pred)
-            non_finished = end_condition.eq(False).nonzero().view(-1)
+            non_finished = end_condition.eq(False).nonzero(as_tuple=False).view(-1)
            # if all sentences are translated, no need to go further
            # pylint: disable=len-as-condition
            if len(non_finished) == 0: