From cde977c3666043ee177c2722e23a67c111706438 Mon Sep 17 00:00:00 2001
From: Ning Dong
Date: Tue, 17 Dec 2019 17:33:19 -0800
Subject: [PATCH] NAT bug fix

Summary:
Remove a duplicate operation. Line 1838 is doing the same thing.

Earlier, max_iter > 1 inference was broken internally. This fixes it, though
we don't observe much performance gain for max_iter > 1, and the performance
at max_iter = 1 doesn't change.

Reviewed By: kahne

Differential Revision: D19145640

fbshipit-source-id: d16ac37cfa2b77b5d25e2bd4d304c4847e71f6d1
---
 pytorch_translate/ensemble_export.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pytorch_translate/ensemble_export.py b/pytorch_translate/ensemble_export.py
index 36d5818e..d394af08 100644
--- a/pytorch_translate/ensemble_export.py
+++ b/pytorch_translate/ensemble_export.py
@@ -1838,7 +1838,6 @@ def generate(self, models, src_tokens, src_lengths, prefix_tokens=None):
             sent_idxs = script_skip_tensor(sent_idxs, not_terminated)
 
             prev_output_tokens = prev_decoder_out.output_tokens.clone()
-            sent_idxs = sent_idxs[not_terminated]
 
         return (
            finalized_tokens_list,
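
Illustrative note (not part of the patch): a minimal sketch of why the deleted line was a bug. It assumes script_skip_tensor(x, mask) behaves like x[mask] (keeping only rows where the boolean mask is True); the stand-in definition and the toy tensor shapes below are hypothetical, not the actual ensemble_export.py code.

    import torch

    def script_skip_tensor(x, mask):
        # Stand-in for the TorchScript-friendly helper used in the patch;
        # assumed equivalent to boolean-mask indexing.
        return x[mask]

    sent_idxs = torch.tensor([0, 1, 2, 3])                    # one entry per active sentence
    not_terminated = torch.tensor([True, False, True, True])  # sentence 1 has finished

    # First filter (the line kept at 1838):
    sent_idxs = script_skip_tensor(sent_idxs, not_terminated)  # tensor([0, 2, 3])

    # The removed duplicate re-applied the same 4-element mask to the
    # already-filtered 3-element tensor, which raises an IndexError in
    # PyTorch -- consistent with max_iter > 1 decoding being broken,
    # since the loop only runs a second iteration for max_iter > 1:
    #   sent_idxs = sent_idxs[not_terminated]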