From 493a1d29dcbe43e786752c344292b1de9f2fb74b Mon Sep 17 00:00:00 2001
From: Yih-Dar <2521628+ydshieh@users.noreply.github.com>
Date: Fri, 10 Mar 2023 21:54:38 +0100
Subject: [PATCH] Revert "[GPT2] Propose fix for #21080 (#21853)"

This reverts commit a3fef89b2694fac4dd642a3f77d3e96d0c3df82a.
---
 .../modeling_decision_transformer.py          |  9 +--------
 src/transformers/models/gpt2/modeling_gpt2.py |  9 +--------
 tests/models/gpt2/test_modeling_gpt2.py       | 21 ---------------------
 3 files changed, 2 insertions(+), 37 deletions(-)

diff --git a/src/transformers/models/decision_transformer/modeling_decision_transformer.py b/src/transformers/models/decision_transformer/modeling_decision_transformer.py
index d489be4f2d3fd8..0add713d8e878e 100755
--- a/src/transformers/models/decision_transformer/modeling_decision_transformer.py
+++ b/src/transformers/models/decision_transformer/modeling_decision_transformer.py
@@ -553,14 +553,7 @@ def forward(
             past_key_values = tuple([None] * len(self.h))
         else:
             past_length = past_key_values[0][0].size(-2)
-
-        if attention_mask is not None and len(attention_mask.shape) == 2 and position_ids is None:
-            # create position_ids on the fly for batch generation
-            position_ids = attention_mask.long().cumsum(-1) - 1
-            position_ids.masked_fill_(attention_mask == 0, 1)
-            if past_length > 0:
-                position_ids = position_ids[:, past_length : input_shape[-1] + past_length :]
-        elif position_ids is None:
+        if position_ids is None:
             position_ids = torch.arange(past_length, input_shape[-1] + past_length, dtype=torch.long, device=device)
             position_ids = position_ids.unsqueeze(0).view(-1, input_shape[-1])
 
diff --git a/src/transformers/models/gpt2/modeling_gpt2.py b/src/transformers/models/gpt2/modeling_gpt2.py
index 03a1a3c02e0bd5..bf665143148075 100644
--- a/src/transformers/models/gpt2/modeling_gpt2.py
+++ b/src/transformers/models/gpt2/modeling_gpt2.py
@@ -797,14 +797,7 @@ def forward(
             past_key_values = tuple([None] * len(self.h))
         else:
             past_length = past_key_values[0][0].size(-2)
-
-        if attention_mask is not None and len(attention_mask.shape) == 2 and position_ids is None:
-            # create position_ids on the fly for batch generation
-            position_ids = attention_mask.long().cumsum(-1) - 1
-            position_ids.masked_fill_(attention_mask == 0, 1)
-            if past_length > 0:
-                position_ids = position_ids[:, past_length : input_shape[-1] + past_length :]
-        elif position_ids is None:
+        if position_ids is None:
             position_ids = torch.arange(past_length, input_shape[-1] + past_length, dtype=torch.long, device=device)
             position_ids = position_ids.unsqueeze(0).view(-1, input_shape[-1])
 
diff --git a/tests/models/gpt2/test_modeling_gpt2.py b/tests/models/gpt2/test_modeling_gpt2.py
index be5445525b4e9a..09d828fd7f33a9 100644
--- a/tests/models/gpt2/test_modeling_gpt2.py
+++ b/tests/models/gpt2/test_modeling_gpt2.py
@@ -590,27 +590,6 @@ def test_batch_generation(self):
         self.assertTrue(batch_out_sentence_tt != batch_out_sentence)  # token_type_ids should change output
         self.assertListEqual(expected_output_sentence, [non_padded_sentence, padded_sentence])
 
-    @slow
-    def test_batch_forward(self):
-        tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
-        tokenizer.padding_side = "left"
-
-        # This tokenizer has no pad token, so we have to set it in some way
-        # Define PAD Token = EOS Token = 50256
-        tokenizer.pad_token = tokenizer.eos_token
-
-        model = GPT2LMHeadModel.from_pretrained("gpt2", pad_token_id=tokenizer.eos_token_id)
-        sentences = ["Hello, my dog is a little bit of a mess. I'm not sure if he's"]
-        inputs = tokenizer(sentences, padding=True, return_tensors="pt")
-        logits = model(**inputs).logits[:, -1, :]
-        indexes = torch.argmax(logits).item()
-
-        inputs_padded = tokenizer(sentences, padding="max_length", max_length=30, return_tensors="pt")
-        logits_padded = model(**inputs_padded).logits[:, -1, :]
-        indexes_padded = torch.argmax(logits_padded).item()
-
-        self.assertTrue(indexes == indexes_padded)
-
     @slow
     def test_batch_generation_2heads(self):
         model = GPT2DoubleHeadsModel.from_pretrained("gpt2")