From 6a87c8da4e6f009f0d00e901d53ab2548f9f7462 Mon Sep 17 00:00:00 2001
From: Konrad Gerlach
Date: Fri, 10 Jan 2025 17:17:50 +0100
Subject: [PATCH] tests: make caching test more complex

---
 tests/test_environments.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/test_environments.py b/tests/test_environments.py
index 328fe07690..7a12bfd905 100644
--- a/tests/test_environments.py
+++ b/tests/test_environments.py
@@ -386,7 +386,7 @@ def test_cached_generate_batched(self):
             [True, True, True], past_key_values, past_attention_masks, past_input_ids
         )

-        input_texts2 = [" short interim", " a slightly longer interim", "another interim"]
+        input_texts2 = [" short interim", " a somewhat longer section in between", "something else entirely! So, "]
         model_inputs2 = [self.gpt2_tokenizer(txt, return_tensors="pt").input_ids.squeeze() for txt in input_texts2]
         outputs_cached, _, _, _, _ = env._generate_batched(