From d766fef0570f37e619838a1fb9eb1cd8fea99906 Mon Sep 17 00:00:00 2001 From: Yunnglin Date: Thu, 26 Dec 2024 17:48:15 +0800 Subject: [PATCH] Fix predict: guard 'func' key deletion in from_args and pass input_ids positionally to model.generate --- evalscope/config.py | 4 +++- evalscope/models/model_adapter.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/evalscope/config.py b/evalscope/config.py index 749193e..f696427 100644 --- a/evalscope/config.py +++ b/evalscope/config.py @@ -114,7 +114,9 @@ def from_json(json_file: str): def from_args(args: Namespace): # Convert Namespace to a dictionary and filter out None values args_dict = {k: v for k, v in vars(args).items() if v is not None} - del args_dict['func'] # Note: compat CLI arguments + + if 'func' in args_dict: + del args_dict['func'] # Note: compat CLI arguments return TaskConfig.from_dict(args_dict) diff --git a/evalscope/models/model_adapter.py b/evalscope/models/model_adapter.py index d52bdf7..a85fc63 100644 --- a/evalscope/models/model_adapter.py +++ b/evalscope/models/model_adapter.py @@ -429,7 +429,7 @@ def _model_generate(self, query: str, infer_cfg: dict) -> str: fix_do_sample_warning(self.generation_config) # Run inference - output_ids = self.model.generate(**inputs, generation_config=self.generation_config) + output_ids = self.model.generate(input_ids, generation_config=self.generation_config) response = self.tokenizer.decode(output_ids[0, len(input_ids[0]):], skip_special_tokens=True) return response