From 86055d76d59132d2ecad3c7d7024823b548fd103 Mon Sep 17 00:00:00 2001
From: "Chen, Zhentao"
Date: Thu, 25 Jan 2024 16:39:05 +0800
Subject: [PATCH] fix optimize_model not working (#9995)

---
 python/llm/dev/benchmark/harness/bigdl_llm.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/python/llm/dev/benchmark/harness/bigdl_llm.py b/python/llm/dev/benchmark/harness/bigdl_llm.py
index f14d680a..b370301e 100644
--- a/python/llm/dev/benchmark/harness/bigdl_llm.py
+++ b/python/llm/dev/benchmark/harness/bigdl_llm.py
@@ -47,8 +47,8 @@ class BigDLLM(AutoCausalLM):
             if k not in self.AutoCausalLM_ARGS:
                 self.bigdl_llm_kwargs[k] = kwargs.pop(k)
 
-        self.bigdl_llm_kwargs['use_cache'] = kwargs.get('use_cache', True)
-        self.bigdl_llm_kwargs['optimize_model'] = kwargs.get('optimize_model', True)
+        self.bigdl_llm_kwargs['use_cache'] = self.bigdl_llm_kwargs.get('use_cache', True)
+        self.bigdl_llm_kwargs['optimize_model'] = self.bigdl_llm_kwargs.get('optimize_model', True)
 
         AutoModelForCausalLM.from_pretrained = partial(AutoModelForCausalLM.from_pretrained, **self.bigdl_llm_kwargs)
         kwargs['trust_remote_code'] = kwargs.get('trust_remote_code', True)
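
Editor's note (illustrative sketch, not part of the patch): keys the harness does not recognize are popped out of kwargs into self.bigdl_llm_kwargs before the two changed lines run, so the old kwargs.get('optimize_model', True) could only ever return the default True and silently discarded a caller's optimize_model=False. Reading back from self.bigdl_llm_kwargs preserves the caller's value. The standalone snippet below (hypothetical names and argument set, not the harness API) reproduces that ordering bug:

    # Minimal sketch of the pattern the patch fixes (hypothetical, standalone).
    AutoCausalLM_ARGS = {'pretrained', 'device', 'batch_size'}

    def build_kwargs(**kwargs):
        bigdl_llm_kwargs = {}
        # Unknown keys are popped out of kwargs, as in BigDLLM.__init__.
        for k in list(kwargs.keys()):
            if k not in AutoCausalLM_ARGS:
                bigdl_llm_kwargs[k] = kwargs.pop(k)

        # Old behaviour: 'optimize_model' has already been popped from kwargs,
        # so this always yields the default True, discarding the caller's False.
        old = kwargs.get('optimize_model', True)

        # Fixed behaviour: read back from the dict the key was moved into.
        new = bigdl_llm_kwargs.get('optimize_model', True)
        return old, new

    print(build_kwargs(pretrained='some-model', optimize_model=False))  # (True, False)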