Fix C-Eval ChatGLM loading issue (#10206)
* Add c-eval workflow and modify running files
* Modify the chatglm evaluator file
* Modify the ceval workflow for triggering test
* Modify the ceval workflow file
* Modify the ceval workflow file
* Modify ceval workflow
* Adjust the ceval dataset download
* Add ceval workflow dependencies
* Modify ceval workflow dataset download
* Add ceval test dependencies
* Add ceval test dependencies
* Correct the result print
* Fix the nightly test trigger time
* Fix ChatGLM loading issue
This commit is contained in:
parent 94cb16fe40
commit 7cbc2429a6
1 changed file with 2 additions and 2 deletions
@@ -22,7 +22,7 @@ from thefuzz import process
 from transformers import AutoTokenizer
 from evaluators.evaluator import Evaluator
-from bigdl.llm.transformers import AutoModelForCausalLM
+from bigdl.llm.transformers import AutoModel
 from transformers.generation.utils import LogitsProcessorList
 from transformers.generation.logits_process import LogitsProcessor


@@ -42,7 +42,7 @@ class ChatGLMEvaluator(Evaluator):
             self.model_path,
             trust_remote_code=True
         )
-        self.model = AutoModelForCausalLM.from_pretrained(
+        self.model = AutoModel.from_pretrained(
             self.model_path,
             load_in_low_bit=self.qtype,
             optimize_model=True,
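For context, a minimal sketch of how the evaluator's setup reads after this fix. Only `ChatGLMEvaluator`, `self.model_path`, `self.qtype`, `load_in_low_bit`, `optimize_model`, and the switch to `AutoModel` come from the diff above; the constructor signature, the base-class behaviour, and the `trust_remote_code=True` flag on the model call are assumptions made for illustration.

    # Sketch only: constructor signature and base-class behaviour are assumed;
    # model_path / qtype / load_in_low_bit / optimize_model come from the diff above.
    from transformers import AutoTokenizer

    from evaluators.evaluator import Evaluator
    from bigdl.llm.transformers import AutoModel


    class ChatGLMEvaluator(Evaluator):
        def __init__(self, choices, model_path, device, qtype):
            super().__init__(choices, model_path, device, qtype)
            self.tokenizer = AutoTokenizer.from_pretrained(
                self.model_path,
                trust_remote_code=True
            )
            # ChatGLM checkpoints expose their custom model class via AutoModel,
            # hence this fix; load_in_low_bit applies BigDL-LLM low-bit quantization.
            self.model = AutoModel.from_pretrained(
                self.model_path,
                load_in_low_bit=self.qtype,
                optimize_model=True,
                trust_remote_code=True  # assumed: ChatGLM needs remote code for the model as well
            )

Loading ChatGLM through `AutoModel` mirrors the upstream THUDM examples, where the checkpoint's custom model class is resolved via `AutoModel.from_pretrained(..., trust_remote_code=True)`; that is presumably why the `AutoModelForCausalLM` path failed to load here.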