From 7a621a4db0f1c0007e1bb8c0f2be9bab28654dce Mon Sep 17 00:00:00 2001
From: "Keyan (Kyrie) Zhang" <79576162+Zhangky11@users.noreply.github.com>
Date: Fri, 8 Mar 2024 13:38:52 +0800
Subject: [PATCH] Fix device_map bug by raising an error when using device_map=xpu (#10340)

* Fix device_map bug by raising an error when using device_map=xpu

* Fix sync error

* Fix python style

* Use invalidInputError instead of invalidOperationError
---
 python/llm/src/bigdl/llm/transformers/model.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/python/llm/src/bigdl/llm/transformers/model.py b/python/llm/src/bigdl/llm/transformers/model.py
index daa420ce..248d61a9 100644
--- a/python/llm/src/bigdl/llm/transformers/model.py
+++ b/python/llm/src/bigdl/llm/transformers/model.py
@@ -145,6 +145,10 @@ class _BaseAutoModelClass:
         invalidInputError(model_hub in ["huggingface", "modelscope"],
                           "The parameter `model_hub` is supposed to be `huggingface` or "
                           f"`modelscope`, but got {model_hub}.")
+        invalidInputError(not ('device_map' in kwargs and 'xpu' in kwargs['device_map']),
+                          "Please do not use `device_map` "
+                          "with `xpu` value as an argument. "
+                          "Use model.to('xpu') instead.")
         if model_hub == "huggingface":
             config_dict, _ = PretrainedConfig.get_config_dict(pretrained_model_name_or_path)
         elif model_hub == "modelscope":