LLM: fix langchain windows failure (#8417)
parent 6251ad8934
commit 146662bc0d

3 changed files with 19 additions and 3 deletions

.github/workflows/llm_unit_tests_windows.yml (vendored): 4 changes
@@ -64,6 +64,10 @@ jobs:
     - name: Run LLM unittests
       shell: bash
       run: |
+        pip install pytest pydantic
+        pip install -U langchain==0.0.184
+        pip install -U chromadb==0.3.25
+        pip install -U typing_extensions==4.5.0
         bash python/llm/test/run-llm-windows-tests.sh
       env:
         ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
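
Not part of the commit, just an illustrative aside: a minimal Python sketch of how one could double-check on the Windows runner that the pins requested above actually resolved. The package names and version strings come from the diff; where such a check would live is left open.

    # Sketch only: confirm the pinned versions from the workflow step resolved.
    from importlib.metadata import version

    assert version("langchain") == "0.0.184"
    assert version("chromadb") == "0.3.25"
    assert version("typing_extensions") == "4.5.0"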

python/llm/test/langchain/test_langchain.py: 6 changes
@@ -49,7 +49,7 @@ class Test_Models_Basics(TestCase):
         llm = BigdlLLM(
             model_path=self.llama_model_path,
             max_tokens=32,
-            n_threads=22)
+            n_threads=2)
         question = "What is AI?"
         result = llm(question)

@@ -58,7 +58,7 @@ class Test_Models_Basics(TestCase):
             model_path=self.gptneox_model_path,
             model_family="gptneox",
             max_tokens=32,
-            n_threads=22)
+            n_threads=2)
         question = "What is AI?"
         result = llm(question)

@@ -67,7 +67,7 @@ class Test_Models_Basics(TestCase):
             model_path=self.bloom_model_path,
             model_family="bloom",
             max_tokens=32,
-            n_threads=22)
+            n_threads=2)
         question = "What is AI?"
         result = llm(question)
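
For context, a rough sketch of the langchain usage those tests exercise: constructing the BigdlLLM wrapper and prompting it once, now with n_threads=2. Only the constructor arguments and the prompt come from the diff; the import path and model path below are assumptions.

    # Sketch of the usage covered by Test_Models_Basics (not code from this commit).
    from bigdl.llm.langchain.llms import BigdlLLM  # assumed import location

    llm = BigdlLLM(
        model_path="/path/to/converted-bloom-model.bin",  # placeholder path
        model_family="bloom",
        max_tokens=32,
        n_threads=2)  # the reduced thread count introduced by this commit
    result = llm("What is AI?")
    print(result)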

python/llm/test/run-llm-windows-tests.sh: 12 changes
@@ -44,3 +44,15 @@ time=$((now-start))
 
 echo "Bigdl-llm inference test finished"
 echo "Time used:$time seconds"
+
+
+echo "# Start testing langchain"
+start=$(date "+%s")
+
+python -m pytest -s ${ANALYTICS_ZOO_ROOT}/python/llm/test/langchain/test_langchain.py -k 'test_langchain_llm_bloom'
+
+now=$(date "+%s")
+time=$((now-start))
+
+echo "Bigdl-llm langchain test finished"
+echo "Time used:$time seconds"
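
To reproduce the new step outside CI, the script can be driven the same way the workflow drives it, with ANALYTICS_ZOO_ROOT pointing at the repository root. A small Python sketch follows; the checkout path is a placeholder.

    # Sketch: run the Windows LLM test script locally, as the workflow does.
    import os
    import subprocess

    repo_root = "/path/to/BigDL"  # placeholder for the checked-out repo
    env = dict(os.environ, ANALYTICS_ZOO_ROOT=repo_root)
    subprocess.run(
        ["bash", "python/llm/test/run-llm-windows-tests.sh"],
        cwd=repo_root,
        env=env,
        check=True,
    )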