remove bigdl-llm test to fix langchain UT (#12613)

parent 9e895f04ec
commit a596f1ae5f

2 changed files with 14 additions and 18 deletions

.github/workflows/llm_unit_tests.yml (vendored, 25 changes)
@@ -212,39 +212,39 @@ jobs:
           fi
 
       - name: Run LLM cli test (Linux)
-        if: runner.os == 'Linux' 
+        if: runner.os == 'Linux'
         uses: ./.github/actions/llm/cli-test-linux
 
       - name: Setup Python Path
-        if: runner.os == 'Windows' 
+        if: runner.os == 'Windows'
         shell: bash
         run: |
           # Get Python interpreter path
           python_path=$(python -c 'import sys; print(sys.executable)')
           python_dir=$(dirname "$python_path")
           scripts_dir="$python_dir/Scripts"
-          
+
           # Set environment variables
           echo "PYTHON_DIR=$python_dir" >> $GITHUB_ENV
           echo "SCRIPTS_DIR=$scripts_dir" >> $GITHUB_ENV
 
       - name: Run LLM cli test (Windows)
-        if: runner.os == 'Windows' 
+        if: runner.os == 'Windows'
         shell: powershell
         run: |
           # Retrieve environment variables
           $pythonDir = $env:PYTHON_DIR
           $scriptsDir = $env:SCRIPTS_DIR
-          
+
           # Update PATH
           $env:PATH = "$pythonDir;$scriptsDir;$env:PATH"
-          
+
           # Run tests
           llm-cli.ps1 -t $env:THREAD_NUM -n 256 -x llama -m $env:LLAMA_INT4_CKPT_PATH -p 'Once upon a time,'
           llm-cli.ps1 -t $env:THREAD_NUM -n 256 -x gptneox -m $env:GPTNEOX_INT4_CKPT_PATH -p 'Once upon a time,'
           llm-cli.ps1 -t $env:THREAD_NUM -n 256 -x bloom -m $env:BLOOM_INT4_CKPT_PATH -p 'Once upon a time,'
           # llm-cli.ps1 -t $env:THREAD_NUM -x starcoder -m $env:STARCODER_INT4_CKPT_PATH -p 'def check_odd('
-          
+
       - name: Run LLM inference test
         shell: bash
         run: |
@@ -399,7 +399,7 @@ jobs:
             echo "Directory $VICUNA_7B_1_3_ORIGIN_PATH not found. Downloading from FTP server..."
             wget -r -nH --no-verbose --cut-dirs=1 $LLM_FTP_URL/llm/vicuna-7b-v1.3 -P $ORIGIN_DIR
           fi
-          
+
       - name: Run LLM inference test
         shell: bash
         run: |
@@ -412,7 +412,7 @@ jobs:
             fi
           fi
           python -m pip install datasets librosa soundfile einops tiktoken transformers_stream_generator
-          
+
           bash python/llm/test/run-llm-inference-tests-gpu.sh
 
       - name: Run LLM example tests
@@ -430,7 +430,7 @@ jobs:
             fi
           fi
           bash python/llm/test/run-llm-example-tests-gpu.sh
-          
+
       - name: Get Langchain version
         shell: bash
         id: get_langchain_version
@@ -446,7 +446,7 @@ jobs:
           repository: "langchain-ai/langchain"
           ref: ${{ join(steps.get_langchain_version.outputs.*, '\n') }}
           path: langchain_upstream
-            
+
       - name: Run LLM langchain GPU test
         shell: bash
         run: |
@@ -462,10 +462,9 @@ jobs:
             fi
           fi
           bash python/llm/test/run-llm-langchain-tests-gpu.sh
-          
+
           pip install -U langchain
           pip install -U langchain-community
-          pip install --pre --upgrade bigdl-llm[all]
           bash python/llm/test/run-langchain-upstream-tests.sh
 
       - name: Run LLM llamaindex GPU test

python/llm/test/run-langchain-upstream-tests.sh (7 changes)
@@ -19,9 +19,7 @@ cp ${ANALYTICS_ZOO_ROOT}/langchain_upstream/libs/community/tests/integration_tes
 
 source ${ANALYTICS_ZOO_ROOT}/python/llm/test/run-llm-check-function.sh
 
-pytest_check_error python -m pytest -s ${ANALYTICS_ZOO_ROOT}/langchain_upstream/test_bigdl_llm.py
-# disable this test temporarily
-# pytest_check_error python -m pytest -s ${ANALYTICS_ZOO_ROOT}/langchain_upstream/test_ipex_llm.py
+pytest_check_error python -m pytest -s ${ANALYTICS_ZOO_ROOT}/langchain_upstream/test_ipex_llm.py
 
 echo ">>> Testing LangChain upstream ipynb"
 cp ${ANALYTICS_ZOO_ROOT}/langchain_upstream/docs/docs/integrations/llms/ipex_llm.ipynb ${ANALYTICS_ZOO_ROOT}/langchain_upstream/langchain_example.ipynb
@@ -29,6 +27,5 @@ bash ./apps/ipynb2py.sh ${ANALYTICS_ZOO_ROOT}/langchain_upstream/langchain_examp
 sed -i '/^get_ipython/d' ${ANALYTICS_ZOO_ROOT}/langchain_upstream/langchain_example.py
 sed -i "s,model_id=\"[^\"]*\",model_id=\"$TEST_IPEXLLM_MODEL_IDS\",g" ${ANALYTICS_ZOO_ROOT}/langchain_upstream/langchain_example.py
 sed -i 's|saved_lowbit_model_path = "./vicuna-7b-1.5-low-bit"|saved_lowbit_model_path = "./langchain_upstream/vicuna-7b-1.5-low-bit"|' ${ANALYTICS_ZOO_ROOT}/langchain_upstream/langchain_example.py
-# disable this test temporarily
-# ipex_workaround_wrapper python ${ANALYTICS_ZOO_ROOT}/langchain_upstream/langchain_example.py
+ipex_workaround_wrapper python ${ANALYTICS_ZOO_ROOT}/langchain_upstream/langchain_example.py
 rm -rf ${ANALYTICS_ZOO_ROOT}/langchain_upstream
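
Note: the script above uses helpers such as pytest_check_error and ipex_workaround_wrapper, sourced from python/llm/test/run-llm-check-function.sh; their definitions are not part of this diff. Below is a minimal, hypothetical bash sketch of what a pytest_check_error-style wrapper could look like, assuming its job is simply to run a command and record failures so the script can report a non-zero exit after all checks have run (letting one failing suite not hide the others). The real helper may differ.

# Hypothetical sketch only -- NOT the actual contents of run-llm-check-function.sh.
# Assumption: the helper runs the given command and records any failure so the
# surrounding script can exit non-zero once every check has been executed.

FAILED=0

pytest_check_error() {
    # Run the wrapped command, e.g. "python -m pytest -s <test_file>".
    "$@"
    local status=$?
    if [ "$status" -ne 0 ]; then
        echo "check failed (exit $status): $*" >&2
        FAILED=1
    fi
}

# Example usage, mirroring the call in the script above:
# pytest_check_error python -m pytest -s ${ANALYTICS_ZOO_ROOT}/langchain_upstream/test_ipex_llm.py
# exit $FAILED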