diff --git a/scripts/distribute/ci_case_auto.sh b/scripts/distribute/ci_case_auto.sh
index 74cd6a3a944a..8707289b7fe2 100755
--- a/scripts/distribute/ci_case_auto.sh
+++ b/scripts/distribute/ci_case_auto.sh
@@ -1854,8 +1854,6 @@ function before_hook_for_gpt() {
     if [[ $FLAGS_install_deps == 0 ]];then
         echo -e "\033[31m ---- Install requirements for GPT auto cases \033[0m"
         python -m pip install -r requirements.txt --force-reinstall
-        python -m pip install --no-cache-dir https://paddlenlp.bj.bcebos.com/wheels/paddlenlp-ci-py3-none-any.whl --force-reinstall --no-dependencies
-        python -c "import paddlenlp; print('paddlenlp commit:',paddlenlp.version.commit)";
     else
         echo -e "\033[31m ---- Skip install requirements for GPT auto cases \033[0m"
     fi
@@ -1886,6 +1884,8 @@ function before_hook_for_llama() {
     env | grep FLAGS
     export http_proxy=${proxy}
     export https_proxy=${proxy}
+    python -m pip install -r $root_path/requirements.txt
+    python -m pip install -r $root_path/requirements-dev.txt
     if [[ ! $FLAGS_download_data =~ "llama" ]];then
         echo -e "\033[31m ---- Download LLaMA data \033[0m"
         rm -rf data
diff --git a/scripts/distribute/ci_case_dy.sh b/scripts/distribute/ci_case_dy.sh
index 23f5b026b9f9..eea1dab79290 100644
--- a/scripts/distribute/ci_case_dy.sh
+++ b/scripts/distribute/ci_case_dy.sh
@@ -515,8 +515,6 @@ function before_hook_for_gpt() {
    if [[ $FLAGS_install_deps == 0 ]];then
        echo -e "\033[31m ---- Install requirements for GPT dygraph cases \033[0m"
        python -m pip install -r requirements.txt --force-reinstall
-        python -m pip install --no-cache-dir https://paddlenlp.bj.bcebos.com/wheels/paddlenlp-ci-py3-none-any.whl --force-reinstall --no-dependencies
-        python -c "import paddlenlp; print('paddlenlp commit:',paddlenlp.version.commit)";
    else
        echo -e "\033[31m ---- Skip install requirements for GPT dygraph cases \033[0m"
    fi
@@ -614,7 +612,7 @@ function before_hook_for_llm_gpt() {
     export http_proxy=${proxy}
     export https_proxy=${proxy}
     python -m pip install -r $root_path/requirements.txt
-    python -m pip install regex
+    python -m pip install -r $root_path/requirements-dev.txt
     if [[ ! $FLAGS_download_data =~ "llm_gpt" ]];then
         echo -e "\033[31m ---- Download llm GPT data \033[0m"
         rm -rf data
diff --git a/scripts/distribute/run_ci.sh b/scripts/distribute/run_ci.sh
index a987e9608e73..5b586f7402f3 100644
--- a/scripts/distribute/run_ci.sh
+++ b/scripts/distribute/run_ci.sh
@@ -43,25 +43,24 @@ install_paddle(){
 }
 
 install_paddlenlp(){
-    echo -e "\033[31m ---- Install paddlenlp \033"
-    cd ${nlp_dir}
+    echo -e "\033[31m ---- Install paddlenlp by set PYTHONPATH \033"
+    export PYTHONPATH=${nlp_dir}:$PYTHONPATH
     sed -i -e "s/paddlenlp/#paddlenlp/g" model_zoo/gpt-3/requirements.txt
-    export http_proxy=${proxy} && export https_proxy=${proxy}
-    python -m pip uninstall paddlenlp -y
-    rm -rf build/ && rm -rf paddlenlp.egg-info/ && rm -rf dist/
-    python -m pip install --ignore-installed -r requirements.txt
-    python -m pip install --ignore-installed -r requirements-dev.txt
-    python setup.py install
-    python setup.py build_ext
-    python setup.py bdist_wheel
-    unset http_proxy && unset https_proxy
-    cd -
+    # export http_proxy=${proxy} && export https_proxy=${proxy}
+    # python -m pip uninstall paddlenlp -y
+    # rm -rf build/ && rm -rf paddlenlp.egg-info/ && rm -rf dist/
+    # python -m pip install --ignore-installed -r requirements.txt
+    # python -m pip install --ignore-installed -r requirements-dev.txt
+    # python setup.py install
+    # python setup.py build_ext
+    # python setup.py bdist_wheel
+    # unset http_proxy && unset https_proxy
+    # cd -
     python -c "import paddlenlp; print('paddlenlp commit:',paddlenlp.version.commit)";
 }
 ####################################
 get_diff_TO_case(){
 cd ${nlp_dir}
-export FLAGS_paddlenlp=0
 for file_name in `git diff --numstat upstream/${AGILE_COMPILE_BRANCH} |awk '{print $NF}'`;do
     arr_file_name=(${file_name//// })
     dir1=${arr_file_name[0]}
@@ -70,9 +69,6 @@ for file_name in `git diff --numstat upstream/${AGILE_COMPILE_BRANCH} |awk '{pri
     dir4=${arr_file_name[3]}
     file_item=$dir1/$dir2/$dir3/$dir4
     echo "file_name:"${file_name}, "path:"${file_item}
-    if [[ ${dir1} =~ "paddlenlp" ]];then
-        export FLAGS_paddlenlp=1
-    fi
     if [ ! -f ${file_name} ];then # 针对pr删掉文件
         continue
     elif [[ ${file_name##*.} == "md" ]] || [[ ${file_name##*.} == "rst" ]] || [[ ${dir1} == "docs" ]];then
@@ -129,10 +125,9 @@ if [[ ${#case_list[*]} -ne 0 ]];then
 
     # Install paddle
     install_paddle
-    if [[ FLAGS_paddlenlp -eq 1 ]] || [[ $(contain_case llama_auto ${case_list[@]}; echo $?) -eq 1 ]];then
-        # 安装本地paddlenlp
-        install_paddlenlp
-    fi
+    # Install paddlenlp
+    install_paddlenlp
+
     case_num=1
     export FLAGS_install_deps=0
     export FLAGS_download_data=""