7474 libswresample-dev
7575 libswscale-dev
7676 pciutils
77+ python3-dev
7778 TORCH_INDEX : ' --pre --index-url https://download.pytorch.org/whl/nightly/xpu'
7879 AGENT_TOOLSDIRECTORY : /tmp/xpu-tool
7980
@@ -154,8 +155,12 @@ jobs:
154155 env :
155156 PYTORCH_DEBUG_XPU_FALLBACK : ' 1'
156157 TRANSFORMERS_TEST_DEVICE_SPEC : ' spec.py'
157- # enable pytest parallel run, and continue others if meets crash case such as segmentation fault
158- PYTEST_ADDOPTS : -rsf --timeout 600 --timeout_method=thread --dist worksteal ${{ needs.prepare.outputs.pytest_extra_args }}
158+ # Usage of `--dist loadfile` is a must as HF tests have complex setups including
159+ # setUpClass and @first_run clauses. So the 'loadfile' strategy helps minimize
160+ # the scope of race conditions. Besides, that's how HF Transformers recommends
161+ # running tests and how they run them in their own CI.
162+ # See: https://github.com/huggingface/transformers/blob/v4.56.2/CONTRIBUTING.md?plain=1#L312
163+ PYTEST_ADDOPTS : -rsf --timeout 600 --timeout_method=thread --dist loadfile ${{ needs.prepare.outputs.pytest_extra_args }}
159164 strategy :
160165 fail-fast : false
161166 max-parallel : 1
@@ -224,21 +229,9 @@ jobs:
224229 fi
225230 - name : Prepare OS environment
226231 run : |
227- # as jobs might run in parallel on the same system, apt-get might
228- # step into the lock hold by other job
229- start_time=$SECONDS
230- while ! sudo apt-get update; do
231- sleep 1;
232- if (( $SECONDS - start_time > 60 )); then false; fi
233- done
234- while ! sudo apt-get install -y $PACKAGES; do
235- sleep 1;
236- if (( $SECONDS - start_time > 60 )); then false; fi
237- done
238- while ! git lfs install; do
239- sleep 1;
240- if (( $SECONDS - start_time > 60 )); then false; fi
241- done
232+ sudo apt-get update
233+ sudo apt-get install -y $PACKAGES
234+ git lfs install
242235 - name : Setup python-${{ env.python }}
243236 uses : actions/setup-python@v5
244237 with :
@@ -250,12 +243,17 @@ jobs:
250243 pip install -U pip wheel setuptools
251244 - name : Prepare pytorch and deps
252245 run : |
253- pip install junitparser
254246 pip install $TORCH_INDEX \
255247 torch==${{ needs.prepare.outputs.torch }} \
256248 torchvision==${{ needs.prepare.outputs.torchvision }} \
257249 torchaudio==${{ needs.prepare.outputs.torchaudio }} \
258250 pytorch-triton-xpu==${{needs.prepare.outputs.triton }}
251+ pip install \
252+ junitparser \
253+ pytest \
254+ pytest-timeout \
255+ pytest-xdist \
256+ pytest-shard
259257 - name : Prepare Transformers
260258 run : |
261259 pwd
@@ -281,8 +279,6 @@ jobs:
281279 xpu-smi discovery -y --json --dump -1
282280 - name : Sanity check installed packages
283281 run : |
284- # Use latest pytest
285- pip install -U pytest pytest-timeout pytest-xdist pytest-shard
286282 # These checks are to exit earlier if for any reason Transformers
287283 # reinstalled torch packages back to CUDA versions (not expected).
288284 pip show torch | grep Version | grep xpu
0 commit comments