
Commit

fix
baptistecolle committed Jan 7, 2025
1 parent 9585870 commit d8d6f64
Showing 2 changed files with 5 additions and 4 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/debug-dind.yml
@@ -10,7 +10,7 @@ jobs:
group: gcp-ct5lp-hightpu-8t
container:
image: us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:r2.4.0_3.10_tpuvm
-       options: --shm-size "16gb" --ipc host --privileged ${{ vars.V5_LITEPOD_8_ENV}} -v /mnt/hf_cache:/mnt/hf_cache -e PJRT_DEVICE=TPU
+       options: --shm-size "16gb" --ipc host --privileged ${{ vars.V5_LITEPOD_8_ENV}} --network test-network -v /mnt/hf_cache:/mnt/hf_cache -e PJRT_DEVICE=TPU

steps:
- name: Checkout code
@@ -33,7 +33,7 @@ jobs:
run: |
# Run the whoami container with environment variables
# @pauline adding --ipc host, --privileged and/or --network host did not help me. I also tried to use different ports 80, 8080, 5001
-         docker run ${{ vars.V5_LITEPOD_8_ENV}} -d --name network-test -p 5001:80 traefik/whoami
+         docker run ${{ vars.V5_LITEPOD_8_ENV}} -d --name network-test --network test-network -p 5001:80 traefik/whoami
# Wait for container to be ready
sleep 5
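Context for the change above: the comment in the run step notes that --ipc host, --privileged, and --network host did not solve the connectivity problem, and that several host ports (80, 8080, 5001) were tried. This commit instead attaches both the job container and the whoami container to a shared user-defined Docker network, so they can resolve each other by container name. A minimal sketch of that pattern, assuming test-network is created on the runner host beforehand (the creation step is not part of this commit) and using curlimages/curl purely as an illustrative client:

# Create the shared user-defined network on the Docker host (no-op if it already exists).
docker network inspect test-network >/dev/null 2>&1 || docker network create test-network

# Start the target service attached to that network; traefik/whoami listens on port 80 inside the container.
docker run -d --name network-test --network test-network traefik/whoami

# Any other container on test-network can reach it by container name, no published port needed.
docker run --rm --network test-network curlimages/curl -s http://network-test:80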
5 changes: 3 additions & 2 deletions .github/workflows/test-pytorch-xla-tpu-tgi-integration.yml
@@ -1,7 +1,7 @@
name: Optimum TPU / Test TGI on TPU / Integration Tests

on:
-  # push:
+  push:
pull_request:
branches: [ main ]
paths:
@@ -20,7 +20,7 @@ jobs:

container:
image: us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:r2.4.0_3.10_tpuvm
-       options: --shm-size "16gb" --ipc host --privileged ${{ vars.V5_LITEPOD_8_ENV}} -v /mnt/hf_cache:/mnt/hf_cache -e PJRT_DEVICE=TPU
+       options: --shm-size "16gb" --ipc host --privileged ${{ vars.V5_LITEPOD_8_ENV}} --network test-network -v /mnt/hf_cache:/mnt/hf_cache -e PJRT_DEVICE=TPU

env:
PJRT_DEVICE: TPU
@@ -49,6 +49,7 @@ jobs:
run: |
# Start docker container in background
docker run -d --name tgi-tests-gpt2 \
+           --network test-network \
-e LOG_LEVEL=info,text_generation_router,text_generation_launcher=debug \
-e HF_HUB_ENABLE_HF_TRANSFER=0 \
-e MAX_BATCH_SIZE=4 \
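With the TGI container joined to the same test-network as the workflow's job container, the integration tests can address it by container name rather than through a port published on the host. A rough sketch of such a check, assuming the text-generation-inference server listens on port 80 inside the container and exposes its /health route (adjust host and port if the tests target different values):

# Run from inside the job container, which is attached to test-network via the container options above.
curl -sf http://tgi-tests-gpt2:80/health && echo "TGI reachable by container name on test-network"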
