Commit

change current_device position
hellozmz committed Mar 30, 2024
1 parent f4e4d4a commit f510bd4
Showing 1 changed file with 5 additions and 3 deletions.
dipu/tests/python/individual_scripts/test_rt_ddp.py (5 additions & 3 deletions)
@@ -65,8 +65,9 @@ def forward(self, x):
 def demo_basic_ddp(rank, world_size, port):
     import torch_dipu
 
-    print(f"Running basic DDP example on rank {rank} {torch.cuda.current_device()}")
+    # print(f"Running basic DDP example on rank {rank} {torch.cuda.current_device()}")
     torch.cuda.set_device(rank)
+    print(f"Running basic DDP example on rank {rank} {torch.cuda.current_device()}")
     backend = "nccl"
     dev1 = rank
 
@@ -100,8 +101,9 @@ def demo_basic_ddp(rank, world_size, port):
 def demo_allreduce(rank, world_size, port):
     import torch_dipu
 
-    print(f"Running basic DDP example on rank {rank} {torch.cuda.current_device()}")
+    # print(f"Running basic DDP example on rank {rank} {torch.cuda.current_device()}")
     torch.cuda.set_device(rank)
+    print(f"Running basic DDP example on rank {rank} {torch.cuda.current_device()}")
     dev1 = rank
 
     setup(rank, world_size, port)
@@ -151,7 +153,7 @@ def demo_allgather(rank, world_size, port):
     import torch_dipu
 
     setup(rank, world_size, port)
-
+    print(f'rank={rank}')
     src1 = torch.ones((2, 4)).to(rank)
     dests = torch.zeros((world_size * 2, 4)).to(rank)
     dests = [
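The change moves the device query after the device binding, so each rank reports its own device instead of the default device 0. Below is a minimal, self-contained sketch of that ordering; it is not code from the repository. It assumes a plain torch.distributed NCCL setup with at least world_size GPUs, the helper names (demo, run_demo) and the port value are illustrative, and the real test additionally imports torch_dipu inside each worker and uses its own setup()/cleanup() helpers.

# Sketch of the per-rank ordering this commit enforces:
# torch.cuda.set_device(rank) runs before torch.cuda.current_device()
# is queried or any collective is issued.
import os

import torch
import torch.distributed as dist
import torch.multiprocessing as mp


def demo(rank, world_size, port):
    os.environ["MASTER_ADDR"] = "localhost"
    os.environ["MASTER_PORT"] = str(port)

    # Bind this process to its GPU first ...
    torch.cuda.set_device(rank)
    # ... so current_device() now reflects `rank`, not the default 0.
    print(f"rank {rank} uses device {torch.cuda.current_device()}")

    dist.init_process_group("nccl", rank=rank, world_size=world_size)
    t = torch.ones(2, 2).cuda()  # allocated on the device set above
    dist.all_reduce(t)
    dist.destroy_process_group()


def run_demo(world_size=2, port=29500):
    # Spawn one worker per rank; mp.spawn passes the rank as the first argument.
    mp.spawn(demo, args=(world_size, port), nprocs=world_size, join=True)


if __name__ == "__main__":
    run_demo()

Calling torch.cuda.set_device(rank) first is what makes the subsequent torch.cuda.current_device() print show the per-rank device rather than device 0, which is exactly what the reordered print in this commit verifies.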
