import os

import torch
import torch.distributed as dist
def init_process() -> None:
    """Initialize the default process group (NCCL backend) and bind this
    process to one GPU.

    Uses the ``LOCAL_RANK`` environment variable (set by ``torchrun``) to pick
    the GPU, so the binding is correct on multi-node launches where the global
    rank exceeds the per-node device count. Falls back to the global rank for
    single-node runs launched without ``LOCAL_RANK``.
    """
    dist.init_process_group(backend="nccl")
    # Global rank is only a valid device index on a single node; prefer the
    # per-node LOCAL_RANK when the launcher provides it.
    local_rank = int(os.environ.get("LOCAL_RANK", dist.get_rank()))
    torch.cuda.set_device(local_rank)
def example_reduce():
    """Demonstrate an all-reduce (SUM) across the process group.

    Each rank contributes a length-4 float32 GPU tensor filled with its own
    rank; after ``all_reduce`` every rank holds the element-wise sum over all
    ranks. Prints the tensor before and after the collective.
    """
    rank = dist.get_rank()
    tensor = torch.full((4,), float(rank), dtype=torch.float32).cuda()
    print(f"Before reduce on rank {rank}: {tensor}")
    # In-place collective: every rank ends up with the same summed tensor.
    dist.all_reduce(tensor, op=dist.ReduceOp.SUM)
    print(f"After reduce on rank {rank}: {tensor}")
if __name__ == "__main__":
    # Guard the entry point so importing this module does not try to join a
    # process group; run under torchrun to launch one process per GPU.
    init_process()
    example_reduce()
    dist.destroy_process_group()