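# Launch with one process per GPU, e.g. on a single node with 2 GPUs
# (the filename "broadcast.py" is an assumption for this example):
#   torchrun --nproc_per_node=2 broadcast.py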
import torch
import torch.distributed as dist

def init_process():
    # NCCL is the standard backend for GPU collectives; torchrun supplies
    # RANK, WORLD_SIZE, and MASTER_ADDR/PORT through the environment.
    dist.init_process_group(backend="nccl")
    # Pin this process to one GPU (assumes one process per GPU on a single
    # node, so the global rank equals the local device index).
    torch.cuda.set_device(dist.get_rank())

def example_broadcast():
    # Rank 0 holds the data to send; every other rank allocates a buffer of
    # the same shape and dtype to receive into.
    if dist.get_rank() == 0:
        tensor = torch.tensor([1, 2, 3, 4], dtype=torch.float32).cuda()
    else:
        tensor = torch.zeros(4, dtype=torch.float32).cuda()

    print(f"Before broadcast on rank {dist.get_rank()}: {tensor}")
    # Copy rank 0's tensor to every rank, in place.
    dist.broadcast(tensor, src=0)
    print(f"After broadcast on rank {dist.get_rank()}: {tensor}")

init_process()
example_broadcast()
# Clean up the process group before exiting.
dist.destroy_process_group()