Do not init process group if already initialized

Signed-off-by: Antoni Baum <antoni.baum@protonmail.com>
Author: Antoni Baum
Date:   2023-05-31 22:17:01 +00:00
Parent: db2ebe3947
Commit: 1809159aff

@@ -2,6 +2,7 @@ import os
 
 import torch
 from datetime import timedelta
+from loguru import logger
 
 
 def initialize_torch_distributed():
@@ -23,13 +24,16 @@ def initialize_torch_distributed():
         backend = "gloo"
         options = None
 
-    # Call the init process.
-    torch.distributed.init_process_group(
-        backend=backend,
-        world_size=world_size,
-        rank=rank,
-        timeout=timedelta(seconds=60),
-        pg_options=options,
-    )
+    if not torch.distributed.is_initialized():
+        # Call the init process.
+        torch.distributed.init_process_group(
+            backend=backend,
+            world_size=world_size,
+            rank=rank,
+            timeout=timedelta(seconds=60),
+            pg_options=options,
+        )
+    else:
+        logger.warning("torch.distributed is already initialized.")
 
     return torch.distributed.group.WORLD, rank, world_size
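
For context, below is a minimal runnable sketch of the behavior this commit introduces: guarding init_process_group with is_initialized() makes initialization idempotent, so a process group created earlier by an outer framework is reused instead of triggering a re-init error. The environment defaults (RANK, WORLD_SIZE, MASTER_ADDR, MASTER_PORT) and the single-process gloo setup are assumptions for illustration; the real function also selects an NCCL backend when CUDA is available, which is elided here.

import os
from datetime import timedelta

import torch
from loguru import logger


def initialize_torch_distributed():
    # Assumed defaults for a single-process demo; the real server reads
    # these from the environment set by its launcher.
    rank = int(os.getenv("RANK", "0"))
    world_size = int(os.getenv("WORLD_SIZE", "1"))

    if not torch.distributed.is_initialized():
        # Call the init process (backend selection simplified to gloo).
        torch.distributed.init_process_group(
            backend="gloo",
            world_size=world_size,
            rank=rank,
            timeout=timedelta(seconds=60),
        )
    else:
        # A process group may already exist if the caller initialized
        # torch.distributed first; skip re-init and only warn.
        logger.warning("torch.distributed is already initialized.")

    return torch.distributed.group.WORLD, rank, world_size


if __name__ == "__main__":
    # Required by the default env:// init method; values are assumptions.
    os.environ.setdefault("MASTER_ADDR", "127.0.0.1")
    os.environ.setdefault("MASTER_PORT", "29500")
    # Calling twice is now safe: the second call only logs a warning.
    initialize_torch_distributed()
    initialize_torch_distributed()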