From 7ac690a95847a29d8420abfe4548835cadf0b747 Mon Sep 17 00:00:00 2001
From: Sadra Barikbin
Date: Wed, 4 Sep 2024 19:36:45 +0330
Subject: [PATCH] Update ignite/distributed/utils.py

Co-authored-by: vfdev
---
 ignite/distributed/utils.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/ignite/distributed/utils.py b/ignite/distributed/utils.py
index ee1449133b3..d03e265d6ae 100644
--- a/ignite/distributed/utils.py
+++ b/ignite/distributed/utils.py
@@ -356,10 +356,10 @@ def all_reduce(
 def all_gather_tensors_with_shapes(
     tensor: torch.Tensor, shapes: Sequence[Sequence[int]], group: Optional[Union[Any, List[int]]] = None
 ) -> List[torch.Tensor]:
-    """Gather tensors of possibly different shapes but with the same number of dimensions from across processes.
+    """Gather tensors of possibly different shapes but with the same number of dimensions across processes.
 
     This function gets the shapes of participating tensors as input so you should know them beforehand. If your tensors
-    are of different number of dimensions or you don't know their shapes beforehand, you could use
+    have different numbers of dimensions or you don't know their shapes beforehand, you can use
     `torch.distributed.all_gather_object()`, otherwise this method is quite faster.
 
     Examples:
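
For context, a minimal usage sketch of the function whose docstring this patch edits (not part of the patch itself). It assumes a distributed group has been set up, e.g. via `ignite.distributed.Parallel`; in a single-process run, ignite's `idist` helpers fall back to rank 0 and world size 1. The helper name `gather_ragged_tensors` is hypothetical; `all_gather_tensors_with_shapes` is imported from the module this patch touches.

```python
import torch

import ignite.distributed as idist
from ignite.distributed.utils import all_gather_tensors_with_shapes


def gather_ragged_tensors() -> None:
    # Hypothetical helper illustrating the docstring's contract.
    rank = idist.get_rank()
    world_size = idist.get_world_size()

    # Every rank holds a 2D tensor: the same number of dimensions
    # everywhere, but a rank-dependent first dimension.
    local = torch.full((rank + 1, 3), float(rank))

    # Per the docstring, the shapes must be known on all ranks beforehand.
    shapes = [[r + 1, 3] for r in range(world_size)]

    gathered = all_gather_tensors_with_shapes(local, shapes)
    # `gathered` is a list of world_size tensors; gathered[r] came from rank r.
    assert gathered[rank].shape == (rank + 1, 3)


if __name__ == "__main__":
    gather_ragged_tensors()
```

If the per-rank shapes are not known ahead of time, the docstring's suggested fallback is `torch.distributed.all_gather_object()`, which serializes arbitrary objects at the cost of extra overhead.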