We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 64aea56 · commit bfeb06a (Copy full SHA for bfeb06a)
apex/transformer/utils.py
@@ -8,7 +8,8 @@
8
# The following 4 lines are for backward compatibility with
9
# older PyTorch.
10
if "all_gather_into_tensor" not in dir(torch.distributed):
11
- assert torch.distributed.is_available(), "PyTorch Distributed is Not available or Disabled."
+ if not torch.distributed.is_available():
12
+ raise RuntimeError("PyTorch Distributed is Not available or Disabled.")
13
torch.distributed.all_gather_into_tensor = torch.distributed._all_gather_base
14
15
def ensure_divisibility(numerator, denominator):
0 commit comments