Fix test_randperm_device_compatibility for 1 GPU (#59484) (#59502)
Summary:
Do not try to create tensors on the 2nd device if device_count() == 1

Fixes #{issue number}

Pull Request resolved: #59484

Reviewed By: ngimel

Differential Revision: D28910673

Pulled By: malfet

fbshipit-source-id: e3517f31a463dd049ce8a5155409b7b716c8df18
malfet authored Jun 5, 2021
1 parent abe996a · commit bc446f6
Showing 1 changed file with 7 additions and 6 deletions.
test/test_tensor_creation_ops.py:

```diff
@@ -3331,20 +3331,21 @@ def test_randperm_device_compatibility(self, device):
         # n=0 is a special case that we don't need to use generator, thus no error even if
         # device and generator don't match
         torch.randperm(0, device='cuda:0', generator=torch.Generator(device='cuda:1'))
-        torch.randperm(0, device='cuda:1', generator=torch.Generator(device='cuda:0'))
+        if torch.cuda.device_count() > 1:
+            torch.randperm(0, device='cuda:1', generator=torch.Generator(device='cuda:0'))
         torch.randperm(0, device='cuda', generator=torch.Generator(device='cpu'))
         torch.randperm(0, device='cpu', generator=torch.Generator(device='cuda'))

         for n in (1, 3, 100, 30000):
             torch.randperm(n, device='cuda', generator=torch.Generator(device='cuda:0'))
             torch.randperm(n, device='cuda:0', generator=torch.Generator(device='cuda'))
-            # For cuda:0 to match cuda:1, we are making consistent device type matching
-            # behavior just like torch.randint. Longer term, generator should ignore
-            # device ordinal, since it's not used anyway.
-            torch.randint(low=0, high=n + 1, size=(1,), device="cuda:0", generator=torch.Generator(device='cuda:1'))
-            torch.randperm(n, device='cuda:0', generator=torch.Generator(device='cuda:1'))
+            if torch.cuda.device_count() > 1:
+                # For cuda:0 to match cuda:1, we are making consistent device type matching
+                # behavior just like torch.randint. Longer term, generator should ignore
+                # device ordinal, since it's not used anyway.
+                torch.randint(low=0, high=n + 1, size=(1,), device="cuda:0", generator=torch.Generator(device='cuda:1'))
+                torch.randint(low=0, high=n + 1, size=(1,), device="cuda:1", generator=torch.Generator(device='cuda:0'))
+                torch.randperm(n, device='cuda:0', generator=torch.Generator(device='cuda:1'))
+                torch.randperm(n, device='cuda:1', generator=torch.Generator(device='cuda:0'))

         regex = 'Expected a .* device type for generator but found .*'
```
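For context, the shape of the fix as a standalone snippet (a minimal sketch, not part of the commit; it assumes a machine with at least one visible CUDA device, and the else-branch message is purely illustrative):

```python
import torch

# Creating a tensor on a second device ('cuda:1') raises a RuntimeError when
# only one GPU is visible, so the test now guards every such call.
torch.randperm(3, device='cuda:0')  # safe on any machine with a CUDA GPU
if torch.cuda.device_count() > 1:
    # Cross-device cases are exercised only when a second GPU actually exists.
    torch.randperm(3, device='cuda:1', generator=torch.Generator(device='cuda:0'))
else:
    print('single GPU visible: skipping cross-device checks')
```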

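To spot-check the fix on a single-GPU machine, one plausible invocation is python test/test_tensor_creation_ops.py -k randperm_device_compatibility; the -k filter is standard unittest, but the exact instantiated test name (PyTorch generates per-device variants, e.g. with a _cuda suffix) may differ between versions.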