1 file changed: +0 −3 lines changed
@@ -4,7 +4,6 @@
 import torch.distributed as dist
 import torch.distributed._symmetric_memory as symm_mem
 from torch.testing._internal.common_distributed import MultiProcessTestCase
-from torch.testing._internal.common_distributed import skip_if_lt_x_gpu
 from torch.testing._internal.common_utils import instantiate_parametrized_tests
 from torch.testing._internal.common_utils import run_tests
 
@@ -43,7 +42,6 @@ def _init_process(self):
         )
         torch.manual_seed(42 + self.rank)
 
-    @skip_if_lt_x_gpu(4)
     def test_all_gather_matmul(self):
         self._init_process()
 
@@ -100,7 +98,6 @@ def test_all_gather_matmul(self):
         torch.cuda.current_stream().wait_stream(backend_stream)
         dist.destroy_process_group()
 
-    @skip_if_lt_x_gpu(4)
     def test_all_reduce(self):
         self._init_process()
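For context on the removed decorator: @skip_if_lt_x_gpu(4) skips a test when fewer than four GPUs are visible on the host. Below is a minimal sketch of the pattern the diff modifies, assuming a MultiProcessTestCase-based symmetric-memory test; the class name, world_size value, and test body are illustrative assumptions, while the imports, the seeding in _init_process, and the decorator itself come from the diff.

# Sketch only: names other than the imported utilities are assumptions,
# not taken from the PR.
import torch
import torch.distributed as dist
import torch.distributed._symmetric_memory as symm_mem
from torch.testing._internal.common_distributed import MultiProcessTestCase
from torch.testing._internal.common_distributed import skip_if_lt_x_gpu
from torch.testing._internal.common_utils import run_tests


class SymmetricMemoryExampleTest(MultiProcessTestCase):
    def setUp(self):
        super().setUp()
        # MultiProcessTestCase spawns one subprocess per rank.
        self._spawn_processes()

    @property
    def world_size(self):
        return 4

    def _init_process(self):
        # Each spawned rank binds to its own GPU and joins the process group.
        torch.cuda.set_device(self.rank)
        dist.init_process_group(
            backend="nccl",
            world_size=self.world_size,
            rank=self.rank,
            store=dist.FileStore(self.file_name, self.world_size),
        )
        torch.manual_seed(42 + self.rank)

    # Before this change, the decorator skipped the test on hosts with
    # fewer than 4 GPUs; the PR removes it from both tests.
    @skip_if_lt_x_gpu(4)
    def test_all_reduce(self):
        self._init_process()
        # ... symmetric-memory collective under test (elided) ...
        dist.destroy_process_group()


if __name__ == "__main__":
    run_tests()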