1- import pytest
21import sys
2+
3+ import pytest
34from mpi4py import MPI
45
56from .algo import partition , lower_bound
1112
def mark_skip(item):
    """Attach a `skip` marker to *item* when the world communicator is too small.

    The test requires `get_n_proc_for_test(item)` ranks; if fewer ranks are
    available in ``MPI.COMM_WORLD``, the item is marked skipped with an
    explanatory reason.  ``append=False`` puts the marker first so it takes
    precedence over any markers already present on the item.
    """
    world = MPI.COMM_WORLD
    available = world.size  # mpi4py property, equivalent to Get_size()
    required = get_n_proc_for_test(item)
    reason = f"Not enough procs to execute: {required} required but only {available} available"
    item.add_marker(pytest.mark.skip(reason=reason), append=False)
@@ -38,8 +39,7 @@ def create_sub_comm_of_size(global_comm, n_proc, mpi_comm_creation_function):
3839 assert 0 , 'Unknown MPI communicator creation function. Available: `MPI_Comm_create`, `MPI_Comm_split`'
3940
4041def create_sub_comms_for_each_size (global_comm , mpi_comm_creation_function ):
41- i_rank = global_comm .Get_rank ()
42- n_rank = global_comm .Get_size ()
42+ n_rank = global_comm .size
4343 sub_comms = [None ] * n_rank
4444 for i in range (0 ,n_rank ):
4545 n_proc = i + 1
@@ -48,8 +48,7 @@ def create_sub_comms_for_each_size(global_comm, mpi_comm_creation_function):
4848
4949
5050def add_sub_comm (items , global_comm , test_comm_creation , mpi_comm_creation_function ):
51- i_rank = global_comm .Get_rank ()
52- n_rank = global_comm .Get_size ()
51+ n_rank = global_comm .size
5352
5453 # Strategy 'by_rank': create one sub-communicator by size, from sequential (size=1) to n_rank
5554 if test_comm_creation == 'by_rank' :
@@ -109,7 +108,7 @@ def pytest_runtestloop(self, session) -> bool:
109108 _ = yield
110109 # prevent return value being non-zero (ExitCode.NO_TESTS_COLLECTED)
111110 # when no test run on non-master
112- if self .global_comm .Get_rank () != 0 and session .testscollected == 0 :
111+ if self .global_comm .rank != 0 and session .testscollected == 0 :
113112 session .testscollected = 1
114113 return True
115114
@@ -132,7 +131,7 @@ def pytest_runtest_logreport(self, report):
132131
133132
134133def prepare_items_to_run (items , comm ):
135- i_rank = comm .Get_rank ()
134+ i_rank = comm .rank
136135
137136 items_to_run = []
138137
@@ -164,7 +163,7 @@ def prepare_items_to_run(items, comm):
164163
165164
166165def items_to_run_on_this_proc (items_by_steps , items_to_skip , comm ):
167- i_rank = comm .Get_rank ()
166+ i_rank = comm .rank
168167
169168 items = []
170169
@@ -207,7 +206,7 @@ def pytest_runtestloop(self, session) -> bool:
207206 if session .config .option .collectonly :
208207 return True
209208
210- n_workers = self .global_comm .Get_size ()
209+ n_workers = self .global_comm .size
211210
212211 add_n_procs (session .items )
213212
@@ -217,12 +216,12 @@ def pytest_runtestloop(self, session) -> bool:
217216 items_by_steps , items_to_skip , self .global_comm
218217 )
219218
220- for i , item in enumerate ( items ) :
219+ for item in items :
221220 nextitem = None
222221 run_item_test (item , nextitem , session )
223222
224223 # prevent return value being non-zero (ExitCode.NO_TESTS_COLLECTED) when no test run on non-master
225- if self .global_comm .Get_rank () != 0 and session .testscollected == 0 :
224+ if self .global_comm .rank != 0 and session .testscollected == 0 :
226225 session .testscollected = 1
227226 return True
228227
@@ -244,8 +243,8 @@ def pytest_runtest_logreport(self, report):
244243 gather_report_on_local_rank_0 (report )
245244
246245 # master ranks of each sub_comm must send their report to rank 0
247- if sub_comm .Get_rank () == 0 : # only master are concerned
248- if self .global_comm .Get_rank () != 0 : # if master is not global master, send
246+ if sub_comm .rank == 0 : # only master are concerned
247+ if self .global_comm .rank != 0 : # if master is not global master, send
249248 self .global_comm .send (report , dest = 0 )
250249 elif report .master_running_proc != 0 : # else, recv if test run remotely
251250 # In the line below, MPI.ANY_TAG will NOT clash with communications outside the framework because self.global_comm is private
@@ -342,7 +341,7 @@ def wait_test_to_complete(items_to_run, session, available_procs, inter_comm):
342341 for sub_rank in sub_ranks :
343342 if sub_rank != first_rank_done :
344343 rank_original_idx = inter_comm .recv (source = sub_rank , tag = WORK_DONE_TAG )
345- assert ( rank_original_idx == original_idx ) # sub_rank is supposed to have worked on the same test
344+ assert rank_original_idx == original_idx # sub_rank is supposed to have worked on the same test
346345
347346 # the procs are now available
348347 for sub_rank in sub_ranks :
@@ -499,7 +498,7 @@ def pytest_runtest_logreport(self, report):
499498 sub_comm = report .sub_comm
500499 gather_report_on_local_rank_0 (report )
501500
502- if sub_comm .Get_rank () == 0 : # if local master proc, send
501+ if sub_comm .rank == 0 : # if local master proc, send
503502 # The idea of the scheduler is the following:
504503 # The server schedules test over clients
505504 # A client executes the test then report to the server it is done
0 commit comments