doctest_mpi.h
#pragma once

#ifdef DOCTEST_CONFIG_IMPLEMENT

#include "doctest/extensions/mpi_sub_comm.h"
#include "mpi_reporter.h"
#include <unordered_map>
#include <cstdlib>  // std::getenv
#include <string>   // std::stoi, std::to_string
#include <iostream> // std::cout

namespace doctest {

// Each time an MPI_TEST_CASE is executed on N procs,
// we need a sub-communicator of N procs to execute it.
// It is then registered here and can be re-used
// by other tests that require a sub-comm of the same size.
std::unordered_map<int,mpi_sub_comm> sub_comms_by_size;

// Records whether at least one MPI_TEST_CASE was registered "skipped"
// because there are not enough procs to execute it.
int nb_test_cases_skipped_insufficient_procs = 0;


std::string thread_level_to_string(int thread_lvl);
int mpi_init_thread(int argc, char *argv[], int required_thread_support);
void mpi_finalize();


// Can be safely called before MPI_Init().
// This is needed for MPI_TEST_CASE because we use doctest::skip()
// to prevent execution of tests where there are not enough procs,
// but doctest::skip() is called during test registration, that is, before main(), and hence before MPI_Init().
int mpi_comm_world_size() {
#if defined(OPEN_MPI)
  const char* size_str = std::getenv("OMPI_COMM_WORLD_SIZE");
#elif defined(I_MPI_VERSION) || defined(MPI_VERSION) // Intel MPI + MPICH (at least)
  const char* size_str = std::getenv("PMI_SIZE"); // see https://community.intel.com/t5/Intel-oneAPI-HPC-Toolkit/Environment-variables-defined-by-intel-mpirun/td-p/1096703
#else
  #error "Unknown MPI implementation: please submit an issue or a PR to doctest. Meanwhile, you can look at the output of e.g. `mpirun -np 3 env` to search for an environment variable that contains the size of MPI_COMM_WORLD and extend this code accordingly"
#endif
  if (size_str==nullptr) return 1; // not launched with mpirun/mpiexec, so assume only one process
  return std::stoi(size_str);
}

// Record the size of MPI_COMM_WORLD with mpi_comm_world_size()
int world_size_before_init = mpi_comm_world_size();


std::string thread_level_to_string(int thread_lvl) {
  switch (thread_lvl) {
    case MPI_THREAD_SINGLE:     return "MPI_THREAD_SINGLE";
    case MPI_THREAD_FUNNELED:   return "MPI_THREAD_FUNNELED";
    case MPI_THREAD_SERIALIZED: return "MPI_THREAD_SERIALIZED";
    case MPI_THREAD_MULTIPLE:   return "MPI_THREAD_MULTIPLE";
    default:                    return "Invalid MPI thread level";
  }
}

int mpi_init_thread(int argc, char *argv[], int required_thread_support) {
  int provided_thread_support;
  MPI_Init_thread(&argc, &argv, required_thread_support, &provided_thread_support);

  int world_size;
  MPI_Comm_size(MPI_COMM_WORLD,&world_size);
  if (world_size_before_init != world_size) {
    DOCTEST_INTERNAL_ERROR(
      "doctest found "+std::to_string(world_size_before_init)+" MPI processes before `MPI_Init_thread`,"
      " but MPI_COMM_WORLD is actually of size "+std::to_string(world_size)+".\n"
      "This is most likely due to your MPI implementation not being well supported by doctest. Please report this issue on GitHub"
    );
  }

  if (provided_thread_support != required_thread_support) {
    std::cout <<
      "WARNING: " + thread_level_to_string(required_thread_support) + " was requested,"
      + " but only " + thread_level_to_string(provided_thread_support) + " is provided by the MPI library\n";
  }
  return provided_thread_support;
}

void mpi_finalize() {
  // We need to destroy all created sub-communicators before calling MPI_Finalize()
  doctest::sub_comms_by_size.clear();
  MPI_Finalize();
}

} // namespace doctest

#else // DOCTEST_CONFIG_IMPLEMENT

#include "doctest/extensions/mpi_sub_comm.h"
#include <unordered_map>
#include <tuple>       // std::tie
#include <type_traits> // std::integral_constant
#include <cassert>
#include <exception>

namespace doctest {

extern std::unordered_map<int,mpi_sub_comm> sub_comms_by_size;
extern int nb_test_cases_skipped_insufficient_procs;
extern int world_size_before_init;
int mpi_comm_world_size();

int mpi_init_thread(int argc, char *argv[], int required_thread_support);
void mpi_finalize();

template<int nb_procs, class F>
void execute_mpi_test_case(F func) {
  // Re-use the sub-communicator of size nb_procs if one was already created,
  // otherwise create and register it.
  auto it = sub_comms_by_size.find(nb_procs);
  if (it==end(sub_comms_by_size)) {
    bool was_emplaced = false;
    std::tie(it,was_emplaced) = sub_comms_by_size.emplace(std::make_pair(nb_procs,mpi_sub_comm(nb_procs)));
    assert(was_emplaced);
  }
  const mpi_sub_comm& sub = it->second;
  // Only the procs belonging to the sub-communicator execute the test
  if (sub.comm != MPI_COMM_NULL) {
    func(sub.rank,nb_procs,sub.comm,std::integral_constant<int,nb_procs>{});
  }
}

inline bool
insufficient_procs(int test_nb_procs) {
  static const int world_size = mpi_comm_world_size();
  bool insufficient = test_nb_procs>world_size;
  if (insufficient) {
    ++nb_test_cases_skipped_insufficient_procs;
  }
  return insufficient;
}

} // namespace doctest


#define DOCTEST_MPI_GEN_ASSERTION(rank_to_test, assertion, ...) \
  static_assert(rank_to_test<test_nb_procs_as_int_constant.value, "Trying to assert on a rank greater than the number of procs of the test!"); \
  if(rank_to_test == test_rank) assertion(__VA_ARGS__)

#define DOCTEST_MPI_WARN(rank_to_test, ...)          DOCTEST_MPI_GEN_ASSERTION(rank_to_test,DOCTEST_WARN,__VA_ARGS__)
#define DOCTEST_MPI_CHECK(rank_to_test, ...)         DOCTEST_MPI_GEN_ASSERTION(rank_to_test,DOCTEST_CHECK,__VA_ARGS__)
#define DOCTEST_MPI_REQUIRE(rank_to_test, ...)       DOCTEST_MPI_GEN_ASSERTION(rank_to_test,DOCTEST_REQUIRE,__VA_ARGS__)
#define DOCTEST_MPI_WARN_FALSE(rank_to_test, ...)    DOCTEST_MPI_GEN_ASSERTION(rank_to_test,DOCTEST_WARN_FALSE,__VA_ARGS__)
#define DOCTEST_MPI_CHECK_FALSE(rank_to_test, ...)   DOCTEST_MPI_GEN_ASSERTION(rank_to_test,DOCTEST_CHECK_FALSE,__VA_ARGS__)
#define DOCTEST_MPI_REQUIRE_FALSE(rank_to_test, ...) DOCTEST_MPI_GEN_ASSERTION(rank_to_test,DOCTEST_REQUIRE_FALSE,__VA_ARGS__)

#define DOCTEST_CREATE_MPI_TEST_CASE(name,nb_procs,func) \
  static void func(DOCTEST_UNUSED int test_rank, DOCTEST_UNUSED int test_nb_procs, DOCTEST_UNUSED MPI_Comm test_comm, DOCTEST_UNUSED std::integral_constant<int,nb_procs>); \
  TEST_CASE(name * doctest::description("MPI_TEST_CASE") * doctest::skip(doctest::insufficient_procs(nb_procs))) { \
    doctest::execute_mpi_test_case<nb_procs>(func); \
  } \
  static void func(DOCTEST_UNUSED int test_rank, DOCTEST_UNUSED int test_nb_procs, DOCTEST_UNUSED MPI_Comm test_comm, DOCTEST_UNUSED std::integral_constant<int,nb_procs> test_nb_procs_as_int_constant)
// DOC: test_rank, test_nb_procs, and test_comm are available UNDER THESE SPECIFIC NAMES in the body of the unit test
// DOC: test_nb_procs_as_int_constant is equal to test_nb_procs, but as a compile-time value
//      (used in CHECK-like macros to assert that the checked rank exists)

#define DOCTEST_MPI_TEST_CASE(name,nb_procs) \
  DOCTEST_CREATE_MPI_TEST_CASE(name,nb_procs,DOCTEST_ANONYMOUS(DOCTEST_MPI_FUNC))


// == SHORT VERSIONS OF THE MACROS
#if !defined(DOCTEST_CONFIG_NO_SHORT_MACRO_NAMES)
#define MPI_WARN           DOCTEST_MPI_WARN
#define MPI_CHECK          DOCTEST_MPI_CHECK
#define MPI_REQUIRE        DOCTEST_MPI_REQUIRE
#define MPI_WARN_FALSE     DOCTEST_MPI_WARN_FALSE
#define MPI_CHECK_FALSE    DOCTEST_MPI_CHECK_FALSE
#define MPI_REQUIRE_FALSE  DOCTEST_MPI_REQUIRE_FALSE

#define MPI_TEST_CASE      DOCTEST_MPI_TEST_CASE
#endif // DOCTEST_CONFIG_NO_SHORT_MACRO_NAMES


#endif // DOCTEST_CONFIG_IMPLEMENT
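
Usage sketch (not part of the header): a minimal runner translation unit built on the implementation branch above. The file name main.cpp is hypothetical, and "MpiConsoleReporter" is assumed to be the name under which mpi_reporter.h registers its rank-aware console reporter; this is a sketch, not a definitive setup. mpi_init_thread() and mpi_finalize() stand in for the usual MPI_Init()/MPI_Finalize() calls so that the world-size consistency check and the sub-communicator cleanup above are performed.

// main.cpp (hypothetical file name)
#define DOCTEST_CONFIG_IMPLEMENT
#include "doctest/extensions/doctest_mpi.h"

int main(int argc, char** argv) {
  // Forwards to MPI_Init_thread() and checks MPI_COMM_WORLD against the pre-init size
  doctest::mpi_init_thread(argc, argv, MPI_THREAD_SINGLE);

  doctest::Context ctx;
  // Assumption: "MpiConsoleReporter" is the reporter name registered by mpi_reporter.h;
  // with doctest's default console reporter, every rank prints its own full report instead.
  ctx.setOption("reporters", "MpiConsoleReporter");
  ctx.applyCommandLine(argc, argv);

  int test_result = ctx.run();

  doctest::mpi_finalize(); // destroys the cached sub-communicators, then calls MPI_Finalize()
  return test_result;
}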
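
And a test translation unit, compiled without DOCTEST_CONFIG_IMPLEMENT so the macro branch of the header is taken. The file name and test contents are illustrative; the names test_rank, test_nb_procs, and test_comm are exactly those documented in the DOC comments above, and MPI_CHECK(rank, expr) evaluates the check only on the given rank of the test's sub-communicator.

// mpi_test.cpp (hypothetical file name)
#include "doctest/extensions/doctest_mpi.h"

// Runs on a 2-proc sub-communicator carved out of MPI_COMM_WORLD; if launched with
// fewer than 2 procs, the test is registered as skipped via doctest::skip() and
// nb_test_cases_skipped_insufficient_procs is incremented.
MPI_TEST_CASE("point-to-point exchange over two procs", 2) {
  int x = -1;
  if (test_rank == 0) {
    x = 42;
    MPI_Send(&x, 1, MPI_INT, /*dest=*/1, /*tag=*/0, test_comm);
  } else { // in a 2-proc test, the other rank is 1
    MPI_Recv(&x, 1, MPI_INT, /*source=*/0, /*tag=*/0, test_comm, MPI_STATUS_IGNORE);
  }
  MPI_CHECK(0, x == 42); // evaluated on rank 0 only
  MPI_CHECK(1, x == 42); // evaluated on rank 1 only
  // MPI_CHECK(2, ...) would not compile: the static_assert in
  // DOCTEST_MPI_GEN_ASSERTION rejects ranks outside the 2-proc test
}

Launch with e.g. `mpirun -np 2 ./my_tests`; with `-np 1` the test case is reported as skipped.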