From fc299dfda2fb4ecdd510c717057ae15475540a13 Mon Sep 17 00:00:00 2001
From: "Hugo U. R. Strand"
Date: Wed, 28 Sep 2022 11:29:27 +0300
Subject: [PATCH 1/5] [mpi] replace nullptr with MPI_COMM_NULL

---
 src/pomerol/TwoParticleGFContainer.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/pomerol/TwoParticleGFContainer.cpp b/src/pomerol/TwoParticleGFContainer.cpp
index 6c7511b4..c6adf74b 100644
--- a/src/pomerol/TwoParticleGFContainer.cpp
+++ b/src/pomerol/TwoParticleGFContainer.cpp
@@ -82,7 +82,7 @@ TwoParticleGFContainer::computeAll_split(bool clearTerms, FreqVec const& freqs,
     MPI_Barrier(comm);

     int comp = 0;
-    MPI_Comm comm_split = nullptr;
+    MPI_Comm comm_split = MPI_COMM_NULL;
     MPI_Comm_split(comm, proc_colors[comm_rank], comm_rank, &comm_split);

     for(auto iter = NonTrivialElements.begin(); iter != NonTrivialElements.end(); iter++, comp++) {
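Background on the change above: `MPI_Comm` is an opaque handle whose representation is implementation-defined (a pointer type in Open MPI, a plain `int` in MPICH-derived libraries), so initializing it from `nullptr` is not portable. `MPI_COMM_NULL` is the null handle the MPI standard defines for exactly this purpose. Below is a minimal standalone sketch of the split-and-check pattern, not pomerol code; the color and key choices are arbitrary illustrations.

```cpp
// Minimal standalone sketch (not pomerol code): portable initialization and
// checking of an MPI communicator handle. MPI_Comm is opaque (an int in
// MPICH-based libraries, a pointer in Open MPI), so MPI_COMM_NULL, not nullptr,
// is the portable "no communicator" value.
#include <mpi.h>

int main(int argc, char** argv) {
    MPI_Init(&argc, &argv);

    int rank = 0;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    // Even ranks form a sub-communicator; odd ranks opt out via MPI_UNDEFINED.
    int color = (rank % 2 == 0) ? 0 : MPI_UNDEFINED;

    MPI_Comm comm_split = MPI_COMM_NULL;
    MPI_Comm_split(MPI_COMM_WORLD, color, rank, &comm_split);

    // Ranks excluded from the split receive MPI_COMM_NULL back.
    if(comm_split != MPI_COMM_NULL)
        MPI_Comm_free(&comm_split);

    MPI_Finalize();
    return 0;
}
```

Comparing against the named constant `MPI_COMM_NULL` is also the portable way to detect ranks that were excluded from a split, which a null-pointer comparison cannot express on implementations where `MPI_Comm` is an integer.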
From e97b20370536c4138fec4c55a5c865403e0dbfd2 Mon Sep 17 00:00:00 2001
From: "Hugo U. R. Strand"
Date: Wed, 28 Sep 2022 11:41:50 +0300
Subject: [PATCH 2/5] [mpi] MPI_CXX_DOUBLE_COMPLEX -> MPI_C_DOUBLE_COMPLEX

---
 src/pomerol/Hamiltonian.cpp            | 4 ++--
 src/pomerol/TwoParticleGF.cpp          | 2 +-
 src/pomerol/TwoParticleGFContainer.cpp | 4 ++--
 src/pomerol/TwoParticleGFPart.cpp      | 6 +++---
 4 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/src/pomerol/Hamiltonian.cpp b/src/pomerol/Hamiltonian.cpp
index 81690f99..48871efb 100644
--- a/src/pomerol/Hamiltonian.cpp
+++ b/src/pomerol/Hamiltonian.cpp
@@ -42,7 +42,7 @@ template void Hamiltonian::prepareImpl(LOperatorTypeRC const& HOp, M
     std::map job_map = skel.run(comm, false);
     MPI_Barrier(comm);

-    MPI_Datatype H_dt = C ? MPI_CXX_DOUBLE_COMPLEX : MPI_DOUBLE;
+    MPI_Datatype H_dt = C ? MPI_C_DOUBLE_COMPLEX : MPI_DOUBLE;

     for(int p = 0; p < static_cast(parts.size()); ++p) {
         auto& part = parts[p];
@@ -77,7 +77,7 @@ template void Hamiltonian::computeImpl(MPI_Comm const& comm) {
     // Start distributing data
     MPI_Barrier(comm);

-    MPI_Datatype H_dt = C ? MPI_CXX_DOUBLE_COMPLEX : MPI_DOUBLE;
+    MPI_Datatype H_dt = C ? MPI_C_DOUBLE_COMPLEX : MPI_DOUBLE;
     for(int p = 0; p < static_cast(parts.size()); ++p) {
         auto& part = parts[p];
         auto& H = part.getMatrix();
diff --git a/src/pomerol/TwoParticleGF.cpp b/src/pomerol/TwoParticleGF.cpp
index 3fbdce86..7f4192a9 100644
--- a/src/pomerol/TwoParticleGF.cpp
+++ b/src/pomerol/TwoParticleGF.cpp
@@ -185,7 +185,7 @@ std::vector TwoParticleGF::compute(bool clear, FreqVec const& freqs
     MPI_Allreduce(MPI_IN_PLACE,
                   m_data.data(),
                   static_cast(m_data.size()),
-                  MPI_CXX_DOUBLE_COMPLEX,
+                  MPI_C_DOUBLE_COMPLEX,
                   MPI_SUM,
                   comm);

diff --git a/src/pomerol/TwoParticleGFContainer.cpp b/src/pomerol/TwoParticleGFContainer.cpp
index c6adf74b..91e9fb9f 100644
--- a/src/pomerol/TwoParticleGFContainer.cpp
+++ b/src/pomerol/TwoParticleGFContainer.cpp
@@ -108,11 +108,11 @@ TwoParticleGFContainer::computeAll_split(bool clearTerms, FreqVec const& freqs,
             freq_data = storage[iter->first];
             freq_data_size = static_cast(freq_data.size());
             MPI_Bcast(&freq_data_size, 1, MPI_LONG, sender, comm);
-            MPI_Bcast(freq_data.data(), freq_data_size, MPI_CXX_DOUBLE_COMPLEX, sender, comm);
+            MPI_Bcast(freq_data.data(), freq_data_size, MPI_C_DOUBLE_COMPLEX, sender, comm);
         } else {
             MPI_Bcast(&freq_data_size, 1, MPI_LONG, sender, comm);
             freq_data.resize(freq_data_size);
-            MPI_Bcast(freq_data.data(), freq_data_size, MPI_CXX_DOUBLE_COMPLEX, sender, comm);
+            MPI_Bcast(freq_data.data(), freq_data_size, MPI_C_DOUBLE_COMPLEX, sender, comm);
         }

         out[iter->first] = freq_data;
diff --git a/src/pomerol/TwoParticleGFPart.cpp b/src/pomerol/TwoParticleGFPart.cpp
index 47a284cc..5c99f498 100644
--- a/src/pomerol/TwoParticleGFPart.cpp
+++ b/src/pomerol/TwoParticleGFPart.cpp
@@ -87,7 +87,7 @@ MPI_Datatype TwoParticleGFPart::NonResonantTerm::mpi_datatype() {
                                 offsetof(NonResonantTerm, Weight)};
     // NOLINTNEXTLINE(cppcoreguidelines-avoid-c-arrays,modernize-avoid-c-arrays)
     MPI_Datatype types[] = {
-        MPI_CXX_DOUBLE_COMPLEX, // ComplexType Coeff
+        MPI_C_DOUBLE_COMPLEX,   // ComplexType Coeff
         MPI_DOUBLE,             // RealType Poles[3]
         MPI_CXX_BOOL,           // bool isz4
         MPI_LONG                // long Weight
@@ -140,8 +140,8 @@ MPI_Datatype TwoParticleGFPart::ResonantTerm::mpi_datatype() {
                                 offsetof(ResonantTerm, Weight)};
     // NOLINTNEXTLINE(cppcoreguidelines-avoid-c-arrays,modernize-avoid-c-arrays)
     MPI_Datatype types[] = {
-        MPI_CXX_DOUBLE_COMPLEX, // ComplexType ResCoeff
-        MPI_CXX_DOUBLE_COMPLEX, // ComplexType NonResCoeff
+        MPI_C_DOUBLE_COMPLEX,   // ComplexType ResCoeff
+        MPI_C_DOUBLE_COMPLEX,   // ComplexType NonResCoeff
         MPI_DOUBLE,             // RealType Poles[3]
         MPI_CXX_BOOL,           // bool isz1z2
         MPI_LONG                // long Weight
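The substitution above is safe because `MPI_CXX_DOUBLE_COMPLEX` and `MPI_C_DOUBLE_COMPLEX` describe the same data: two consecutive doubles, which is also the layout the C++ standard requires for `std::complex<double>`. The following standalone sketch (not pomerol code) shows an in-place reduction over complex data using the C datatype; the buffer size and communicator are arbitrary.

```cpp
// Standalone sketch: reducing std::complex<double> buffers with the C datatype.
// std::complex<double> is layout-compatible with double[2], which is also what
// MPI_C_DOUBLE_COMPLEX (and MPI_CXX_DOUBLE_COMPLEX) describe, so the two
// datatypes are interchangeable for data movement and MPI_SUM reductions.
#include <complex>
#include <vector>
#include <mpi.h>

int main(int argc, char** argv) {
    MPI_Init(&argc, &argv);

    std::vector<std::complex<double>> data(64, std::complex<double>(1.0, -1.0));

    MPI_Allreduce(MPI_IN_PLACE,
                  data.data(),
                  static_cast<int>(data.size()),
                  MPI_C_DOUBLE_COMPLEX,   // used in place of MPI_CXX_DOUBLE_COMPLEX
                  MPI_SUM,
                  MPI_COMM_WORLD);

    MPI_Finalize();
    return 0;
}
```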
Strand" Date: Wed, 28 Sep 2022 11:44:50 +0300 Subject: [PATCH 3/5] [mpi] MPI_CXX_BOOL -> MPI_C_BOOL --- src/pomerol/TwoParticleGFPart.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pomerol/TwoParticleGFPart.cpp b/src/pomerol/TwoParticleGFPart.cpp index 5c99f498..0cef2231 100644 --- a/src/pomerol/TwoParticleGFPart.cpp +++ b/src/pomerol/TwoParticleGFPart.cpp @@ -89,7 +89,7 @@ MPI_Datatype TwoParticleGFPart::NonResonantTerm::mpi_datatype() { MPI_Datatype types[] = { MPI_C_DOUBLE_COMPLEX, // ComplexType Coeff MPI_DOUBLE, // RealType Poles[3] - MPI_CXX_BOOL, // bool isz4 + MPI_C_BOOL, // bool isz4 MPI_LONG // long Weight }; MPI_Type_create_struct(4, blocklengths, displacements, types, &dt); @@ -143,7 +143,7 @@ MPI_Datatype TwoParticleGFPart::ResonantTerm::mpi_datatype() { MPI_C_DOUBLE_COMPLEX, // ComplexType ResCoeff MPI_C_DOUBLE_COMPLEX, // ComplexType NonResCoeff MPI_DOUBLE, // RealType Poles[3] - MPI_CXX_BOOL, // bool isz1z2 + MPI_C_BOOL, // bool isz1z2 MPI_LONG // long Weight }; MPI_Type_create_struct(5, blocklengths, displacements, types, &dt); From 041075b21769ac919fcd2ba98c3f200fca1f843b Mon Sep 17 00:00:00 2001 From: Igor Krivenko Date: Thu, 29 Sep 2022 16:24:25 +0200 Subject: [PATCH 4/5] [mpi] Add a CMake option to switch between MPI_CXX_* and MPI_C_* datatypes --- CMakeLists.txt | 15 +++++++++++++++ src/pomerol/Hamiltonian.cpp | 4 ++-- src/pomerol/TwoParticleGF.cpp | 2 +- src/pomerol/TwoParticleGFContainer.cpp | 4 ++-- src/pomerol/TwoParticleGFPart.cpp | 18 +++++++++--------- 5 files changed, 29 insertions(+), 14 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index b065875d..19539a1e 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -85,6 +85,21 @@ message(STATUS "MPI includes: ${MPI_CXX_INCLUDE_PATH}") message(STATUS "MPI C++ libs: ${MPI_CXX_LIBRARIES}") message(STATUS "MPI flags: ${MPI_CXX_COMPILE_FLAGS} ${MPI_C_COMPILE_FLAGS}") +# +# Workaround for MPI implementations that do not properly support +# MPI_CXX_* datatypes +# +option(Use_MPI_C_datatypes + "Use MPI_C_* datatypes instead of similar MPI_CXX_* datatypes" OFF) +mark_as_advanced(Use_MPI_C_datatypes) +if(Use_MPI_C_datatypes) + add_definitions(-DPOMEROL_MPI_BOOL=MPI_C_BOOL + -DPOMEROL_MPI_DOUBLE_COMPLEX=MPI_C_DOUBLE_COMPLEX) +else(Use_MPI_C_datatypes) + add_definitions(-DPOMEROL_MPI_BOOL=MPI_CXX_BOOL + -DPOMEROL_MPI_DOUBLE_COMPLEX=MPI_CXX_DOUBLE_COMPLEX) +endif(Use_MPI_C_datatypes) + # Boost find_package(Boost 1.54.0 REQUIRED) message(STATUS "Boost includes: ${Boost_INCLUDE_DIRS}" ) diff --git a/src/pomerol/Hamiltonian.cpp b/src/pomerol/Hamiltonian.cpp index 48871efb..1db06772 100644 --- a/src/pomerol/Hamiltonian.cpp +++ b/src/pomerol/Hamiltonian.cpp @@ -42,7 +42,7 @@ template void Hamiltonian::prepareImpl(LOperatorTypeRC const& HOp, M std::map job_map = skel.run(comm, false); MPI_Barrier(comm); - MPI_Datatype H_dt = C ? MPI_C_DOUBLE_COMPLEX : MPI_DOUBLE; + MPI_Datatype H_dt = C ? POMEROL_MPI_DOUBLE_COMPLEX : MPI_DOUBLE; for(int p = 0; p < static_cast(parts.size()); ++p) { auto& part = parts[p]; @@ -77,7 +77,7 @@ template void Hamiltonian::computeImpl(MPI_Comm const& comm) { // Start distributing data MPI_Barrier(comm); - MPI_Datatype H_dt = C ? MPI_C_DOUBLE_COMPLEX : MPI_DOUBLE; + MPI_Datatype H_dt = C ? 
From 041075b21769ac919fcd2ba98c3f200fca1f843b Mon Sep 17 00:00:00 2001
From: Igor Krivenko
Date: Thu, 29 Sep 2022 16:24:25 +0200
Subject: [PATCH 4/5] [mpi] Add a CMake option to switch between MPI_CXX_* and MPI_C_* datatypes

---
 CMakeLists.txt                         | 15 +++++++++++++++
 src/pomerol/Hamiltonian.cpp            |  4 ++--
 src/pomerol/TwoParticleGF.cpp          |  2 +-
 src/pomerol/TwoParticleGFContainer.cpp |  4 ++--
 src/pomerol/TwoParticleGFPart.cpp      | 18 +++++++++---------
 5 files changed, 29 insertions(+), 14 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index b065875d..19539a1e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -85,6 +85,21 @@ message(STATUS "MPI includes: ${MPI_CXX_INCLUDE_PATH}")
 message(STATUS "MPI C++ libs: ${MPI_CXX_LIBRARIES}")
 message(STATUS "MPI flags: ${MPI_CXX_COMPILE_FLAGS} ${MPI_C_COMPILE_FLAGS}")

+#
+# Workaround for MPI implementations that do not properly support
+# MPI_CXX_* datatypes
+#
+option(Use_MPI_C_datatypes
+       "Use MPI_C_* datatypes instead of similar MPI_CXX_* datatypes" OFF)
+mark_as_advanced(Use_MPI_C_datatypes)
+if(Use_MPI_C_datatypes)
+    add_definitions(-DPOMEROL_MPI_BOOL=MPI_C_BOOL
+                    -DPOMEROL_MPI_DOUBLE_COMPLEX=MPI_C_DOUBLE_COMPLEX)
+else(Use_MPI_C_datatypes)
+    add_definitions(-DPOMEROL_MPI_BOOL=MPI_CXX_BOOL
+                    -DPOMEROL_MPI_DOUBLE_COMPLEX=MPI_CXX_DOUBLE_COMPLEX)
+endif(Use_MPI_C_datatypes)
+
 # Boost
 find_package(Boost 1.54.0 REQUIRED)
 message(STATUS "Boost includes: ${Boost_INCLUDE_DIRS}" )
diff --git a/src/pomerol/Hamiltonian.cpp b/src/pomerol/Hamiltonian.cpp
index 48871efb..1db06772 100644
--- a/src/pomerol/Hamiltonian.cpp
+++ b/src/pomerol/Hamiltonian.cpp
@@ -42,7 +42,7 @@ template void Hamiltonian::prepareImpl(LOperatorTypeRC const& HOp, M
     std::map job_map = skel.run(comm, false);
     MPI_Barrier(comm);

-    MPI_Datatype H_dt = C ? MPI_C_DOUBLE_COMPLEX : MPI_DOUBLE;
+    MPI_Datatype H_dt = C ? POMEROL_MPI_DOUBLE_COMPLEX : MPI_DOUBLE;

     for(int p = 0; p < static_cast(parts.size()); ++p) {
         auto& part = parts[p];
@@ -77,7 +77,7 @@ template void Hamiltonian::computeImpl(MPI_Comm const& comm) {
     // Start distributing data
     MPI_Barrier(comm);

-    MPI_Datatype H_dt = C ? MPI_C_DOUBLE_COMPLEX : MPI_DOUBLE;
+    MPI_Datatype H_dt = C ? POMEROL_MPI_DOUBLE_COMPLEX : MPI_DOUBLE;
     for(int p = 0; p < static_cast(parts.size()); ++p) {
         auto& part = parts[p];
         auto& H = part.getMatrix();
diff --git a/src/pomerol/TwoParticleGF.cpp b/src/pomerol/TwoParticleGF.cpp
index 7f4192a9..f11073ad 100644
--- a/src/pomerol/TwoParticleGF.cpp
+++ b/src/pomerol/TwoParticleGF.cpp
@@ -185,7 +185,7 @@ std::vector TwoParticleGF::compute(bool clear, FreqVec const& freqs
     MPI_Allreduce(MPI_IN_PLACE,
                   m_data.data(),
                   static_cast(m_data.size()),
-                  MPI_C_DOUBLE_COMPLEX,
+                  POMEROL_MPI_DOUBLE_COMPLEX,
                   MPI_SUM,
                   comm);

diff --git a/src/pomerol/TwoParticleGFContainer.cpp b/src/pomerol/TwoParticleGFContainer.cpp
index 91e9fb9f..fc61338e 100644
--- a/src/pomerol/TwoParticleGFContainer.cpp
+++ b/src/pomerol/TwoParticleGFContainer.cpp
@@ -108,11 +108,11 @@ TwoParticleGFContainer::computeAll_split(bool clearTerms, FreqVec const& freqs,
             freq_data = storage[iter->first];
             freq_data_size = static_cast(freq_data.size());
             MPI_Bcast(&freq_data_size, 1, MPI_LONG, sender, comm);
-            MPI_Bcast(freq_data.data(), freq_data_size, MPI_C_DOUBLE_COMPLEX, sender, comm);
+            MPI_Bcast(freq_data.data(), freq_data_size, POMEROL_MPI_DOUBLE_COMPLEX, sender, comm);
         } else {
             MPI_Bcast(&freq_data_size, 1, MPI_LONG, sender, comm);
             freq_data.resize(freq_data_size);
-            MPI_Bcast(freq_data.data(), freq_data_size, MPI_C_DOUBLE_COMPLEX, sender, comm);
+            MPI_Bcast(freq_data.data(), freq_data_size, POMEROL_MPI_DOUBLE_COMPLEX, sender, comm);
         }

         out[iter->first] = freq_data;
diff --git a/src/pomerol/TwoParticleGFPart.cpp b/src/pomerol/TwoParticleGFPart.cpp
index 0cef2231..508f2a7a 100644
--- a/src/pomerol/TwoParticleGFPart.cpp
+++ b/src/pomerol/TwoParticleGFPart.cpp
@@ -87,10 +87,10 @@ MPI_Datatype TwoParticleGFPart::NonResonantTerm::mpi_datatype() {
                                 offsetof(NonResonantTerm, Weight)};
     // NOLINTNEXTLINE(cppcoreguidelines-avoid-c-arrays,modernize-avoid-c-arrays)
     MPI_Datatype types[] = {
-        MPI_C_DOUBLE_COMPLEX,   // ComplexType Coeff
-        MPI_DOUBLE,             // RealType Poles[3]
-        MPI_C_BOOL,             // bool isz4
-        MPI_LONG                // long Weight
+        POMEROL_MPI_DOUBLE_COMPLEX, // ComplexType Coeff
+        MPI_DOUBLE,                 // RealType Poles[3]
+        POMEROL_MPI_BOOL,           // bool isz4
+        MPI_LONG                    // long Weight
     };
     MPI_Type_create_struct(4, blocklengths, displacements, types, &dt);
     MPI_Type_commit(&dt);
@@ -140,11 +140,11 @@ MPI_Datatype TwoParticleGFPart::ResonantTerm::mpi_datatype() {
                                 offsetof(ResonantTerm, Weight)};
     // NOLINTNEXTLINE(cppcoreguidelines-avoid-c-arrays,modernize-avoid-c-arrays)
     MPI_Datatype types[] = {
-        MPI_C_DOUBLE_COMPLEX,   // ComplexType ResCoeff
-        MPI_C_DOUBLE_COMPLEX,   // ComplexType NonResCoeff
-        MPI_DOUBLE,             // RealType Poles[3]
-        MPI_C_BOOL,             // bool isz1z2
-        MPI_LONG                // long Weight
+        POMEROL_MPI_DOUBLE_COMPLEX, // ComplexType ResCoeff
+        POMEROL_MPI_DOUBLE_COMPLEX, // ComplexType NonResCoeff
+        MPI_DOUBLE,                 // RealType Poles[3]
+        POMEROL_MPI_BOOL,           // bool isz1z2
+        MPI_LONG                    // long Weight
     };
     MPI_Type_create_struct(5, blocklengths, displacements, types, &dt);
     MPI_Type_commit(&dt);
"Use MPI_C_* datatypes instead of similar MPI_CXX_* datatypes" OFF) + "Workaround: Use MPI_C_* datatypes instead of similar MPI_CXX_* datatypes" + OFF) mark_as_advanced(Use_MPI_C_datatypes) if(Use_MPI_C_datatypes) add_definitions(-DPOMEROL_MPI_BOOL=MPI_C_BOOL diff --git a/README.md b/README.md index 77e6d54e..c63bfe04 100644 --- a/README.md +++ b/README.md @@ -79,6 +79,12 @@ two-particle Green's functions as well as susceptibilities. The library, _libpomerol_ is built. It can be used for linking with executables. Some working executables are given in `prog` subdirectory. +> :warning: It has been [reported](https://github.com/aeantipov/pomerol/pull/60) +that some [MPICH](https://www.mpich.org/)-based MPI implementations, such as HPE +Cray MPI may not properly support `MPI_CXX_*` datatypes, which pomerol's code +depends on. In case you see failing MPI unit tests when linking to said MPI +libraries, try using CMake option `-DUse_MPI_C_datatypes=ON`. + ## Interfacing with your own code and other libraries Check the `tutorial` directory for an example of a pomerol-based code that is