Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • mosaic/software/parallel-computing/openfpm/openfpm_vcluster
  • argupta/openfpm_vcluster
2 results
Show changes
Commits on Source (10)
......@@ -32,7 +32,9 @@ set (Boost_NO_BOOST_CMAKE OFF)
find_package(Boost 1.72.0 REQUIRED COMPONENTS unit_test_framework iostreams program_options OPTIONAL_COMPONENTS fiber context)
find_package(MPI REQUIRED)
find_package(PETSc)
find_package(OpenMP)
if (NOT CUDA_ON_BACKEND STREQUAL "HIP")
find_package(OpenMP)
endif()
if (CUDA_ON_BACKEND STREQUAL "HIP" AND NOT HIP_FOUND)
find_package(HIP)
endif()
......
......@@ -93,7 +93,6 @@ endif()
add_dependencies(vcluster ofpmmemory)
target_include_directories (vcluster_test PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
target_include_directories (vcluster_test PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/../../openfpm_devices/src/)
target_include_directories (vcluster_test PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/../../openfpm_vcluster/src/)
......@@ -150,8 +149,6 @@ endif()
target_link_libraries(vcluster_dl ${PETSC_LIBRARIES})
target_link_libraries(vcluster ${PETSC_LIBRARIES})
target_link_libraries(vcluster ofpmmemory)
target_link_libraries(vcluster_dl ofpmmemory)
if (TEST_COVERAGE)
target_link_libraries(vcluster_test -lgcov)
......@@ -165,20 +162,20 @@ target_link_libraries(vcluster_test ${MPI_C_LIBRARIES})
target_link_libraries(vcluster_test ${MPI_CXX_LIBRARIES})
install(TARGETS vcluster vcluster_dl DESTINATION openfpm_vcluster/lib COMPONENT OpenFPM)
install(FILES MPI_wrapper/MPI_IallreduceW.hpp
MPI_wrapper/MPI_IrecvW.hpp
MPI_wrapper/MPI_IBcastW.hpp
MPI_wrapper/MPI_IsendW.hpp
MPI_wrapper/MPI_util.hpp
install(TARGETS vcluster vcluster_dl ${ADDITIONAL_OPENFPM_LIBS} DESTINATION openfpm_vcluster/lib COMPONENT OpenFPM)
install(FILES MPI_wrapper/MPI_IallreduceW.hpp
MPI_wrapper/MPI_IrecvW.hpp
MPI_wrapper/MPI_IBcastW.hpp
MPI_wrapper/MPI_IsendW.hpp
MPI_wrapper/MPI_util.hpp
MPI_wrapper/MPI_IAllGather.hpp
DESTINATION openfpm_vcluster/include/MPI_wrapper
COMPONENT OpenFPM)
install(FILES VCluster/VCluster_base.hpp
VCluster/VCluster.hpp
VCluster/VCluster.hpp
VCluster/VCluster_meta_function.hpp
DESTINATION openfpm_vcluster/include/VCluster
DESTINATION openfpm_vcluster/include/VCluster
COMPONENT OpenFPM)
install (FILES util/Vcluster_log.hpp
......
......@@ -24,7 +24,14 @@ public:
*/
/*! \brief Post a non-blocking receive of a raw byte buffer
 *
 * MPI's count argument is a signed 32-bit int, so a single MPI_BYTE
 * receive is limited to 2147483647 elements. Bigger messages are
 * received as MPI_DOUBLE (8 bytes per element) to extend the
 * addressable message size.
 *
 * \param proc source processor rank
 * \param tag message tag
 * \param buf pointer to the destination buffer
 * \param sz size of the buffer in bytes
 * \param req MPI request associated to this receive
 *
 */
static inline void recv(size_t proc , size_t tag ,void * buf, size_t sz, MPI_Request & req)
{
	if (sz <= 2147483647)
	{
		MPI_SAFE_CALL(MPI_Irecv(buf,sz,MPI_BYTE, proc, tag , MPI_COMM_WORLD,&req));
	}
	else
	{
		// sizeof(double) == 8, so the element count is sz >> 3
		// (sz >> 8 would post a receive 32 times too small and truncate
		// the incoming message). The threshold uses <= to agree with
		// MPI_IsendWB::send: otherwise a message of exactly 2147483647
		// bytes would be sent as MPI_BYTE but received as MPI_DOUBLE.
		// NOTE(review): the trailing sz % 8 bytes are not covered by this
		// count; the sender must apply the same rounding convention.
		MPI_SAFE_CALL(MPI_Irecv(buf,sz >> 3,MPI_DOUBLE, proc, tag , MPI_COMM_WORLD,&req));
	}
}
};
......@@ -39,7 +46,14 @@ template<typename T> class MPI_IrecvW
public:
/*! \brief Post a non-blocking receive of an openfpm vector
 *
 * MPI's count argument is a signed 32-bit int, so a single MPI_BYTE
 * receive is limited to 2147483647 elements. Bigger payloads are
 * received as MPI_DOUBLE (8 bytes per element) instead.
 *
 * \param proc source processor rank
 * \param tag message tag
 * \param v destination vector (must already be sized to hold the data)
 * \param req MPI request associated to this receive
 *
 */
static inline void recv(size_t proc , size_t tag ,openfpm::vector<T> & v, MPI_Request & req)
{
	// total payload size in bytes; computed once instead of three times
	size_t sz = v.size() * sizeof(T);

	if (sz <= 2147483647)
	{
		MPI_SAFE_CALL(MPI_Irecv(v.getPointer(), sz,MPI_BYTE, proc, tag , MPI_COMM_WORLD,&req));
	}
	else
	{
		// sizeof(double) == 8, so the element count is sz >> 3
		// (sz >> 8 would post a receive 32 times too small and truncate
		// the incoming message). The threshold uses <= to agree with the
		// MPI_IsendW send side.
		// NOTE(review): the trailing sz % 8 bytes are not covered by this
		// count; the sender must apply the same rounding convention.
		MPI_SAFE_CALL(MPI_Irecv(v.getPointer(), sz >> 3,MPI_DOUBLE, proc, tag , MPI_COMM_WORLD,&req));
	}
}
};
......
......@@ -18,7 +18,15 @@ class MPI_IsendWB
public:
/*! \brief Post a non-blocking send of a raw byte buffer
 *
 * MPI's count argument is a signed 32-bit int, so a single MPI_BYTE
 * send is limited to 2147483647 elements. Bigger messages are sent as
 * MPI_DOUBLE (8 bytes per element) to extend the addressable size.
 *
 * \param proc destination processor rank
 * \param tag message tag
 * \param buf pointer to the data to send
 * \param sz size of the data in bytes
 * \param req MPI request associated to this send
 *
 */
static inline void send(size_t proc , size_t tag ,const void * buf, size_t sz, MPI_Request & req)
{
	if (sz <= 2147483647)
	{
		MPI_Isend(buf, sz,MPI_BYTE, proc, tag , MPI_COMM_WORLD,&req);
	}
	else
	{
		// sizeof(double) == 8 -> element count is sz >> 3.
		// NOTE(review): the trailing sz % 8 bytes are silently dropped by
		// this rounding; this only works if payload sizes are multiples
		// of 8 — confirm against the callers.
		// NOTE(review): unlike the receive wrappers, this call is not
		// wrapped in MPI_SAFE_CALL, so errors go unchecked — confirm
		// whether that is intentional.
		MPI_Isend(buf, sz >> 3 ,MPI_DOUBLE, proc, tag , MPI_COMM_WORLD,&req);
	}
}
};
......@@ -33,7 +41,14 @@ template<typename T, typename Mem, template<typename> class gr> class MPI_IsendW
public:
/*! \brief Post a non-blocking send of an openfpm vector
 *
 * MPI's count argument is a signed 32-bit int, so a single MPI_BYTE
 * send is limited to 2147483647 elements. Bigger payloads are sent as
 * MPI_DOUBLE (8 bytes per element) instead.
 *
 * \param proc destination processor rank
 * \param tag message tag
 * \param v vector containing the data to send
 * \param req MPI request associated to this send
 *
 */
static inline void send(size_t proc , size_t tag ,openfpm::vector<T,Mem,gr> & v, MPI_Request & req)
{
	// total payload size in bytes; computed once instead of three times
	size_t sz = v.size() * sizeof(T);

	if (sz <= 2147483647)
	{
		MPI_Isend(v.getPointer(), sz,MPI_BYTE, proc, tag , MPI_COMM_WORLD,&req);
	}
	else
	{
		// sizeof(double) == 8 -> element count is sz >> 3.
		// NOTE(review): the trailing sz % 8 bytes are silently dropped by
		// this rounding; this only works if sizeof(T) * v.size() is a
		// multiple of 8 — confirm against the callers.
		// NOTE(review): not wrapped in MPI_SAFE_CALL, unlike the receive
		// wrappers — confirm whether that is intentional.
		MPI_Isend(v.getPointer(), sz >> 3,MPI_DOUBLE, proc, tag , MPI_COMM_WORLD,&req);
	}
}
......
......@@ -245,7 +245,10 @@ class Vcluster_base
// std::cout << "TAG: " << SEND_SPARSE + (NBX_cnt + NBX_prc_qcnt)*131072 + i << " " << NBX_cnt << " " << NBX_prc_qcnt << " " << " rank: " << rank() << " " << NBX_prc_cnt_base << " nbx_cycle: " << nbx_cycle << std::endl;
MPI_SAFE_CALL(MPI_Issend(ptr[i], sz[i], MPI_BYTE, prc[i], SEND_SPARSE + (NBX_cnt + NBX_prc_qcnt)*131072 + i, MPI_COMM_WORLD,&req.last()));
if (sz[i] > 2147483647)
{MPI_SAFE_CALL(MPI_Issend(ptr[i], (sz[i] >> 3) + 1 , MPI_DOUBLE, prc[i], SEND_SPARSE + (NBX_cnt + NBX_prc_qcnt)*131072 + i, MPI_COMM_WORLD,&req.last()));}
else
{MPI_SAFE_CALL(MPI_Issend(ptr[i], sz[i], MPI_BYTE, prc[i], SEND_SPARSE + (NBX_cnt + NBX_prc_qcnt)*131072 + i, MPI_COMM_WORLD,&req.last()));}
log.logSend(prc[i]);
}
}
......@@ -628,10 +631,23 @@ public:
if (i >= NQUEUE || NBX_active[i] == NBX_Type::NBX_UNACTIVE || NBX_active[i] == NBX_Type::NBX_KNOWN || NBX_active[i] == NBX_Type::NBX_KNOWN_PRC)
{return;}
int msize;
int msize_;
long int msize;
bool big_data = true;
// Get the message tag and size
MPI_SAFE_CALL(MPI_Get_count(&stat_t,MPI_BYTE,&msize));
MPI_SAFE_CALL(MPI_Get_count(&stat_t,MPI_DOUBLE,&msize_));
if (msize_ == MPI_UNDEFINED)
{
big_data = false;
MPI_SAFE_CALL(MPI_Get_count(&stat_t,MPI_BYTE,&msize_));
msize = msize_;
}
else
{
msize = ((size_t)msize_) << 3;
}
// Ok we check if the TAG come from one of our send TAG
if (stat_t.MPI_TAG >= (int)(SEND_SPARSE + NBX_prc_cnt_base*131072) && stat_t.MPI_TAG < (int)(SEND_SPARSE + (NBX_prc_cnt_base + NBX_prc_qcnt + 1)*131072))
......@@ -664,8 +680,15 @@ public:
memset(ptr,0xFF,msize);
#endif
#endif
MPI_SAFE_CALL(MPI_Recv(ptr,msize,MPI_BYTE,stat_t.MPI_SOURCE,stat_t.MPI_TAG,MPI_COMM_WORLD,&stat_t));
if (big_data == true)
{
// std::cout << "RECEVING BIG MESSAGE " << msize_ << " " << msize << std::endl;
MPI_SAFE_CALL(MPI_Recv(ptr,msize >> 3,MPI_DOUBLE,stat_t.MPI_SOURCE,stat_t.MPI_TAG,MPI_COMM_WORLD,&stat_t));
}
else
{
MPI_SAFE_CALL(MPI_Recv(ptr,msize,MPI_BYTE,stat_t.MPI_SOURCE,stat_t.MPI_TAG,MPI_COMM_WORLD,&stat_t));
}
#ifdef SE_CLASS2
check_valid(ptr,msize);
#endif
......
......@@ -43,12 +43,12 @@ struct unpack_selector_with_prp
{
T unp;
ExtPreAlloc<HeapMemory> & mem = *(new ExtPreAlloc<HeapMemory>(recv_buf.get(i).size(),recv_buf.get(i)));
ExtPreAlloc<Memory> & mem = *(new ExtPreAlloc<Memory>(recv_buf.get(i).size(),recv_buf.get(i)));
mem.incRef();
Unpack_stat ps;
Unpacker<T,HeapMemory>::template unpack<>(mem, unp, ps);
Unpacker<T,Memory>::template unpack<>(mem, unp, ps);
size_t recv_size_old = recv.size();
// Merge the information
......