Commit
Use config flag for locality-aware mpi
Gerald Paul Bowen Collom committed Jun 28, 2023
1 parent 53dfbe3 commit a918397
Showing 4 changed files with 244 additions and 2 deletions.
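
Everything this commit adds in the two files shown below sits behind the HYPRE_USING_NODE_AWARE_MPI preprocessor guard, so a build that never defines that macro compiles exactly as before; the configure change that actually exposes the flag is presumably in one of the two changed files not shown in this excerpt. As a rough sketch (not part of the commit), the guard pattern behaves like the following standalone C file, which only compiles the MPI Advance (MPIX_*) path when the macro is defined, e.g. via -DHYPRE_USING_NODE_AWARE_MPI:

/* guard_sketch.c -- minimal sketch of the compile-time guard; not part of the commit.
 * Build without the flag:  cc guard_sketch.c
 * Build with the flag:     cc -DHYPRE_USING_NODE_AWARE_MPI guard_sketch.c
 * (the real build would additionally need MPI and MPI Advance headers/libraries) */
#include <stdio.h>

int main(void)
{
#ifdef HYPRE_USING_NODE_AWARE_MPI
   /* locality-aware path: mpi_advance.h, MPIX_Comm / MPIX_Request members, MPIX_* calls */
   printf("node-aware MPI path compiled in\n");
#else
   /* default path: stock hypre_MPI_* communication, no MPI Advance dependency */
   printf("default MPI path\n");
#endif
   return 0;
}
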
15 changes: 15 additions & 0 deletions src/parcsr_mv/_hypre_parcsr_mv.h
@@ -23,6 +23,10 @@ extern "C" {
#ifndef HYPRE_PAR_CSR_COMMUNICATION_HEADER
#define HYPRE_PAR_CSR_COMMUNICATION_HEADER

#ifdef HYPRE_USING_NODE_AWARE_MPI
#include "mpi_advance.h"
#endif

/*--------------------------------------------------------------------------
* hypre_ParCSRCommPkg:
* Structure containing information for doing communications
@@ -59,13 +63,20 @@ typedef struct
   void                 *recv_data_buffer;
   HYPRE_Int             num_requests;
   hypre_MPI_Request    *requests;
#ifdef HYPRE_USING_NODE_AWARE_MPI
   MPIX_Request         *Xrequest;
#endif
} hypre_ParCSRCommHandle;

typedef hypre_ParCSRCommHandle hypre_ParCSRPersistentCommHandle;

typedef struct _hypre_ParCSRCommPkg
{
   MPI_Comm               comm;
#ifdef HYPRE_USING_NODE_AWARE_MPI
   MPIX_Comm             *neighbor_comm;
   MPIX_Comm             *neighborT_comm;
#endif
   HYPRE_Int              num_components;
   HYPRE_Int              num_sends;
   HYPRE_Int             *send_procs;
@@ -75,6 +86,10 @@
   HYPRE_Int              num_recvs;
   HYPRE_Int             *recv_procs;
   HYPRE_Int             *recv_vec_starts;
#ifdef HYPRE_USING_NODE_AWARE_MPI
   long                  *global_send_indices;
   long                  *global_recv_indices;
#endif
   /* remote communication information */
   hypre_MPI_Datatype    *send_mpi_types;
   hypre_MPI_Datatype    *recv_mpi_types;
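
The MPIX_Comm and MPIX_Request types above come from MPI Advance's mpi_advance.h. Judging from how the communicators are built in new_commpkg.c below, neighbor_comm describes the forward halo exchange (sources = the ranks this process receives from, destinations = the ranks it sends to), and neighborT_comm is the same graph with the two lists swapped, for communication in the reverse direction. Assuming MPIX_Dist_graph_create_adjacent mirrors the standard MPI-3 routine of the same name, a minimal standard-MPI sketch of such a call, with a made-up ring pattern as the neighbor lists, looks like this:

/* dist_graph_sketch.c -- standard MPI-3 analogue of the MPIX_* calls in this commit;
 * the ring-shaped neighbor lists are made up for illustration. */
#include <mpi.h>

int main(int argc, char **argv)
{
   int size, rank;
   MPI_Comm neighbor_comm;

   MPI_Init(&argc, &argv);
   MPI_Comm_size(MPI_COMM_WORLD, &size);
   MPI_Comm_rank(MPI_COMM_WORLD, &rank);

   /* hypothetical pattern: a ring, so the per-rank lists stay globally consistent */
   int recv_proc = (rank - 1 + size) % size;   /* rank we receive from -> graph source      */
   int send_proc = (rank + 1) % size;          /* rank we send to      -> graph destination */

   MPI_Dist_graph_create_adjacent(MPI_COMM_WORLD,
                                  1, &recv_proc, MPI_UNWEIGHTED,   /* indegree, sources, weights       */
                                  1, &send_proc, MPI_UNWEIGHTED,   /* outdegree, destinations, weights */
                                  MPI_INFO_NULL, 0 /* no reorder */, &neighbor_comm);

   /* neighborhood collectives (e.g. MPI_Neighbor_alltoallv) on neighbor_comm now move data
    * along exactly the declared edges */

   MPI_Comm_free(&neighbor_comm);
   MPI_Finalize();
   return 0;
}

Declaring the exchange pattern once as a graph communicator is what lets the library replace hand-rolled point-to-point loops with neighborhood collectives that know the full pattern up front, which is where the locality-aware optimizations can be applied.
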
25 changes: 25 additions & 0 deletions src/parcsr_mv/new_commpkg.c
@@ -11,6 +11,9 @@
*-----------------------------------------------------*/

#include "_hypre_parcsr_mv.h"
#ifdef HYPRE_USING_NODE_AWARE_MPI
#include <mpi.h>
#endif

/* some debugging tools*/
#define mydebug 0
@@ -546,6 +549,28 @@ hypre_ParCSRCommPkgCreateApart
                                       num_sends, send_procs, send_map_starts,
                                       send_map_elmts,
                                       &comm_pkg);
#ifdef HYPRE_USING_NODE_AWARE_MPI
   MPIX_Dist_graph_create_adjacent( comm, num_recvs, hypre_ParCSRCommPkgRecvProcs(comm_pkg),
                                    MPI_UNWEIGHTED, num_sends, hypre_ParCSRCommPkgSendProcs(comm_pkg),
                                    MPI_UNWEIGHTED, MPI_INFO_NULL, 0, &(comm_pkg->neighbor_comm));
   MPIX_Dist_graph_create_adjacent( comm, num_sends, hypre_ParCSRCommPkgSendProcs(comm_pkg),
                                    MPI_UNWEIGHTED, num_recvs, hypre_ParCSRCommPkgRecvProcs(comm_pkg),
                                    MPI_UNWEIGHTED, MPI_INFO_NULL, 0, &(comm_pkg->neighborT_comm));

   HYPRE_Int num_send_elmts = send_map_starts[num_sends];
   comm_pkg->global_send_indices = hypre_CTAlloc(long, num_send_elmts, HYPRE_MEMORY_HOST);
   for (HYPRE_Int i = 0; i < num_send_elmts; ++i)
   {
      comm_pkg->global_send_indices[i] = send_map_elmts[i] + first_col_diag;
   }

   HYPRE_Int num_recv_elmts = recv_vec_starts[num_recvs];
   comm_pkg->global_recv_indices = hypre_CTAlloc(long, num_recv_elmts, HYPRE_MEMORY_HOST);
   for (HYPRE_Int i = 0; i < num_recv_elmts; ++i)
   {
      comm_pkg->global_recv_indices[i] = col_map_off_d[i];
   }
#endif

   return hypre_error_flag;
}
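
The two loops above record the communication pattern in global column numbers rather than hypre's local ones: send_map_elmts holds local indices into this rank's owned block, so adding first_col_diag (the global index of the first locally owned column) converts them to global indices, while col_map_off_d already maps local off-diagonal columns to global ones. A tiny self-contained example of the send-side translation, with made-up values:

/* index_translation_sketch.c -- illustrates the send-index translation above with made-up data */
#include <stdio.h>

int main(void)
{
   /* suppose this rank's owned columns start at global index 100 */
   long first_col_diag = 100;
   int  send_map_elmts[3] = { 0, 3, 7 };   /* local indices of owned entries to send */
   long global_send_indices[3];

   for (int i = 0; i < 3; ++i)
   {
      global_send_indices[i] = send_map_elmts[i] + first_col_diag;
   }

   /* prints 100 103 107: the global column numbers the receiving rank expects */
   for (int i = 0; i < 3; ++i)
   {
      printf("%ld ", global_send_indices[i]);
   }
   printf("\n");
   return 0;
}
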
[Diffs for the remaining two changed files are not shown.]
