mpi_mapping_auxiliaries_m Module

Auxiliary routines for MPI mapping across planes


Uses

  • mpi_mapping_auxiliaries_m directly uses: csrmat_m, precision_m, screen_io_m, mpi.
    Indirect dependencies (through those modules): iso_c_binding, iso_fortran_env, netcdf, error_handling_m, list_operations_m, status_codes_m, comm_handling_m.

Used by

  • mpi_mapping_auxiliaries_m is used directly by: auxiliaries_test_diffusion_m, map_factory_s, mesh_cart_communicate_s.
    It is used indirectly by the program test_diffusion (via auxiliaries_test_diffusion_m).

Interfaces

public interface getdata_fwdbwdplane

Overloading routine getdata_fwdbwdplane for integer and real data

  • private subroutine getdata_fwdbwdplane_fp(comm, step, nsend, usend, nrecv, urecv)

    Communicates data (FP) between planes: receives data from plane rank+step and sends data to plane rank-step, with periodicity in the communication.

    Arguments

    Type | Intent | Optional | Attributes | Name
    integer, intent(in) :: comm

    MPI Communicator

    integer, intent(in) :: step

    Step size of communication

    integer, intent(in) :: nsend

    Dimension of the data array to be sent

    real(kind=FP), intent(in), dimension(nsend) :: usend

    Array to be sent

    integer, intent(out) :: nrecv

    Dimension of received array

    real(kind=FP), intent(out), allocatable, dimension(:) :: urecv

    Array to be received

  • private subroutine getdata_fwdbwdplane_int(comm, step, nsend, usend, nrecv, urecv)

    Communicates data (Integer) between planes: receives data from plane rank+step and sends data to plane rank-step, with periodicity in the communication.

    Arguments

    Type | Intent | Optional | Attributes | Name
    integer, intent(in) :: comm

    MPI Communicator

    integer, intent(in) :: step

    Step size of communication

    integer, intent(in) :: nsend

    Dimension of the data array to be sent

    integer, intent(in), dimension(nsend) :: usend

    Array to be sent

    integer, intent(out) :: nrecv

    Dimension of received array

    integer, intent(out), allocatable, dimension(:) :: urecv

    Array to be received

  • private subroutine getdata_fwdbwdplane_csr(comm, step, acsr_send, acsr_recv)

    Communicates data (CSR matrix) between planes: receives data from plane rank+step and sends data to plane rank-step, with periodicity in the communication.

    Arguments

    Type | Intent | Optional | Attributes | Name
    integer, intent(in) :: comm

    MPI Communicator

    integer, intent(in) :: step

    Step size of communication

    type(csrmat_t), intent(in) :: acsr_send

    CSR matrix to be sent

    type(csrmat_t), intent(out), allocatable :: acsr_recv

    CSR matrix to be received


Subroutines

public subroutine get_mpipattern(comm, istart, iend, n, n_loc, iloc_start, iloc_end)

Divides an integer range into chunks that can be worked on by separate MPI processes

Arguments

Type | Intent | Optional | Attributes | Name
integer, intent(in) :: comm

MPI communicator

integer, intent(in) :: istart

Initial index of range

integer, intent(in) :: iend

Final index of range

integer, intent(out) :: n

Global length of range

integer, intent(out) :: n_loc

Local length of range

integer, intent(out) :: iloc_start

Local start of MPI-chunk

integer, intent(out) :: iloc_end

Local end of MPI-chunk

public subroutine communicate_matrixmpi(comm, ndim_glob, ndim_loc, nz_al, icsr, jcsr, val)

Assembles a global CSR matrix, which was built partially on individual processes

Arguments

Type | Intent | Optional | Attributes | Name
integer, intent(in) :: comm

MPI communicator

integer, intent(in) :: ndim_glob

Dimension of global matrix

integer, intent(in) :: ndim_loc

Local dimension of partial matrix

integer, intent(in) :: nz_al

Dimension of jcsr and val; must be larger than the number of non-zeros of the global matrix

integer, intent(inout), dimension(ndim_glob+1) :: icsr

On input: i-indices (CSR format) of the partial matrix. On output: i-indices (CSR format) of the global matrix.

integer, intent(inout), dimension(nz_al) :: jcsr

On input: column indices (CSR format) of the partial matrix. On output: column indices (CSR format) of the global matrix.

real(kind=FP), intent(inout), dimension(nz_al) :: val

On input: values (CSR format) of the partial matrix. On output: values (CSR format) of the global matrix.