Gyselalib++
 
mpilayout.hpp
// SPDX-License-Identifier: MIT
#pragma once

#include <numeric>

#include <ddc/ddc.hpp>

#include "ddc_aliases.hpp"
#include "impilayout.hpp"
/**
 * A class describing a way in which data may be laid out across MPI processes.
 *
 * @tparam IdxRangeData The index range of the data.
 * @tparam DistributedDim The dimensions which are distributed across MPI processes.
 */
template <class IdxRangeData, class... DistributedDim>
class MPILayout : public IMPILayout<IdxRangeData, DistributedDim...>
{
    static_assert(ddc::is_discrete_domain_v<IdxRangeData>);

    using base_type = IMPILayout<IdxRangeData, DistributedDim...>;

public:
    /// The index range of the data.
    using idx_range_type = IdxRangeData;
    /// The index range of the distributed section of the data.
    using distributed_sub_idx_range = typename base_type::distributed_sub_idx_range;
    /// A type sequence describing the dimensions which are distributed across MPI processes.
    using distributed_type_seq = typename base_type::distributed_type_seq;

public:
    /**
     * Get the distributed index range which follows the chosen layout.
     *
     * @param global_idx_range The global index range of the data.
     * @param comm_size The number of MPI processes over which the data is distributed.
     * @param rank The rank of the current MPI process.
     *
     * @return The index range of the data handled by this MPI process.
     */
    idx_range_type distribute_idx_range(idx_range_type global_idx_range, int comm_size, int rank)
    {
        return internal_distribute_idx_range(global_idx_range, comm_size, rank);
    }
protected:
    /**
     * Distribute a 1D index range over the MPI processes.
     *
     * This is the base case of the recursion.
     *
     * @param global_idx_range The global 1D index range of the data.
     * @param comm_size The number of MPI processes over which the data is distributed.
     * @param rank The rank of the current MPI process.
     *
     * @return The index range of the data handled by this MPI process.
     */
    template <class HeadTag>
    IdxRange<HeadTag> internal_distribute_idx_range(
            IdxRange<HeadTag> global_idx_range,
            int comm_size,
            int rank)
    {
        if constexpr (ddc::in_tags_v<HeadTag, distributed_type_seq>) {
            // The global size must be divisible by the communicator size so that
            // every process receives the same number of elements
            assert(global_idx_range.size() % comm_size == 0);
            IdxStep<HeadTag> elems_on_dim(global_idx_range.size() / comm_size);
            IdxRange<HeadTag>
                    local_idx_range(global_idx_range.front() + rank * elems_on_dim, elems_on_dim);
            return local_idx_range;
        } else {
            // This dimension is not distributed, so the global index range is
            // returned unchanged; no unassigned processes may remain at this point
            assert(comm_size == 1);
            return global_idx_range;
        }
    }
    /**
     * Distribute the index range over the MPI processes.
     *
     * The first dimension is split over as many processes as possible (the gcd
     * of comm_size and the dimension's extent); the remaining processes are then
     * distributed recursively over the subsequent dimensions.
     *
     * @param idx_range The global index range of the data.
     * @param comm_size The number of MPI processes over which the data is distributed.
     * @param rank The rank of the current MPI process.
     *
     * @return The index range of the data handled by this MPI process.
     */
    template <class HeadTag, class... Tags, std::enable_if_t<(sizeof...(Tags) > 0), bool> = true>
    IdxRange<HeadTag, Tags...> internal_distribute_idx_range(
            IdxRange<HeadTag, Tags...> idx_range,
            int comm_size,
            int rank)
    {
        IdxRange<HeadTag> global_idx_range_along_dim = ddc::select<HeadTag>(idx_range);
        IdxRange<HeadTag> local_idx_range_along_dim;
        IdxRange<Tags...> remaining_idx_range;

        if constexpr (ddc::in_tags_v<HeadTag, distributed_type_seq>) {
            // The number of MPI processes along this dimension; the gcd guarantees
            // that it divides both comm_size and the extent of the dimension
            int n_ranks_along_dim = std::gcd(comm_size, global_idx_range_along_dim.size());
            // The number of MPI processes along all subsequent dimensions
            int n_elems_lower_dims = comm_size / n_ranks_along_dim;
            // The rank index for the MPI process along this dimension
            int rank_along_dim = rank / n_elems_lower_dims;
            // The rank index for the MPI process along all subsequent dimensions
            int remaining_rank = rank % n_elems_lower_dims;
            // Calculate the local index range
            IdxStep<HeadTag> elems_on_dim(global_idx_range_along_dim.size() / n_ranks_along_dim);
            Idx<HeadTag> distrib_start(
                    global_idx_range_along_dim.front() + rank_along_dim * elems_on_dim);
            local_idx_range_along_dim = IdxRange<HeadTag>(distrib_start, elems_on_dim);
            // Calculate the index range for the subsequent dimensions
            IdxRange<Tags...> remaining_dims = ddc::select<Tags...>(idx_range);
            remaining_idx_range = internal_distribute_idx_range(
                    remaining_dims,
                    n_elems_lower_dims,
                    remaining_rank);
        } else {
            // This dimension is not distributed, so every rank keeps its full extent
            local_idx_range_along_dim = global_idx_range_along_dim;
            // Calculate the index range for the subsequent dimensions
            remaining_idx_range = internal_distribute_idx_range(
                    ddc::select<Tags...>(idx_range),
                    comm_size,
                    rank);
        }
        return IdxRange<HeadTag, Tags...>(local_idx_range_along_dim, remaining_idx_range);
    }
};
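
The public entry point is distribute_idx_range. Below is a minimal usage sketch; the grid tags GridX and GridY, the 128 x 64 extents, and the use of MPI_COMM_WORLD are illustrative assumptions, not part of this header.

// Usage sketch (hypothetical tags and extents; not part of mpilayout.hpp).
#include <mpi.h>

#include "mpilayout.hpp"

struct GridX {};
struct GridY {};

int main(int argc, char** argv)
{
    MPI_Init(&argc, &argv);
    int comm_size;
    int rank;
    MPI_Comm_size(MPI_COMM_WORLD, &comm_size);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    // Global 2D index range of 128 x 64 elements starting at the origin.
    IdxRange<GridX, GridY> global_idx_range(
            Idx<GridX, GridY>(0, 0),
            IdxStep<GridX, GridY>(128, 64));

    // Distribute the data over both dimensions; GridX is split first.
    MPILayout<IdxRange<GridX, GridY>, GridX, GridY> layout;
    IdxRange<GridX, GridY> local_idx_range
            = layout.distribute_idx_range(global_idx_range, comm_size, rank);

    MPI_Finalize();
    return 0;
}

The layout splits GridX over as many processes as possible and distributes the remainder over GridY; extents that do not divide evenly among the assigned processes trip the assertions in the protected helpers.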
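As a concrete trace of the recursion in internal_distribute_idx_range: distributing a 10 x 6 index range over comm_size = 4, the first dimension gets n_ranks_along_dim = gcd(4, 10) = 2, so each block holds 10 / 2 = 5 elements and n_elems_lower_dims = 4 / 2 = 2 processes remain for the second dimension. For rank 3, rank_along_dim = 3 / 2 = 1 and remaining_rank = 3 % 2 = 1, so rank 3 receives elements [5, 10) of the first dimension, and the 1D overload then assigns it elements [3, 6) of the second.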
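
Dimensions can also be left undistributed by omitting them from the DistributedDim pack. A sketch under the same hypothetical tags, where only GridX is split (comm_size should divide the GridX extent here, otherwise the assertion in the 1D helper fires):

// Only GridX appears in the DistributedDim pack, so GridY is kept whole.
MPILayout<IdxRange<GridX, GridY>, GridX> layout_x_only;
IdxRange<GridX, GridY> local_idx_range_x
        = layout_x_only.distribute_idx_range(global_idx_range, comm_size, rank);
// ddc::select<GridY>(local_idx_range_x) equals ddc::select<GridY>(global_idx_range)
// on every rank: the non-distributed dimension keeps its full extent.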