Skip to content

Commit 2f54bb5

Browse files
authored
Merge pull request #308 from idefix-code/addColumnDensityExample
Add a class to compute accelerated cumulative sum
2 parents 4286299 + 7c10ec0 commit 2f54bb5

9 files changed

Lines changed: 541 additions & 0 deletions

File tree

.github/workflows/idefix-ci-jobs.yml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -201,3 +201,5 @@ jobs:
201201
run: scripts/ci/run-tests $IDEFIX_DIR/test/utils/lookupTable -all $TESTME_OPTIONS
202202
- name: Dump Image
203203
run: scripts/ci/run-tests $IDEFIX_DIR/test/utils/dumpImage -all $TESTME_OPTIONS
204+
- name: Column density
205+
run: scripts/ci/run-tests $IDEFIX_DIR/test/utils/columnDensity -all $TESTME_OPTIONS

doc/source/faq.rst

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -73,3 +73,6 @@ I want to test a modification to *Idefix* source code specific to my problem wit
7373

7474
I want to use a lookup table from a CSV file in my idefix_loop. How could I proceed?
7575
Use the ``LookupTable`` class (see :ref:`LookupTableClass`)
76+
77+
I want to compute a cumulative sum (e.g. a column density) on the fly. How could I proceed?
78+
Use the ``Column`` class (see :class:`::Column`)

src/utils/CMakeLists.txt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,4 +5,6 @@ target_sources(idefix
55
PUBLIC ${CMAKE_CURRENT_LIST_DIR}/dumpImage.cpp
66
PUBLIC ${CMAKE_CURRENT_LIST_DIR}/dumpImage.hpp
77
PUBLIC ${CMAKE_CURRENT_LIST_DIR}/lookupTable.hpp
8+
PUBLIC ${CMAKE_CURRENT_LIST_DIR}/column.cpp
9+
PUBLIC ${CMAKE_CURRENT_LIST_DIR}/column.hpp
810
)

src/utils/column.cpp

Lines changed: 240 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,240 @@
1+
// ***********************************************************************************
2+
// Idefix MHD astrophysical code
3+
// Copyright(C) Geoffroy R. J. Lesur <[email protected]>
4+
// and other code contributors
5+
// Licensed under CeCILL 2.1 License, see COPYING for more information
6+
// ***********************************************************************************
7+
8+
9+
#include <vector>
10+
#include <string>
11+
12+
#include "column.hpp"
13+
#include "idefix.hpp"
14+
#include "input.hpp"
15+
#include "output.hpp"
16+
#include "grid.hpp"
17+
#include "dataBlock.hpp"
18+
#include "dataBlockHost.hpp"
19+
20+
///////////////////////////////////////////////////////////////////////////////////
/// @brief Build a Column (cumulative-sum) helper attached to a DataBlock.
/// @param dir  direction of integration (IDIR, JDIR or KDIR)
/// @param sign +1: integrate from the left; -1: integrate from the right (backwards)
/// @param data parent DataBlock providing geometry, grid layout and MPI topology
///////////////////////////////////////////////////////////////////////////////////
Column::Column(int dir, int sign, DataBlock *data)
            : direction(dir), sign(sign) {
  idfx::pushRegion("Column::Column");
  // Cache the local grid layout (total/internal sizes and active-zone offsets)
  this->np_tot = data->np_tot;
  this->np_int = data->np_int;
  this->beg = data->beg;

  if(dir>= DIMENSIONS || dir < IDIR) IDEFIX_ERROR("Unknown direction for Column constructor");

  // Allocate the array on which we do the average
  this->ColumnArray = IdefixArray3D<real>("ColumnArray",np_tot[KDIR], np_tot[JDIR], np_tot[IDIR]);

  // Elementary volumes and area
  this->Volume = data->dV;
  this->Area = data->A[dir];

  // allocate helper array
  // localSum is a 2D buffer spanning the plane perpendicular to the scan direction;
  // it holds per-column partial sums exchanged between MPI subdomains.
  if(dir == IDIR) {
    localSum = IdefixArray2D<real>("localSum",np_tot[KDIR], np_tot[JDIR]);
  }
  if(dir == JDIR) {
    localSum = IdefixArray2D<real>("localSum",np_tot[KDIR], np_tot[IDIR]);
  }
  if(dir == KDIR) {
    localSum = IdefixArray2D<real>("localSum",np_tot[JDIR], np_tot[IDIR]);
  }
  #ifdef WITH_MPI
    // Create sub-MPI communicator dedicated to scan
    // (1D communicator containing only the processes aligned along `dir`)
    int remainDims[3] = {false, false, false};
    remainDims[dir] = true;
    MPI_Cart_sub(data->mygrid->CartComm, remainDims, &this->ColumnComm);
    MPI_Comm_rank(this->ColumnComm, &this->MPIrank);
    MPI_Comm_size(this->ColumnComm, &this->MPIsize);

    // create MPI class for boundary Xchanges
    // A single (dummy) variable index is mapped since ColumnArray is a scalar field.
    int ntarget = 0;
    std::vector<int> mapVars;
    mapVars.push_back(ntarget);

    this->mpi.Init(data->mygrid, mapVars, data->nghost.data(), data->np_int.data());
    this->nproc = data->mygrid->nproc;
  #endif
  idfx::popRegion();
}
64+
65+
///////////////////////////////////////////////////////////////////////////////////
/// @brief Compute the cumulative sum (column density) of in(var,...) along the
///        direction chosen at construction, storing the result in ColumnArray.
/// @param in  4D input array (variable, k, j, i)
/// @param var index of the variable to integrate
///
/// The integral of each cell is in*dV/<A>, where <A> is the arithmetic mean of the
/// two cell-face areas in the scan direction. An inclusive prefix scan is performed
/// per subdomain (Kokkos team scan), then stitched across MPI subdomains by a
/// pipelined Recv/add/Send along the dedicated 1D communicator. When sign<0, the
/// total is broadcast from the last subdomain and the column is reversed by
/// subtraction. Finally ghost zones are synchronized via boundary exchanges.
///////////////////////////////////////////////////////////////////////////////////
void Column::ComputeColumn(IdefixArray4D<real> in, const int var) {
  idfx::pushRegion("Column::ComputeColumn");
  const int nk = np_int[KDIR];
  const int nj = np_int[JDIR];
  const int ni = np_int[IDIR];
  const int kb = beg[KDIR];
  const int jb = beg[JDIR];
  const int ib = beg[IDIR];
  const int ke = kb+nk;
  const int je = jb+nj;
  const int ie = ib+ni;

  // Local aliases so that device lambdas capture views/ints by value, not `this`.
  const int direction = this->direction;
  auto column = this->ColumnArray;
  auto dV = this->Volume;
  auto A = this->Area;
  auto localSum = this->localSum;

  if(direction==IDIR) {
    // Inspired from loop.hpp: one team per (k,j) pencil, inclusive scan along i
    Kokkos::parallel_for("ColumnX1", team_policy (nk*nj, Kokkos::AUTO),
      KOKKOS_LAMBDA (member_type team_member) {
        int k = team_member.league_rank() / nj;
        int j = team_member.league_rank() - k*nj + jb;
        k += kb;
        Kokkos::parallel_scan(Kokkos::TeamThreadRange<>(team_member,ib,ie),
          [=] (int i, real &partial_sum, bool is_final) {
            partial_sum += in(var,k,j,i)*dV(k,j,i) / (0.5*(A(k,j,i)+A(k,j,i+1)));
            if(is_final) column(k,j,i) = partial_sum;
          });
      });
  }
  if(direction==JDIR) {
    // Inspired from loop.hpp
    Kokkos::parallel_for("ColumnX2", team_policy (nk*ni, Kokkos::AUTO),
      KOKKOS_LAMBDA (member_type team_member) {
        int k = team_member.league_rank() / ni;
        int i = team_member.league_rank() - k*ni + ib;
        k += kb;
        Kokkos::parallel_scan(Kokkos::TeamThreadRange<>(team_member,jb,je),
          [=] (int j, real &partial_sum, bool is_final) {
            partial_sum += in(var,k,j,i)*dV(k,j,i) / (0.5*(A(k,j,i)+A(k,j+1,i)));
            if(is_final) column(k,j,i) = partial_sum;
          });
      });
  }
  if(direction==KDIR) {
    // Inspired from loop.hpp
    // Kernel renamed from the copy-pasted "ColumnX2" so profiling traces
    // distinguish the X2 and X3 scans.
    Kokkos::parallel_for("ColumnX3", team_policy (nj*ni, Kokkos::AUTO),
      KOKKOS_LAMBDA (member_type team_member) {
        int j = team_member.league_rank() / ni;
        int i = team_member.league_rank() - j*ni + ib;
        j += jb;
        Kokkos::parallel_scan(Kokkos::TeamThreadRange<>(team_member,kb,ke),
          [=] (int k, real &partial_sum, bool is_final) {
            partial_sum += in(var,k,j,i)*dV(k,j,i) / (0.5*(A(k,j,i)+A(k+1,j,i)));
            if(is_final) column(k,j,i) = partial_sum;
          });
      });
  }

  #ifdef WITH_MPI
    // Pipelined stitching: each rank waits for the cumulative sum of all upstream
    // ranks, adds it, then forwards its own last-plane total downstream.
    int dst,src;
    MPI_Cart_shift(this->ColumnComm,0, 1, &src, &dst);
    int size = localSum.extent(0)*localSum.extent(1);
    if(MPIrank>0) {
      MPI_Status status;
      // Get the cumulative sum from previous processes
      Kokkos::fence();
      MPI_Recv(localSum.data(), size, realMPI, src, 20, ColumnComm, &status);
      // Add this to our cumulative sum
      idefix_for("Addsum",kb,ke,jb,je,ib,ie,
        KOKKOS_LAMBDA(int k, int j, int i) {
          if(direction == IDIR) column(k,j,i) += localSum(k,j);
          if(direction == JDIR) column(k,j,i) += localSum(k,i);
          if(direction == KDIR) column(k,j,i) += localSum(j,i);
        });
    } // rank =0
    // Get the final sum
    if(MPIrank < MPIsize-1) {
      if(direction==IDIR) {
        idefix_for("Loadsum",kb,ke,jb,je,
          KOKKOS_LAMBDA(int k, int j) {
            localSum(k,j) = column(k,j,ie-1);
          });
      }
      if(direction==JDIR) {
        idefix_for("Loadsum",kb,ke,ib,ie,
          KOKKOS_LAMBDA(int k, int i) {
            localSum(k,i) = column(k,je-1,i);
          });
      }
      if(direction==KDIR) {
        idefix_for("Loadsum",jb,je,ib,ie,
          KOKKOS_LAMBDA(int j, int i) {
            localSum(j,i) = column(ke-1,j,i);
          });
      }
      // And send it
      Kokkos::fence();
      MPI_Send(localSum.data(),size, realMPI, dst, 20, ColumnComm);
    } // MPIrank small enough
  #endif
  // If we need it backwards
  if(sign<0) {
    // Compute total cumulative sum in the last subdomain
    // NOTE(review): localSum adds the last active cell's contribution on top of
    // the inclusive scan value already stored in column(...) — confirm this
    // staggering (column defined at the downstream face) is intended.
    if(direction == IDIR) {
      idefix_for("Loadsum",kb,ke,jb,je,
        KOKKOS_LAMBDA(int k, int j) {
          localSum(k,j) = column(k,j,ie-1) + in(var,k,j,ie-1) * dV(k,j,ie-1)
                                            / (0.5*(A(k,j,ie-1)+A(k,j,ie)));
        });
    }
    if(direction == JDIR) {
      idefix_for("Loadsum",kb,ke,ib,ie,
        KOKKOS_LAMBDA(int k, int i) {
          localSum(k,i) = column(k,je-1,i) + in(var,k,je-1,i) * dV(k,je-1,i)
                                            / (0.5*(A(k,je-1,i)+A(k,je,i)));
        });
    }
    if(direction == KDIR) {
      idefix_for("Loadsum",jb,je,ib,ie,
        KOKKOS_LAMBDA(int j, int i) {
          localSum(j,i) = column(ke-1,j,i) + in(var,ke-1,j,i) * dV(ke-1,j,i)
                                            / (0.5*(A(ke-1,j,i)+A(ke,j,i)));
        });
    }

    #ifdef WITH_MPI
      Kokkos::fence();
      // The last rank along the scan direction owns the full total
      MPI_Bcast(localSum.data(),size, realMPI, MPIsize-1, ColumnComm);
    #endif
    // All substract the local column from the full column

    idefix_for("Subsum",kb,ke,jb,je,ib,ie,
      KOKKOS_LAMBDA(int k, int j, int i) {
        if(direction==IDIR) column(k,j,i) = localSum(k,j)-column(k,j,i);
        if(direction==JDIR) column(k,j,i) = localSum(k,i)-column(k,j,i);
        if(direction==KDIR) column(k,j,i) = localSum(j,i)-column(k,j,i);
      });
  } // sign <0
  // Xchange boundary elements when using MPI to ensure that column
  // density in the ghost zones are coherent
  #ifdef WITH_MPI
    // Create a 4D array that contains our column data
    // (unmanaged alias; no copy is made)
    IdefixArray4D<real> arr4D(column.data(), 1, this->np_tot[KDIR],
                                                this->np_tot[JDIR],
                                                this->np_tot[IDIR]);

    for(int dir = 0 ; dir < DIMENSIONS ; dir++) {
      // MPI Exchange data when needed
      if(nproc[dir]>1) {
        switch(dir) {
          case 0:
            this->mpi.ExchangeX1(arr4D);
            break;
          case 1:
            this->mpi.ExchangeX2(arr4D);
            break;
          case 2:
            this->mpi.ExchangeX3(arr4D);
            break;
        }
      }
    }
  #endif
  idfx::popRegion();
}
235+
236+
///////////////////////////////////////////////////////////////////////////////////
/// @brief Compute the column density of a 3D scalar field.
/// @param in 3D input array
///
/// Wraps the field into a single-variable unmanaged 4D view (no copy) and
/// delegates to the 4D implementation with variable index 0.
///////////////////////////////////////////////////////////////////////////////////
void Column::ComputeColumn(IdefixArray3D<real> in) {
  IdefixArray4D<real> view4D(in.data(), 1, in.extent(0), in.extent(1), in.extent(2));
  this->ComputeColumn(view4D, 0);
}

src/utils/column.hpp

Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,74 @@
1+
// ***********************************************************************************
2+
// Idefix MHD astrophysical code
3+
// Copyright(C) Geoffroy R. J. Lesur <[email protected]>
4+
// and other code contributors
5+
// Licensed under CeCILL 2.1 License, see COPYING for more information
6+
// ***********************************************************************************
7+
8+
#ifndef UTILS_COLUMN_HPP_
#define UTILS_COLUMN_HPP_

#include <array>   // std::array members below: include explicitly, not transitively
#include <vector>
#include <string>

#include "idefix.hpp"
#include "dataBlock.hpp"



// A class to implement a parallel cumulative sum (e.g. a column density)
class Column {
 public:
  ////////////////////////////////////////////////////////////////////////////////////
  /// @brief Constructor of a cumulative sum(=column density) from setup's arguments
  /// @param dir direction along which the integration is performed
  /// @param sign: +1 for an integration from left to right, -1 for an integration from right to
  /// left i.e (backwards)
  ///////////////////////////////////////////////////////////////////////////////////
  Column(int dir, int sign, DataBlock *);

  ///////////////////////////////////////////////////////////////////////////////////
  /// @brief Effectively compute integral from the input array in argument
  /// @param in: 4D input array
  /// @param variable: index of the variable along which we do the integral (since the
  /// input array is 4D)
  ///////////////////////////////////////////////////////////////////////////////////
  void ComputeColumn(IdefixArray4D<real> in, int variable);

  ///////////////////////////////////////////////////////////////////////////////////
  /// @brief Effectively compute integral from the input array in argument
  /// @param in: 3D input array
  ///////////////////////////////////////////////////////////////////////////////////
  void ComputeColumn(IdefixArray3D<real> in);

  ///////////////////////////////////////////////////////////////////////////////////
  /// @brief Get a reference to the computed column density array
  ///////////////////////////////////////////////////////////////////////////////////
  IdefixArray3D<real> GetColumn() {
    return (this->ColumnArray);
  }

 private:
  IdefixArray3D<real> ColumnArray;   // result of the last ComputeColumn call
  int direction;  // The direction along which the column is computed
  int sign;       // whether we integrate from the left or from the right
  std::array<int,3> np_tot;  // total local grid size (with ghosts)
  std::array<int,3> np_int;  // internal (active) local grid size
  std::array<int,3> beg;     // first active index in each direction

  IdefixArray3D<real> Area;    // cell-face areas normal to `direction`
  IdefixArray3D<real> Volume;  // elementary cell volumes

  IdefixArray2D<real> localSum;  // per-column partial sums (MPI stitching buffer)
  #ifdef WITH_MPI
    Mpi mpi;  // Mpi object when WITH_MPI is set
    MPI_Comm ColumnComm;  // 1D communicator along the integration direction
    int MPIrank;
    int MPIsize;

    std::array<int,3> nproc;  // 3D size of the MPI cartesian geometry

  #endif
};

#endif // UTILS_COLUMN_HPP_
Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
#define COMPONENTS 3
2+
#define DIMENSIONS 3
3+
4+
#define GEOMETRY CARTESIAN
Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
[Grid]
2+
X1-grid 1 0.0 64 u 1.0
3+
X2-grid 1 0.0 32 u 1.0
4+
X3-grid 1 0.0 16 u 1.0
5+
6+
[TimeIntegrator]
7+
CFL 0.8
8+
tstop 0.0
9+
nstages 2
10+
11+
[Hydro]
12+
solver hllc
13+
gamma 1.4
14+
15+
[Boundary]
16+
X1-beg periodic
17+
X1-end periodic
18+
X2-beg periodic
19+
X2-end periodic
20+
X3-beg periodic
21+
X3-end periodic
22+
23+
[Output]
24+
analysis 0.01

0 commit comments

Comments
 (0)