Commit 7a4509bc authored by Pietro Incardona's avatar Pietro Incardona

Fixing grid 1D

parent e5756fb2
......@@ -9,8 +9,9 @@ All notable changes to this project will be documented in this file.
### Fixed
- PETSC installation failure in case of preinstalled MPI
- Mis-compilation of SUITESPARSE on gcc-6.2
- vector_dist with negative domain (Now supported)
- Grid 1D fixing
- Grid 1D has been fixed
### Changed
......
......@@ -43,7 +43,7 @@ else
sed -i "/INSTALL_LIB\s=\s\/usr\/local\/lib/c\INSTALL_LIB = $1\/SUITESPARSE\/lib" SuiteSparse_config/SuiteSparse_config.mk
sed -i "/INSTALL_INCLUDE\s=\s\/usr\/local\/include/c\INSTALL_INCLUDE = $1\/SUITESPARSE\/include" SuiteSparse_config/SuiteSparse_config.mk
sed -i "/\sLAPACK\s=\s-llapack/c\LAPACK = " SuiteSparse_config/SuiteSparse_config.mk
sed -i "/\sBLAS\s=\s\-lopenblas/c\BLAS = -L$1/OPENBLAS/lib -lopenblas" SuiteSparse_config/SuiteSparse_config.mk
sed -i "/\sBLAS\s=\s\-lopenblas/c\BLAS = -L$1/OPENBLAS/lib -lopenblas -lpthread" SuiteSparse_config/SuiteSparse_config.mk
fi
......
......@@ -75,6 +75,7 @@
* \see calculateGhostBoxes() for a visualization of internal and external ghost boxes
*
* ### Create a Cartesian decomposition object on a Box space, distribute, calculate internal and external ghost boxes
*
* \snippet CartDecomposition_unit_test.hpp Create CartDecomposition
*
*/
......@@ -203,6 +204,7 @@ public:
/*! \brief Constructor, it decomposes and distributes the sub-domains across the processors
*
* \param v_cl Virtual cluster, used internally for communications
* \param bc boundary conditions
*
*/
void createSubdomains(Vcluster & v_cl, const size_t (& bc)[dim])
......@@ -405,30 +407,30 @@ public:
*
\verbatim
+----------------------------------------------------+
| |
| Processor 8 |
| Sub+domain 0 +-----------------------------------+
| | |
| | |
++--------------+---+---------------------------+----+ Processor 9 |
| | | B8_0 | | Subdomain 0 |
| +------------------------------------+ |
| | | | | |
| | | |B9_0| |
| | B | Local processor | | |
| Processor 5 | 5 | Subdomain 0 | | |
| Subdomain 0 | _ | +----------------------------------------+
| | 0 | | | |
| | | | | |
| | | | | Processor 9 |
| | | |B9_1| Subdomain 1 |
| | | | | |
| | | | | |
| | | | | |
+--------------+---+---------------------------+----+ |
| |
+-----------------------------------+
+----------------------------------------------------+
| |
| Processor 8 |
| Sub+domain 0 +-----------------------------------+
| | |
| | |
++--------------+---+---------------------------+----+ Processor 9 |
| | | B8_0 | | Subdomain 0 |
| +------------------------------------+ |
| | | | | |
| | | |B9_0| |
| | B | Local processor | | |
| Processor 5 | 5 | Subdomain 0 | | |
| Subdomain 0 | _ | +----------------------------------------+
| | 0 | | | |
| | | | | |
| | | | | Processor 9 |
| | | |B9_1| Subdomain 1 |
| | | | | |
| | | | | |
| | | | | |
+--------------+---+---------------------------+----+ |
| |
+-----------------------------------+
\endverbatim
......@@ -436,30 +438,32 @@ public:
and also
G8_0 G9_0 G9_1 G5_0 (External ghost boxes)
+----------------------------------------------------+
| Processor 8 |
| Subdomain 0 +-----------------------------------+
| | |
| +---------------------------------------------+ |
| | G8_0 | | |
+-----+---------------+------------------------------------+ | Processor 9 |
| | | | | Subdomain 0 |
| | | |G9_0| |
| | | | | |
| | | | | |
| | | Local processor | | |
| Processor 5 | | Sub+domain 0 | | |
| Subdomain 0 | | +-----------------------------------+
| | | | | |
| | G | | | |
| | 5 | | | Processor 9 |
| | | | | | Subdomain 1 |
| | 0 | |G9_1| |
| | | | | |
| | | | | |
+---------------------+------------------------------------+ | |
| | | |
+----------------------------------------+----+------------------------------+
\verbatim
+----------------------------------------------------+
| Processor 8 |
| Subdomain 0 +-----------------------------------+
| | |
| +---------------------------------------------+ |
| | G8_0 | | |
+-----+---------------+------------------------------------+ | Processor 9 |
| | | | | Subdomain 0 |
| | | |G9_0| |
| | | | | |
| | | | | |
| | | Local processor | | |
| Processor 5 | | Sub+domain 0 | | |
| Subdomain 0 | | +-----------------------------------+
| | | | | |
| | G | | | |
| | 5 | | | Processor 9 |
| | | | | | Subdomain 1 |
| | 0 | |G9_1| |
| | | | | |
| | | | | |
+---------------------+------------------------------------+ | |
| | | |
+----------------------------------------+----+------------------------------+
\endverbatim
......@@ -502,8 +506,10 @@ public:
public:
//! Space dimensions
static constexpr int dims = dim;
//! Space type
typedef T stype;
//! Increment the reference counter
......@@ -641,7 +647,11 @@ public:
/*! \brief Apply boundary condition to the point
*
* \param p Point to apply the boundary condition
* If the point goes out on the right it is brought back in from the left when the
* boundary is periodic; nothing happens when the boundary is non-periodic
*
* \param pt Point to apply the boundary condition to (its coordinates are changed
* according to the rule above)
*
*/
void applyPointBC(float (& pt)[dim]) const
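As an illustration of the wrapping rule described above, here is a minimal sketch, not the library implementation: a hypothetical helper that applies the same rule to a raw coordinate array, assuming the usual PERIODIC flag and a Box describing the domain.

// Illustrative sketch only (hypothetical helper, not part of this class):
// wrap each coordinate that left the domain when its boundary is periodic,
// leave it untouched when the boundary is non-periodic.
template<unsigned int dim, typename T>
void wrap_point_example(T (& pt)[dim], const Box<dim,T> & domain, const size_t (& bc)[dim])
{
    for (size_t i = 0 ; i < dim ; i++)
    {
        if (bc[i] != PERIODIC)
            continue;                                          // non-periodic: nothing to do

        T box_size = domain.getHigh(i) - domain.getLow(i);
        if (pt[i] >= domain.getHigh(i))      pt[i] -= box_size; // went out on the right, re-enter from the left
        else if (pt[i] < domain.getLow(i))   pt[i] += box_size; // went out on the left, re-enter from the right
    }
}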
......@@ -655,7 +665,11 @@ public:
/*! \brief Apply boundary condition to the point
*
* \param p Point to apply the boundary condition
* If the point goes out on the right it is brought back in from the left when the
* boundary is periodic; nothing happens when the boundary is non-periodic
*
* \param pt Point to apply the boundary conditions to (its coordinates are changed
* according to the rule above)
*
*/
void applyPointBC(Point<dim,T> & pt) const
......@@ -669,7 +683,11 @@ public:
/*! \brief Apply boundary condition to the point
*
* \param encapsulated object
* If the point goes out on the right it is brought back in from the left when the
* boundary is periodic; nothing happens when the boundary is non-periodic
*
* \param pt encapsulated point object (its coordinates are changed according to
* the rule above)
*
*/
template<typename Mem> void applyPointBC(encapc<1,Point<dim,T>,Mem> && pt) const
......@@ -721,7 +739,7 @@ public:
/*! \brief It creates another object that contains the same information and acts in the same way
*
* \return a duplicated decomposition
* \return a duplicated CartDecomposition object
*
*/
CartDecomposition<dim,T,Memory> duplicate() const
......@@ -755,6 +773,8 @@ public:
*
* \param cart element to copy
*
* \return itself
*
*/
CartDecomposition<dim,T,Memory> & operator=(const CartDecomposition & cart)
{
......@@ -785,6 +805,8 @@ public:
*
* \param cart element to copy
*
* \return itself
*
*/
CartDecomposition<dim,T,Memory> & operator=(CartDecomposition && cart)
{
......@@ -819,6 +841,10 @@ public:
* it defines into how many cells the space will be divided for a particular required minimum
* number of sub-domains
*
* \param n_sub number of sub-domains per processor
*
* \return grid dimension (a single number, because it is the same in every dimension)
*
*/
static size_t getDefaultGrid(size_t n_sub)
{
......@@ -828,6 +854,8 @@ public:
}
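One plausible way to picture the rule above (this is only an illustration, not necessarily the formula used by getDefaultGrid): pick the smallest per-dimension grid size, grown in powers of two, whose dim-th power reaches the requested minimum number of sub-domains.

// Illustrative sketch only: same size in every dimension, grown until g^dim >= n_sub.
size_t default_grid_example(size_t n_sub, size_t dim)
{
    size_t g = 1;
    size_t cells = 1;
    while (cells < n_sub)
    {
        g *= 2;                          // assumption: grow in powers of two
        cells = 1;
        for (size_t i = 0 ; i < dim ; i++)
            cells *= g;
    }
    return g;
}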
/*! \brief Given a point, return the processor the particle should go to
*
* \param p point
*
* \return processorID
*
......@@ -838,6 +866,8 @@ public:
}
/*! \brief Given a point, return the processor the particle should go to
*
* \param p point
*
* \return processorID
*
......@@ -848,6 +878,8 @@ public:
}
/*! \brief Given a point, return the processor the particle should go to
*
* \param p point
*
* \return processorID
*
......@@ -857,10 +889,12 @@ public:
return fine_s.get(cd.getCell(p));
}
/*! \brief Given a point return in which processor the particle should go
/*! \brief Given a point, return the processor the point/particle should go to
*
* Boundary conditions are considered
*
* \param p point
*
* \return processorID
*
*/
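A hypothetical usage sketch of the two lookups documented here; the member names processorID / processorIDBC and the overload taking a raw position array are assumptions, they are not visible in this hunk.

// Hypothetical usage: find the processor that should own a particle position.
float pos[3] = {0.5,0.1,0.9};
size_t proc    = dec.processorID(pos);     // plain cell lookup, no boundary handling
size_t proc_bc = dec.processorIDBC(pos);   // wraps periodic coordinates first, then looks up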
......@@ -876,6 +910,8 @@ public:
*
* Boundary conditions are considered
*
* \param p point
*
* \return processorID
*
*/
......@@ -891,6 +927,8 @@ public:
*
* Boundary conditions are considered
*
* \param p point position
*
* \return processorID
*
*/
......@@ -939,7 +977,7 @@ public:
*
* \param div_ number of sub-sub-domains to decompose into on each dimension
* \param domain_ domain to decompose
* \param bc_ boundary conditions
* \param bc boundary conditions
* \param ghost Ghost size
*
*/
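A usage sketch of this call sequence, mirroring the CartDecomposition unit test further down in this commit; the concrete domain, grid and ghost values are example numbers only.

// Example values; the call pattern follows the unit test below.
Vcluster & vcl = create_vcluster();
CartDecomposition<3,float> dec(vcl);

Box<3,float> box({0.0,0.0,0.0},{1.0,1.0,1.0});             // physical domain
size_t div[3] = {64,64,64};                                 // sub-sub-domains per dimension
size_t bc[3]  = {NON_PERIODIC,NON_PERIODIC,NON_PERIODIC};   // boundary conditions
Ghost<3,float> g(0.01);                                     // ghost extension

dec.setParameters(div,box,bc,g);
dec.decompose();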
......@@ -962,6 +1000,10 @@ public:
}
/*! \brief Delete the decomposition and reset the data-structure
*
*
*/
void reset()
{
sub_domains.clear();
......@@ -989,6 +1031,8 @@ public:
}
/*! \brief Refine the decomposition, available only for ParMetis distribution, for Metis it is a null call
*
* \param ts number of time steps since the previous load balancing
*
*/
void rebalance(size_t ts)
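For example, assuming a ParMetis distribution where the call is not a no-op:

dec.rebalance(10);   // ask for a re-balance, 10 time steps after the previous one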
......@@ -1005,6 +1049,8 @@ public:
}
/*! \brief Refine the decomposition, available only for ParMetis distribution, for Metis it is a null call
*
* \param dlb Dynamic load balancing object
*
* \return true if the re-balance has been executed, false otherwise
*/
......@@ -1075,9 +1121,10 @@ public:
return dist.getNSubSubDomains();
}
/*! \brief function that set the weight of the vertex
/*! \brief Function that sets the computational cost of a sub-sub-domain
*
* \param id vertex id
* \param weight computational cost
*
*/
inline void setSubSubDomainComputationCost(size_t id, size_t weight)
......@@ -1085,9 +1132,11 @@ public:
dist.setComputationCost(id, weight);
}
/*! \brief function that set the weight of the vertex
/*! \brief Function that returns the computational cost of the sub-sub-domain id
*
* \param id vertex id
* \param id sub-sub-domain id
*
* \return the computational cost
*
*/
inline size_t getSubSubDomainComputationCost(size_t id)
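A small usage sketch pairing the setter and the getter shown above; the id 0 and the weight 2 are arbitrary example values.

// Mark sub-sub-domain 0 as twice as expensive, then read the cost back.
dec.setSubSubDomainComputationCost(0,2);
size_t cost = dec.getSubSubDomainComputationCost(0);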
......@@ -1116,7 +1165,8 @@ public:
/*! \brief Get the local sub-domain
*
* \param i (each local processor can have more than one sub-domain)
* \param lc (each local processor can have more than one sub-domain)
*
* \return the sub-domain
*
*/
......@@ -1137,10 +1187,11 @@ public:
return sp;
}
/*! \brief Get the local sub-domain with ghost extension
/*! \brief Get the local sub-domain enlarged with ghost extension
*
* \param i (each local processor can have more than one sub-domain)
* \return the sub-domain
* \param lc (each processor can have more than one sub-domain)
*
* \return the extended sub-domain
*
*/
SpaceBox<dim, T> getSubDomainWithGhost(size_t lc)
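A hypothetical usage sketch; getSubDomainWithGhost appears just above, while the names getNSubDomain and getSubDomain are assumed and not shown in this hunk.

// Walk over the local sub-domains, with and without the ghost enlargement.
for (size_t lc = 0 ; lc < dec.getNSubDomain() ; lc++)
{
    SpaceBox<3,float> sub   = dec.getSubDomain(lc);           // plain sub-domain
    SpaceBox<3,float> sub_g = dec.getSubDomainWithGhost(lc);  // enlarged by the ghost
}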
......@@ -1182,7 +1233,7 @@ public:
*
* \warning if the particle is outside the domain the result is unreliable
*
* \param p object position
* \param pos object position
*
* \return true if it is local
*
......@@ -1194,11 +1245,12 @@ public:
/*! \brief Check if the particle is local considering boundary conditions
*
* \warning if the particle id outside the domain and non periodic the result
* \warning if the particle is outside the domain and the boundary is non-periodic, the result
* is unreliable
*
*
* \param p object position
* \param bc boundary conditions
*
* \return true if it is local
*
......@@ -1217,8 +1269,12 @@ public:
}
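A hypothetical usage sketch of the two checks; the member names isLocal / isLocalBC and the position-array overloads are assumptions, they are not visible in these hunks.

// Decide whether a particle position belongs to this processor.
float pos[3] = {0.1,0.2,0.3};
bool local    = dec.isLocal(pos);         // boundary conditions ignored
bool local_bc = dec.isLocalBC(pos,bc);    // periodic coordinates are wrapped before the check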
/*! \brief Check if the particle is local considering boundary conditions
*
* \warning if the particle is outside the domain and the boundary is non-periodic, the result
* is unreliable
*
* \param p object position
* \param bc boundary conditions
*
* \return true if it is local
*
......@@ -1251,6 +1307,8 @@ public:
/*! \brief Return the ghost
*
*
* \return the ghost extension
*
*/
const Ghost<dim,T> & getGhost() const
{
......@@ -1283,6 +1341,8 @@ public:
*
* \param output directory where to write the files
*
* \return true if the write succeeded
*
*/
bool write(std::string output) const
{
......@@ -1358,7 +1418,9 @@ public:
/*! \brief Check if the CartDecomposition contains the same information
*
* \param ele Element to check
* \param cart Element to check with
*
* \return true if they are equal
*
*/
bool is_equal(CartDecomposition<dim,T,Memory> & cart)
......@@ -1402,7 +1464,9 @@ public:
/*! \brief Check if the CartDecomposition contains the same information with the exception of the ghost part
* It is anyway required that the ghosts come from the same sub-domain decomposition
*
* \param ele Element to check
* \param cart Element to check with
*
* \return true if the two CartDecomposition are equal
*
*/
bool is_equal_ng(CartDecomposition<dim,T,Memory> & cart)
......@@ -1463,8 +1527,7 @@ public:
dist.setComputationCost(gid, c + i);
}
// friend classes
//! friend classes
friend extended_type;
};
......
......@@ -57,7 +57,6 @@ BOOST_AUTO_TEST_CASE( CartDecomposition_non_periodic_test)
// Vcluster
Vcluster & vcl = create_vcluster();
//! [Create CartDecomposition]
CartDecomposition<3, float> dec(vcl);
// Physical domain
......@@ -83,8 +82,6 @@ BOOST_AUTO_TEST_CASE( CartDecomposition_non_periodic_test)
dec.setParameters(div,box,bc,g);
dec.decompose();
//! [Create CartDecomposition]
// For each calculated ghost box
for (size_t i = 0; i < dec.getNIGhostBox(); i++)
{
......@@ -244,7 +241,6 @@ BOOST_AUTO_TEST_CASE( CartDecomposition_ext_non_periodic_test)
// Vcluster
Vcluster & vcl = create_vcluster();
//! [Create CartDecomposition]
CartDecomposition<3,float> dec(vcl);
// Physical domain
......
......@@ -93,9 +93,9 @@ BOOST_AUTO_TEST_CASE( nn_processor_np_test)
if (v_cl.getProcessUnitID() == 0)
{
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(1),1);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(2),1);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(3),1);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(1),1ul);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(2),1ul);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(3),1ul);
const openfpm::vector< ::Box<2,float> > & nsubs1 = nnp.getNearSubdomains(1);
const openfpm::vector< ::Box<2,float> > & nsubs2 = nnp.getNearSubdomains(2);
......@@ -120,9 +120,9 @@ BOOST_AUTO_TEST_CASE( nn_processor_np_test)
}
else if (v_cl.getProcessUnitID() == 1)
{
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(0),1);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(2),1);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(3),1);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(0),1ul);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(2),1ul);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(3),1ul);
const openfpm::vector< ::Box<2,float> > & nsubs1 = nnp.getNearSubdomains(0);
const openfpm::vector< ::Box<2,float> > & nsubs2 = nnp.getNearSubdomains(2);
......@@ -147,9 +147,9 @@ BOOST_AUTO_TEST_CASE( nn_processor_np_test)
}
else if (v_cl.getProcessUnitID() == 2)
{
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(1),1);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(0),1);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(3),1);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(1),1ul);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(0),1ul);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(3),1ul);
const openfpm::vector< ::Box<2,float> > & nsubs1 = nnp.getNearSubdomains(1);
const openfpm::vector< ::Box<2,float> > & nsubs2 = nnp.getNearSubdomains(0);
......@@ -173,9 +173,9 @@ BOOST_AUTO_TEST_CASE( nn_processor_np_test)
}
else if (v_cl.getProcessUnitID() == 3)
{
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(0),1);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(1),1);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(2),1);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(0),1ul);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(1),1ul);
BOOST_REQUIRE_EQUAL(nnp.getNRealSubdomains(2),1ul);
const openfpm::vector< ::Box<2,float> > & nsubs1 = nnp.getNearSubdomains(0);
const openfpm::vector< ::Box<2,float> > & nsubs2 = nnp.getNearSubdomains(1);
......@@ -250,9 +250,9 @@ BOOST_AUTO_TEST_CASE( nn_processor_box_periodic_test)
if (v_cl.getProcessUnitID() == 0)
{
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(1).size(),4);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(2).size(),4);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(3).size(),4);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(1).size(),4ul);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(2).size(),4ul);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(3).size(),4ul);
openfpm::vector<Box<2,float>> bv;
......@@ -286,9 +286,9 @@ BOOST_AUTO_TEST_CASE( nn_processor_box_periodic_test)
}
else if (v_cl.getProcessUnitID() == 1)
{
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(0).size(),4);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(2).size(),4);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(3).size(),4);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(0).size(),4ul);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(2).size(),4ul);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(3).size(),4ul);
openfpm::vector<Box<2,float>> bv;
......@@ -322,9 +322,9 @@ BOOST_AUTO_TEST_CASE( nn_processor_box_periodic_test)
}
else if (v_cl.getProcessUnitID() == 2)
{
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(0).size(),4);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(1).size(),4);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(3).size(),4);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(0).size(),4ul);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(1).size(),4ul);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(3).size(),4ul);
openfpm::vector<Box<2,float>> bv;
......@@ -358,9 +358,9 @@ BOOST_AUTO_TEST_CASE( nn_processor_box_periodic_test)
}
else if (v_cl.getProcessUnitID() == 3)
{
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(0).size(),4);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(1).size(),4);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(2).size(),4);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(0).size(),4ul);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(1).size(),4ul);
BOOST_REQUIRE_EQUAL(nnp.getNearSubdomains(2).size(),4ul);
openfpm::vector<Box<2,float>> bv;
......
......@@ -116,9 +116,11 @@ class grid_dist_iterator<dim,device_grid,FREE>
/*! \brief Constructor of the distributed grid iterator
*
* \param gk vector of the local grids
* \param gdb_ext set of local subdomains
* \param stop end point
*
*/
grid_dist_iterator(const openfpm::vector<device_grid> & gk, const openfpm::vector<GBoxes<device_grid::dims>> & gdb_ext, grid_key_dx<dim> stop)
grid_dist_iterator(const openfpm::vector<device_grid> & gk, const openfpm::vector<GBoxes<device_grid::dims>> & gdb_ext, const grid_key_dx<dim> & stop)
:g_c(0),gList(gk),gdb_ext(gdb_ext),stop(stop)
{
// Initialize the current iterator
......
......@@ -390,11 +390,131 @@ void Test2D(const Box<2,float> & domain, long int k)
// In this case the boundary conditions are non-periodic
if (g_dist.isInside(key_g))
{
match &= (g_dist.template get<0>(key),info.LinId(key_g));
match &= (g_dist.template get<0>(key) == info.LinId(key_g))?true:false;
}
++domg;
}
BOOST_REQUIRE_EQUAL(match,true);
}
}
void Test1D(const Box<1,float> & domain, long int k)
{
Vcluster & v_cl = create_vcluster();
long int big_step = k / 30;
big_step = (big_step == 0)?1:big_step;
long int small_step = 21;
if (v_cl.getProcessingUnits() > 48)
return;
print_test( "Testing 1D grid k<=",k);
// 1D test
for ( ; k >= 2 ; k-= (k > 2*big_step)?big_step:small_step )
{
BOOST_TEST_CHECKPOINT( "Testing 1D grid k=" << k );
//! [Create and access a distributed grid]
// grid size
size_t sz[1];
sz[0] = k;
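// scaling factor used below to shrink the ghost as the number of processing units grows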
float factor = pow(create_vcluster().getProcessingUnits()/2.0f,1.0f);
// Ghost
Ghost<1,float> g(0.01 / factor);
// Distributed grid with id decomposition
grid_dist_id<1, float, scalar<float>> g_dist(sz,domain,g);
// check the consistency of the decomposition
bool val = g_dist.getDecomposition().check_consistency();
BOOST_REQUIRE_EQUAL(val,true);
// Grid sm
grid_sm<1,void> info(sz);
// get the domain iterator
size_t count = 0;
auto dom = g_dist.getDomainIterator();
while (dom.isNext())
{
auto key = dom.get();
auto key_g = g_dist.getGKey(key);
g_dist.template get<0>(key) = info.LinId(key_g);
// Count the point
count++;
++dom;
}
//! [Create and access a distributed grid]
// Get the virtual cluster machine
Vcluster & vcl = g_dist.getVC();
// reduce
vcl.sum(count);
vcl.execute();
// Check
BOOST_REQUIRE_EQUAL(count,(size_t)k);
auto dom2 = g_dist.getDomainIterator();
grid_key_dx<1> start = dom2.getStart();
grid_key_dx<1> stop = dom2.getStop();
BOOST_REQUIRE_EQUAL((long int)stop.get(0),(long int)g_dist.size(0)-1);
BOOST_REQUIRE_EQUAL(start.get(0),0);
bool match = true;
// check that the grid stores the correct information
while (dom2.isNext())
{
auto key = dom2.get();
auto key_g = g_dist.getGKey(key);
match &= (g_dist.template get<0>(key) == info.LinId(key_g))?true:false;
++dom2;
}
BOOST_REQUIRE_EQUAL(match,true);
g_dist.template ghost_get<0>();
// check that the communication is correctly completed
auto domg = g_dist.getDomainGhostIterator();
// check that the grid with the ghost part stores the correct information
while (domg.isNext())
{
auto key = domg.get();
auto key_g = g_dist.getGKey(key);
// In this case the boundary conditions are non-periodic
if (g_dist.isInside(key_g))
{
match &= (g_dist.template get<0>(key) == info.LinId(key_g))?true:false;
}
++domg;
}
BOOST_REQUIRE_EQUAL(match,true);
}
}
......@@ -1770,6 +1890,16 @@ BOOST_AUTO_TEST_CASE( grid_dist_id_copy )
Test_grid_copy(domain3,k);
}
BOOST_AUTO_TEST_CASE( grid_1d_test )
{
// Domain
Box<1,float> domain1({-1.0},{1.0});