diff --git a/cpp/dolfinx/io/XDMFFile.cpp b/cpp/dolfinx/io/XDMFFile.cpp index 25226a0166..f91f3eb1a9 100644 --- a/cpp/dolfinx/io/XDMFFile.cpp +++ b/cpp/dolfinx/io/XDMFFile.cpp @@ -191,7 +191,7 @@ XDMFFile::read_mesh(const fem::CoordinateElement& element, // Create mesh const std::vector& _x = std::get>(x); mesh::Mesh mesh - = mesh::create_mesh(_comm.comm(), cells, element, _x, xshape, mode); + = mesh::create_mesh(_comm.comm(), cells, element, _x, xshape, xshape[1], mode); mesh.name = name; return mesh; } diff --git a/cpp/dolfinx/mesh/Geometry.h b/cpp/dolfinx/mesh/Geometry.h index 30784a7285..fea83534fa 100644 --- a/cpp/dolfinx/mesh/Geometry.h +++ b/cpp/dolfinx/mesh/Geometry.h @@ -254,7 +254,8 @@ Geometry(std::shared_ptr, U, /// rank_offset`, where `i` is the local row index in `x` and /// `rank_offset` is the sum of `x` rows on all processed with a lower /// rank than the caller. -/// @param[in] dim Geometric dimension (1, 2, or 3). +/// @param[in] xshape The shape of `x`. +/// @param[in] gdim Geometric dimension of ambient space. /// @param[in] reorder_fn Function for re-ordering the degree-of-freedom /// map associated with the geometry data. /// @note Experimental new interface for multiple cmap/dofmap @@ -266,7 +267,7 @@ create_geometry( const std::vector>>& elements, std::span nodes, std::span xdofs, - const U& x, int dim, + const U& x, std::array xshape, int gdim, std::function(const graph::AdjacencyList&)> reorder_fn = nullptr) @@ -338,20 +339,17 @@ create_geometry( [&nodes](auto index) { return nodes[index]; }); // Build coordinate dof array, copying coordinates to correct position - assert(x.size() % dim == 0); - const std::size_t shape0 = x.size() / dim; - const std::size_t shape1 = dim; - std::vector xg(3 * shape0, 0); - for (std::size_t i = 0; i < shape0; ++i) + std::vector xg(3 * xshape[0], 0); + for (std::size_t i = 0; i < xshape[0]; ++i) { - std::copy_n(std::next(x.begin(), shape1 * l2l[i]), shape1, + std::copy_n(std::next(x.begin(), xshape[1] * l2l[i]), xshape[1], std::next(xg.begin(), 3 * i)); } spdlog::info("Creating geometry with {} dofmaps", dof_layouts.size()); return Geometry(dof_index_map, std::move(dofmaps), elements, std::move(xg), - dim, std::move(igi)); + gdim, std::move(igi)); } /// @brief Build Geometry from input data. @@ -373,7 +371,8 @@ create_geometry( /// rank_offset`, where `i` is the local row index in `x` and /// `rank_offset` is the sum of `x` rows on all processed with a lower /// rank than the caller. -/// @param[in] dim Geometric dimension (1, 2, or 3). +/// @param[in] xshape Shape of the `x` data. +/// @param[in] gdim Geometric dimension of the ambient space. /// @param[in] reorder_fn Function for re-ordering the degree-of-freedom /// map associated with the geometry data. /// @return A mesh geometry.
@@ -384,7 +383,7 @@ create_geometry( const fem::CoordinateElement< std::remove_reference_t>& element, std::span nodes, std::span xdofs, - const U& x, int dim, + const U& x, std::array xshape, int gdim, std::function(const graph::AdjacencyList&)> reorder_fn = nullptr) @@ -430,18 +429,16 @@ create_geometry( [&nodes](auto index) { return nodes[index]; }); // Build coordinate dof array, copying coordinates to correct position - assert(x.size() % dim == 0); - const std::size_t shape0 = x.size() / dim; - const std::size_t shape1 = dim; - std::vector xg(3 * shape0, 0); - for (std::size_t i = 0; i < shape0; ++i) + assert(x.size() % xshape[1] == 0); + std::vector xg(3 * xshape[0], 0); + for (std::size_t i = 0; i < xshape[0]; ++i) { - std::copy_n(std::next(x.cbegin(), shape1 * l2l[i]), shape1, + std::copy_n(std::next(x.cbegin(), xshape[1] * l2l[i]), xshape[1], std::next(xg.begin(), 3 * i)); } return Geometry(dof_index_map, std::move(dofmaps.front()), {element}, - std::move(xg), dim, std::move(igi)); + std::move(xg), gdim, std::move(igi)); } } // namespace dolfinx::mesh diff --git a/cpp/dolfinx/mesh/generation.h b/cpp/dolfinx/mesh/generation.h index ba2fb2fd18..9f60470dae 100644 --- a/cpp/dolfinx/mesh/generation.h +++ b/cpp/dolfinx/mesh/generation.h @@ -42,13 +42,13 @@ create_interval_cells(std::array p, std::int64_t n); template Mesh build_tri(MPI_Comm comm, std::array, 2> p, - std::array n, + std::array n, int gdim, const CellPartitionFunction& partitioner, DiagonalType diagonal); template Mesh build_quad(MPI_Comm comm, const std::array, 2> p, - std::array n, + std::array n, int gdim, const CellPartitionFunction& partitioner); template @@ -161,6 +161,7 @@ Mesh create_box(MPI_Comm comm, std::array, 2> p, /// @param[in] p Bottom-left and top-right corners of the rectangle. /// @param[in] n Number of cells in each direction. /// @param[in] celltype Cell shape. +/// @param[in] gdim Geometric dimension of ambient space. /// @param[in] partitioner Partitioning function for distributing cells /// across MPI ranks. /// @param[in] diagonal Direction of diagonals @@ -169,6 +170,7 @@ template Mesh create_rectangle(MPI_Comm comm, std::array, 2> p, std::array n, CellType celltype, CellPartitionFunction partitioner, + int gdim = 2, DiagonalType diagonal = DiagonalType::right) { if (std::ranges::any_of(n, [](auto e) { return e < 1; })) @@ -186,9 +188,9 @@ Mesh create_rectangle(MPI_Comm comm, std::array, 2> p, switch (celltype) { case CellType::triangle: - return impl::build_tri(comm, p, n, partitioner, diagonal); + return impl::build_tri(comm, p, n, gdim, partitioner, diagonal); case CellType::quadrilateral: - return impl::build_quad(comm, p, n, partitioner); + return impl::build_quad(comm, p, n, gdim, partitioner); default: throw std::runtime_error("Generate rectangle mesh. Wrong cell type"); } @@ -206,14 +208,16 @@ Mesh create_rectangle(MPI_Comm comm, std::array, 2> p, /// @param[in] p Two corner points /// @param[in] n Number of cells in each direction /// @param[in] celltype Cell shape +/// @param[in] gdim Geometric dimension of ambient space /// @param[in] diagonal Direction of diagonals /// @return Mesh template Mesh create_rectangle(MPI_Comm comm, std::array, 2> p, std::array n, CellType celltype, + int gdim = 2, DiagonalType diagonal = DiagonalType::right) { - return create_rectangle(comm, p, n, celltype, nullptr, diagonal); + return create_rectangle(comm, p, n, celltype, nullptr, gdim, diagonal); } /// @brief Interval mesh of the 1D line `[a, b]`.
@@ -225,12 +229,13 @@ Mesh create_rectangle(MPI_Comm comm, std::array, 2> p, /// @param[in] comm MPI communicator to build the mesh on. /// @param[in] n Number of cells. /// @param[in] p End points of the interval. +/// @param[in] gdim Geometric dimension of ambient space /// @param[in] ghost_mode ghost mode of the created mesh, defaults to none /// @param[in] partitioner Partitioning function for distributing cells /// across MPI ranks. /// @return A mesh. template -Mesh create_interval(MPI_Comm comm, std::int64_t n, std::array p, +Mesh create_interval(MPI_Comm comm, std::int64_t n, std::array p, int gdim = 1, mesh::GhostMode ghost_mode = mesh::GhostMode::none, CellPartitionFunction partitioner = nullptr) { @@ -254,12 +259,12 @@ Mesh create_interval(MPI_Comm comm, std::int64_t n, std::array p, { auto [x, cells] = impl::create_interval_cells(p, n); return create_mesh(comm, MPI_COMM_SELF, cells, element, MPI_COMM_SELF, x, - {x.size(), 1}, partitioner); + {x.size(), 1}, gdim, partitioner); } else { return create_mesh(comm, MPI_COMM_NULL, {}, element, MPI_COMM_NULL, - std::vector{}, {0, 1}, partitioner); + std::vector{}, {0, 1}, gdim, partitioner); } } @@ -383,7 +388,7 @@ Mesh build_tet(MPI_Comm comm, MPI_Comm subcomm, } return create_mesh(comm, subcomm, cells, element, subcomm, x, - {x.size() / 3, 3}, partitioner); + {x.size() / 3, 3}, 3, partitioner); } template @@ -426,7 +431,7 @@ mesh::Mesh build_hex(MPI_Comm comm, MPI_Comm subcomm, } return create_mesh(comm, subcomm, cells, element, subcomm, x, - {x.size() / 3, 3}, partitioner); + {x.size() / 3, 3}, 3, partitioner); } template @@ -473,12 +478,12 @@ Mesh build_prism(MPI_Comm comm, MPI_Comm subcomm, } return create_mesh(comm, subcomm, cells, element, subcomm, x, - {x.size() / 3, 3}, partitioner); + {x.size() / 3, 3}, 3, partitioner); } template Mesh build_tri(MPI_Comm comm, std::array, 2> p, - std::array n, + std::array n, int gdim, const CellPartitionFunction& partitioner, DiagonalType diagonal) { @@ -624,18 +629,18 @@ Mesh build_tri(MPI_Comm comm, std::array, 2> p, } return create_mesh(comm, MPI_COMM_SELF, cells, element, MPI_COMM_SELF, x, - {x.size() / 2, 2}, partitioner); + {x.size() / 2, 2}, gdim, partitioner); } else { return create_mesh(comm, MPI_COMM_NULL, {}, element, MPI_COMM_NULL, - std::vector{}, {0, 2}, partitioner); + std::vector{}, {0, 2}, gdim, partitioner); } } template Mesh build_quad(MPI_Comm comm, const std::array, 2> p, - std::array n, + std::array n, int gdim, const CellPartitionFunction& partitioner) { fem::CoordinateElement element(CellType::quadrilateral, 1); @@ -673,12 +678,12 @@ Mesh build_quad(MPI_Comm comm, const std::array, 2> p, } return create_mesh(comm, MPI_COMM_SELF, cells, element, MPI_COMM_SELF, x, - {x.size() / 2, 2}, partitioner); + {x.size() / 2, 2}, gdim, partitioner); } else { return create_mesh(comm, MPI_COMM_NULL, {}, element, MPI_COMM_NULL, - std::vector{}, {0, 2}, partitioner); + std::vector{}, {0, 2}, gdim, partitioner); } } } // namespace impl diff --git a/cpp/dolfinx/mesh/utils.h b/cpp/dolfinx/mesh/utils.h index 1fa5724e39..e2cdc1afb5 100644 --- a/cpp/dolfinx/mesh/utils.h +++ b/cpp/dolfinx/mesh/utils.h @@ -776,6 +776,7 @@ compute_incident_entities(const Topology& topology, /// the process offset on`comm`, The offset is the sum of `x` rows on /// all processed with a lower rank than the caller. /// @param[in] xshape Shape of the `x` data. +/// @param[in] gdim Geometric dimension of ambient space. /// @param[in] partitioner Graph partitioner that computes the owning /// rank for each cell. 
If not callable, cells are not redistributed. /// @return A mesh distributed on the communicator `comm`. @@ -785,6 +786,7 @@ Mesh> create_mesh( const fem::CoordinateElement< typename std::remove_reference_t>& element, MPI_Comm commg, const U& x, std::array xshape, + int gdim, const CellPartitionFunction& partitioner) { CellType celltype = element.cell_shape(); @@ -909,7 +911,7 @@ Mesh> create_mesh( // Create geometry object Geometry geometry - = create_geometry(topology, element, nodes1, cells1, coords, xshape[1]); + = create_geometry(topology, element, nodes1, cells1, coords, xshape, gdim); return Mesh(comm, std::make_shared(std::move(topology)), std::move(geometry)); @@ -946,6 +948,7 @@ Mesh> create_mesh( /// the process offset on`comm`, The offset is the sum of `x` rows on /// all processed with a lower rank than the caller. /// @param[in] xshape Shape of the `x` data. +/// @param[in] gdim Geometric dimension of ambient space. /// @param[in] partitioner Graph partitioner that computes the owning /// rank for each cell. If not callable, cells are not redistributed. /// @return A mesh distributed on the communicator `comm`. @@ -956,6 +959,7 @@ Mesh> create_mesh( const std::vector>>& elements, MPI_Comm commg, const U& x, std::array xshape, + int gdim, const CellPartitionFunction& partitioner) { assert(cells.size() == elements.size()); @@ -1138,7 +1142,7 @@ Mesh> create_mesh( // Create geometry object Geometry geometry - = create_geometry(topology, elements, nodes1, nodes2, coords, xshape[1]); + = create_geometry(topology, elements, nodes1, nodes2, coords, xshape, gdim); return Mesh(comm, std::make_shared(std::move(topology)), std::move(geometry)); @@ -1159,7 +1163,8 @@ Mesh> create_mesh( /// @param[in] elements Coordinate elements for the cells. /// @param[in] x Geometry data ('node' coordinates). See ::create_mesh /// for a detailed description. -/// @param[in] xshape The shape of `x`. It should be `(num_points, gdim)`. +/// @param[in] xshape The shape of `x`. +/// @param[in] gdim Geometric dimension of ambient space. /// @param[in] ghost_mode The requested type of cell ghosting/overlap /// @return A mesh distributed on the communicator `comm`.
template @@ -1167,13 +1172,13 @@ Mesh> create_mesh(MPI_Comm comm, std::span cells, const fem::CoordinateElement< std::remove_reference_t>& elements, - const U& x, std::array xshape, GhostMode ghost_mode) + const U& x, std::array xshape, int gdim, GhostMode ghost_mode) { if (dolfinx::MPI::size(comm) == 1) - return create_mesh(comm, comm, cells, elements, comm, x, xshape, nullptr); + return create_mesh(comm, comm, cells, elements, comm, x, xshape, gdim, nullptr); else { - return create_mesh(comm, comm, cells, elements, comm, x, xshape, + return create_mesh(comm, comm, cells, elements, comm, x, xshape, gdim, create_cell_partitioner(ghost_mode)); } } diff --git a/cpp/dolfinx/refinement/refine.h b/cpp/dolfinx/refinement/refine.h index 430bcc495b..c7a0368033 100644 --- a/cpp/dolfinx/refinement/refine.h +++ b/cpp/dolfinx/refinement/refine.h @@ -72,7 +72,7 @@ refine(const mesh::Mesh& mesh, mesh::Mesh mesh1 = mesh::create_mesh( mesh.comm(), mesh.comm(), cell_adj.array(), mesh.geometry().cmap(), - mesh.comm(), new_vertex_coords, xshape, partitioner); + mesh.comm(), new_vertex_coords, xshape, mesh.geometry().dim(), partitioner); // Report the number of refined cells const int D = topology->dim(); diff --git a/cpp/test/mesh/distributed_mesh.cpp b/cpp/test/mesh/distributed_mesh.cpp index 07e8f5a4b1..0017100879 100644 --- a/cpp/test/mesh/distributed_mesh.cpp +++ b/cpp/test/mesh/distributed_mesh.cpp @@ -29,7 +29,7 @@ constexpr int N = 8; // Create mesh using all processes and save xdmf auto part = mesh::create_cell_partitioner(mesh::GhostMode::shared_facet); auto mesh = std::make_shared>( - mesh::create_rectangle(comm, {{{0.0, 0.0}, {1.0, 1.0}}}, {N, N}, + mesh::create_rectangle(comm, {{{0.0, 0.0}, {1.0, 1.0}}}, {N, N}, mesh::CellType::triangle, part)); // Save mesh in XDMF format @@ -139,7 +139,7 @@ void test_distributed_mesh(mesh::CellPartitionFunction partitioner) // Build mesh mesh::Mesh mesh = mesh::create_mesh(comm, subset_comm, cells, cmap, comm, x, - xshape, partitioner); + xshape, xshape[1], partitioner); auto t = mesh.topology(); int tdim = t->dim(); CHECK(t->index_map(tdim)->size_global() == 2 * N * N); diff --git a/cpp/test/mesh/generation.cpp b/cpp/test/mesh/generation.cpp index 93bfd9dd9b..9b87267e21 100644 --- a/cpp/test/mesh/generation.cpp +++ b/cpp/test/mesh/generation.cpp @@ -118,7 +118,7 @@ TEMPLATE_TEST_CASE("Interval mesh (parallel)", "[mesh][interval]", float, }; mesh::Mesh mesh = mesh::create_interval( - MPI_COMM_WORLD, 5 * comm_size - 1, {0., 1.}, ghost_mode, part); + MPI_COMM_WORLD, 5 * comm_size - 1, {0., 1.}, 1, ghost_mode, part); { int comp_result; @@ -304,7 +304,7 @@ TEMPLATE_TEST_CASE("Rectangle triangle mesh (right)", using T = TestType; mesh::Mesh mesh = dolfinx::mesh::create_rectangle( - MPI_COMM_SELF, {{{0, 0}, {1, 1}}}, {1, 1}, mesh::CellType::triangle, + MPI_COMM_SELF, {{{0, 0}, {1, 1}}}, {1, 1}, mesh::CellType::triangle, 2, mesh::DiagonalType::right); // vertex layout: @@ -345,7 +345,7 @@ TEMPLATE_TEST_CASE("Rectangle triangle mesh (left)", using T = TestType; mesh::Mesh mesh = dolfinx::mesh::create_rectangle( - MPI_COMM_SELF, {{{0, 0}, {1, 1}}}, {1, 1}, mesh::CellType::triangle, + MPI_COMM_SELF, {{{0, 0}, {1, 1}}}, {1, 1}, mesh::CellType::triangle, 2, mesh::DiagonalType::left); // vertex layout: @@ -387,7 +387,7 @@ TEMPLATE_TEST_CASE("Rectangle triangle mesh (crossed)", using T = TestType; mesh::Mesh mesh = dolfinx::mesh::create_rectangle( - MPI_COMM_SELF, {{{0, 0}, {1, 1}}}, {1, 1}, mesh::CellType::triangle, + MPI_COMM_SELF, {{{0, 0}, {1, 1}}}, {1, 1}, 
mesh::CellType::triangle, 2, mesh::DiagonalType::crossed); // vertex layout: diff --git a/cpp/test/mesh/refinement/interval.cpp b/cpp/test/mesh/refinement/interval.cpp index 1253fe00b0..0c884de98b 100644 --- a/cpp/test/mesh/refinement/interval.cpp +++ b/cpp/test/mesh/refinement/interval.cpp @@ -59,7 +59,7 @@ mesh::Mesh create_3_vertex_interval_mesh() v1[2], v2[0], v2[1], v2[2]}; fem::CoordinateElement element(mesh::CellType::interval, 1); return mesh::create_mesh(MPI_COMM_SELF, MPI_COMM_SELF, cells, element, - MPI_COMM_SELF, x, {x.size() / 3, 3}, + MPI_COMM_SELF, x, {x.size() / 3, 3}, 3, mesh::create_cell_partitioner()); } @@ -184,7 +184,7 @@ TEMPLATE_TEST_CASE("Interval Refinement (parallel)", MPI_Comm commt = rank == 0 ? MPI_COMM_SELF : MPI_COMM_NULL; return mesh::create_mesh(MPI_COMM_WORLD, commt, cells, element, commt, x, - {x.size() / 3, 3}, partitioner); + {x.size() / 3, 3}, 3, partitioner); }; mesh::Mesh mesh = create_mesh(); diff --git a/python/dolfinx/io/utils.py b/python/dolfinx/io/utils.py index 005e28385a..e929b545ad 100644 --- a/python/dolfinx/io/utils.py +++ b/python/dolfinx/io/utils.py @@ -262,7 +262,7 @@ def read_mesh( # Build the mesh cmap = _cpp.fem.CoordinateElement_float64(cell_shape, cell_degree) msh = _cpp.mesh.create_mesh( - self.comm, cells, cmap, x, _cpp.mesh.create_cell_partitioner(ghost_mode) + self.comm, cells, cmap, x, x.shape[1], _cpp.mesh.create_cell_partitioner(ghost_mode) ) msh.name = name domain = ufl.Mesh( diff --git a/python/dolfinx/mesh.py b/python/dolfinx/mesh.py index e58563a2bf..aab3842485 100644 --- a/python/dolfinx/mesh.py +++ b/python/dolfinx/mesh.py @@ -650,7 +650,7 @@ def create_mesh( x = np.asarray(x, dtype=dtype, order="C") cells = np.asarray(cells, dtype=np.int64, order="C") - msh = _cpp.mesh.create_mesh(comm, cells, cmap._cpp_object, x, partitioner) + msh = _cpp.mesh.create_mesh(comm, cells, cmap._cpp_object, x, gdim, partitioner) return Mesh(msh, domain) @@ -769,6 +769,7 @@ def create_interval( comm: _MPI.Comm, nx: int, points: npt.ArrayLike, + gdim: int = 1, dtype: npt.DTypeLike = default_real_type, ghost_mode=GhostMode.shared_facet, partitioner=None, @@ -779,8 +780,9 @@ def create_interval( comm: MPI communicator. nx: Number of cells. points: Coordinates of the end points. - dtype: Float type for the mesh geometry(``numpy.float32`` + dtype: Float type for the mesh geometry (``numpy.float32`` or ``numpy.float64``). + gdim: Geometric dimension of ambient space. ghost_mode: Ghost mode used in the mesh partitioning. Options are ``GhostMode.none`` and ``GhostMode.shared_facet``. 
partitioner: Partitioning function to use for determining the @@ -791,11 +793,11 @@ def create_interval( """ if partitioner is None and comm.size > 1: partitioner = _cpp.mesh.create_cell_partitioner(ghost_mode) - domain = ufl.Mesh(basix.ufl.element("Lagrange", "interval", 1, shape=(1,), dtype=dtype)) # type: ignore + domain = ufl.Mesh(basix.ufl.element("Lagrange", "interval", 1, shape=(gdim,), dtype=dtype)) # type: ignore if np.issubdtype(dtype, np.float32): - msh = _cpp.mesh.create_interval_float32(comm, nx, points, ghost_mode, partitioner) + msh = _cpp.mesh.create_interval_float32(comm, nx, points, gdim, ghost_mode, partitioner) elif np.issubdtype(dtype, np.float64): - msh = _cpp.mesh.create_interval_float64(comm, nx, points, ghost_mode, partitioner) + msh = _cpp.mesh.create_interval_float64(comm, nx, points, gdim, ghost_mode, partitioner) else: raise RuntimeError(f"Unsupported mesh geometry float type: {dtype}") @@ -806,6 +808,7 @@ def create_unit_interval( comm: _MPI.Comm, nx: int, dtype: npt.DTypeLike = default_real_type, + gdim: int = 1, ghost_mode=GhostMode.shared_facet, partitioner=None, ) -> Mesh: @@ -815,8 +818,9 @@ def create_unit_interval( comm: MPI communicator. nx: Number of cells. points: Coordinates of the end points. - dtype: Float type for the mesh geometry(``numpy.float32`` + dtype: Float type for the mesh geometry (``numpy.float32`` or ``numpy.float64``). + gdim: Geometric dimension of ambient space. ghost_mode: Ghost mode used in the mesh partitioning. Options are ``GhostMode.none`` and ``GhostMode.shared_facet``. partitioner: Partitioning function to use for determining the @@ -825,7 +829,9 @@ def create_unit_interval( Returns: A unit interval mesh with end points at 0 and 1. """ - return create_interval(comm, nx, [0.0, 1.0], dtype, ghost_mode, partitioner) + return create_interval( + comm, nx, [0.0, 1.0], dtype=dtype, gdim=gdim, ghost_mode=ghost_mode, partitioner=partitioner + ) def create_rectangle( @@ -834,6 +840,7 @@ def create_rectangle( n: npt.ArrayLike, cell_type=CellType.triangle, dtype: npt.DTypeLike = default_real_type, + gdim: int = 2, ghost_mode=GhostMode.shared_facet, partitioner=None, diagonal: DiagonalType = DiagonalType.right, @@ -846,25 +853,27 @@ def create_rectangle( the rectangle. n: Number of cells in each direction. cell_type: Mesh cell type. - dtype: Float type for the mesh geometry(``numpy.float32`` + dtype: Float type for the mesh geometry (``numpy.float32`` or ``numpy.float64``) + gdim: Geometric dimension of ambient space. ghost_mode: Ghost mode used in the mesh partitioning. partitioner: Function that computes the parallel distribution of cells across MPI ranks. - diagonal: Direction of diagonal of triangular meshes. The - options are ``left``, ``right``, ``crossed``, ``left / right``, - ``right / left``. - + diagonal: Direction of diagonal of triangular meshes. Returns: A mesh of a rectangle. 
""" if partitioner is None and comm.size > 1: partitioner = _cpp.mesh.create_cell_partitioner(ghost_mode) - domain = ufl.Mesh(basix.ufl.element("Lagrange", cell_type.name, 1, shape=(2,), dtype=dtype)) # type: ignore + domain = ufl.Mesh(basix.ufl.element("Lagrange", cell_type.name, 1, shape=(gdim,), dtype=dtype)) # type: ignore if np.issubdtype(dtype, np.float32): - msh = _cpp.mesh.create_rectangle_float32(comm, points, n, cell_type, partitioner, diagonal) + msh = _cpp.mesh.create_rectangle_float32( + comm, points, n, cell_type, gdim, partitioner, diagonal + ) elif np.issubdtype(dtype, np.float64): - msh = _cpp.mesh.create_rectangle_float64(comm, points, n, cell_type, partitioner, diagonal) + msh = _cpp.mesh.create_rectangle_float64( + comm, points, n, cell_type, gdim, partitioner, diagonal + ) else: raise RuntimeError(f"Unsupported mesh geometry float type: {dtype}") @@ -877,6 +886,7 @@ def create_unit_square( ny: int, cell_type=CellType.triangle, dtype: npt.DTypeLike = default_real_type, + gdim: int = 2, ghost_mode=GhostMode.shared_facet, partitioner=None, diagonal: DiagonalType = DiagonalType.right, @@ -890,6 +900,7 @@ def create_unit_square( cell_type: Mesh cell type. dtype: Float type for the mesh geometry(``numpy.float32`` or ``numpy.float64``). + gdim: Geometric dimension of ambient space. ghost_mode: Ghost mode used in the mesh partitioning. partitioner: Function that computes the parallel distribution of cells across MPI ranks. @@ -903,11 +914,12 @@ def create_unit_square( comm, [np.array([0.0, 0.0]), np.array([1.0, 1.0])], [nx, ny], - cell_type, - dtype, - ghost_mode, - partitioner, - diagonal, + cell_type=cell_type, + dtype=dtype, + gdim=gdim, + ghost_mode=ghost_mode, + partitioner=partitioner, + diagonal=diagonal, ) @@ -982,10 +994,10 @@ def create_unit_cube( comm, [np.array([0.0, 0.0, 0.0]), np.array([1.0, 1.0, 1.0])], [nx, ny, nz], - cell_type, - dtype, - ghost_mode, - partitioner, + cell_type=cell_type, + dtype=dtype, + ghost_mode=ghost_mode, + partitioner=partitioner, ) diff --git a/python/dolfinx/wrappers/mesh.cpp b/python/dolfinx/wrappers/mesh.cpp index 5cf66c9c6a..e6543be761 100644 --- a/python/dolfinx/wrappers/mesh.cpp +++ b/python/dolfinx/wrappers/mesh.cpp @@ -237,30 +237,30 @@ void declare_mesh(nb::module_& m, std::string type) std::string create_interval("create_interval_" + type); m.def( create_interval.c_str(), - [](MPICommWrapper comm, std::int64_t n, std::array p, + [](MPICommWrapper comm, std::int64_t n, std::array p, int gdim, dolfinx::mesh::GhostMode mode, const part::impl::PythonCellPartitionFunction& part) { return dolfinx::mesh::create_interval( - comm.get(), n, p, mode, + comm.get(), n, p, gdim, mode, part::impl::create_cell_partitioner_cpp(part)); }, - nb::arg("comm"), nb::arg("n"), nb::arg("p"), nb::arg("ghost_mode"), + nb::arg("comm"), nb::arg("n"), nb::arg("p"), nb::arg("gdim"), nb::arg("ghost_mode"), nb::arg("partitioner").none()); std::string create_rectangle("create_rectangle_" + type); m.def( create_rectangle.c_str(), [](MPICommWrapper comm, std::array, 2> p, - std::array n, dolfinx::mesh::CellType celltype, + std::array n, dolfinx::mesh::CellType celltype, int gdim, const part::impl::PythonCellPartitionFunction& part, dolfinx::mesh::DiagonalType diagonal) { return dolfinx::mesh::create_rectangle( comm.get(), p, n, celltype, - part::impl::create_cell_partitioner_cpp(part), diagonal); + part::impl::create_cell_partitioner_cpp(part), gdim, diagonal); }, - nb::arg("comm"), nb::arg("p"), nb::arg("n"), nb::arg("celltype"), + nb::arg("comm"), 
nb::arg("p"), nb::arg("n"), nb::arg("celltype"), nb::arg("gdim"), nb::arg("partitioner").none(), nb::arg("diagonal")); std::string create_box("create_box_" + type); @@ -284,6 +284,7 @@ void declare_mesh(nb::module_& m, std::string type) nb::c_contig>>& cells_nb, const std::vector>& elements, nb::ndarray x, + int gdim, const part::impl::PythonCellPartitionFunction& p) { std::size_t shape1 = x.ndim() == 1 ? 1 : x.shape(1); @@ -312,12 +313,12 @@ void declare_mesh(nb::module_& m, std::string type) }; return dolfinx::mesh::create_mesh( comm.get(), comm.get(), cells, elements, comm.get(), - std::span(x.data(), x.size()), {x.shape(0), shape1}, p_wrap); + std::span(x.data(), x.size()), {x.shape(0), shape1}, gdim, p_wrap); } else return dolfinx::mesh::create_mesh( comm.get(), comm.get(), cells, elements, comm.get(), - std::span(x.data(), x.size()), {x.shape(0), shape1}, nullptr); + std::span(x.data(), x.size()), {x.shape(0), shape1}, gdim, nullptr); }); m.def( @@ -326,6 +327,7 @@ void declare_mesh(nb::module_& m, std::string type) nb::ndarray, nb::c_contig> cells, const dolfinx::fem::CoordinateElement& element, nb::ndarray x, + int gdim, const part::impl::PythonCellPartitionFunction& p) { std::size_t shape1 = x.ndim() == 1 ? 1 : x.shape(1); @@ -349,18 +351,18 @@ void declare_mesh(nb::module_& m, std::string type) return dolfinx::mesh::create_mesh( comm.get(), comm.get(), std::span(cells.data(), cells.size()), element, comm.get(), std::span(x.data(), x.size()), - {x.shape(0), shape1}, p_wrap); + {x.shape(0), shape1}, gdim, p_wrap); } else { return dolfinx::mesh::create_mesh( comm.get(), comm.get(), std::span(cells.data(), cells.size()), element, comm.get(), std::span(x.data(), x.size()), - {x.shape(0), shape1}, nullptr); + {x.shape(0), shape1}, gdim, nullptr); } }, nb::arg("comm"), nb::arg("cells"), nb::arg("element"), - nb::arg("x").noconvert(), nb::arg("partitioner").none(), + nb::arg("x").noconvert(), nb::arg("gdim"), nb::arg("partitioner").none(), "Helper function for creating meshes."); m.def( "create_submesh", @@ -471,13 +473,13 @@ void declare_mesh(nb::module_& m, std::string type) const std::vector>& elements, nb::ndarray, nb::c_contig> nodes, nb::ndarray, nb::c_contig> xdofs, - nb::ndarray, nb::c_contig> x, int dim) + nb::ndarray, nb::c_contig> x, int gdim) { return dolfinx::mesh::create_geometry( topology, elements, std::span(nodes.data(), nodes.size()), std::span(xdofs.data(), xdofs.size()), - std::span(x.data(), x.size()), dim); + std::span(x.data(), x.size()), std::array{x.shape(0), x.shape(1)}, gdim); }); } diff --git a/python/test/unit/mesh/test_create_mixed_mesh.py b/python/test/unit/mesh/test_create_mixed_mesh.py index 83be54ae3b..9709f36834 100644 --- a/python/test/unit/mesh/test_create_mixed_mesh.py +++ b/python/test/unit/mesh/test_create_mixed_mesh.py @@ -87,6 +87,7 @@ def test_create_mixed_mesh(): cells_np, [hexahedron._cpp_object, pyramid._cpp_object, tetrahedron._cpp_object], geomx, + 3, part, ) diff --git a/python/test/unit/mesh/test_mesh.py b/python/test/unit/mesh/test_mesh.py index e620e1c056..38bfab1538 100644 --- a/python/test/unit/mesh/test_mesh.py +++ b/python/test/unit/mesh/test_mesh.py @@ -108,6 +108,7 @@ def mesh_2d(dtype): [1, 1], CellType.triangle, dtype, + 2, GhostMode.none, create_cell_partitioner(GhostMode.none), DiagonalType.left, @@ -174,6 +175,7 @@ def rectangle(): [5, 5], CellType.triangle, np.float64, + 2, GhostMode.none, ) diff --git a/python/test/unit/mesh/test_mixed_topology.py b/python/test/unit/mesh/test_mixed_topology.py index 842d28e467..31899e1260 
100644 --- a/python/test/unit/mesh/test_mixed_topology.py +++ b/python/test/unit/mesh/test_mixed_topology.py @@ -54,7 +54,9 @@ def test_mixed_topology_mesh(): quad = coordinate_element(CellType.quadrilateral, 1) nodes = np.array([0, 1, 2, 3, 4, 5], dtype=np.int64) xdofs = np.array([0, 1, 2, 1, 2, 3, 2, 3, 4, 5], dtype=np.int64) - x = np.array([0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 2.0, 1.0, 2.0, 0.0], dtype=np.float64) + x = np.array( + [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0], [2.0, 1.0], [2.0, 0.0]], dtype=np.float64 + ) geom = create_geometry(topology, [tri._cpp_object, quad._cpp_object], nodes, xdofs, x, 2) print(geom.x) print(geom.index_map().size_local) @@ -201,9 +203,7 @@ def test_parallel_mixed_mesh(): set_log_level(LogLevel.INFO) set_thread_name(str(rank)) - geom = create_geometry( - topology, [tri._cpp_object, quad._cpp_object], nodes, xdofs, x.flatten(), 2 - ) + geom = create_geometry(topology, [tri._cpp_object, quad._cpp_object], nodes, xdofs, x, 2) assert len(geom.dofmaps(0)) == 2 assert len(geom.dofmaps(1)) == 1
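
Usage note for reviewers: below is a minimal sketch of how the new `gdim` argument could be exercised from Python, assuming the signatures added in this patch (`gdim` keywords on `create_interval` and `create_rectangle`, defaulting to 1 and 2). It is illustrative only and not part of the diff; whether every downstream operation fully supports a mesh embedded in a higher-dimensional ambient space is not verified here.

```python
# Illustrative sketch only. Assumes the gdim keyword added to
# dolfinx.mesh.create_interval / create_rectangle in this patch.
from mpi4py import MPI
import numpy as np

from dolfinx import mesh

# Interval mesh whose geometry lives in a 3D ambient space (gdim=3).
line = mesh.create_interval(MPI.COMM_WORLD, 10, [0.0, 1.0], gdim=3)

# Unit-square triangle mesh embedded in R^3 (z = 0 plane).
square = mesh.create_rectangle(
    MPI.COMM_WORLD,
    [np.array([0.0, 0.0]), np.array([1.0, 1.0])],
    [8, 8],
    cell_type=mesh.CellType.triangle,
    gdim=3,
)

# With this change, the geometric dimension reflects the requested ambient
# space rather than being inferred from the coordinate data shape.
print(line.geometry.dim, square.geometry.dim)  # expected: 3 3
```

The design point is that `create_geometry` and `create_mesh` now receive `xshape` and `gdim` separately, so the number of coordinate columns in the input data no longer has to equal the ambient dimension; existing callers such as `XDMFFile::read_mesh` preserve the old behaviour by passing `xshape[1]` as `gdim`.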