Uzerbinati/fenicsx #8

Merged 5 commits on Oct 28, 2023
32 changes: 29 additions & 3 deletions .github/workflows/ngsPETSc.yml
@@ -1,5 +1,5 @@
 # .github/workflows/app.yaml
-name: ngsPETSc
+name: ngsPETSc tests
 on:
   push:
     branches-ignore:
@@ -9,10 +9,10 @@ on:
     - cron: '30 10 7,14,21,28 * *'

 jobs:
-  ngsolve_tests:
+  ngsolve:
     runs-on: ubuntu-latest
     container: urzerbinati/ngspetsc:latest
-    timeout-minutes: 20
+    timeout-minutes: 30

     steps:
       - name: Check out repository code
@@ -48,3 +48,29 @@ jobs:
           mpirun --allow-run-as-root -n 2 pytest -v --with-mpi tests/test_pc.py
           mpirun --allow-run-as-root -n 2 pytest -v --with-mpi tests/test_eps.py
           mpirun --allow-run-as-root -n 2 pytest -v --with-mpi tests/test_snes.py
+
+  fenicsx:
+    runs-on: ubuntu-latest
+    container: dolfinx/dolfinx:latest
+    timeout-minutes: 30
+
+    steps:
+      - name: Check out repository code
+        uses: actions/checkout@v2
+
+      - name: Install Netgen and ngsPETSc
+        run: |
+          pip3 install netgen-mesher \
+          && pip3 install pylint \
+          && export PYTHONPATH=$PYTHONPATH:/usr/local/lib/python3.10/site-packages \
+          && echo "PYTHONPATH=$PYTHONPATH" >> $GITHUB_ENV \
+          && NGSPETSC_NO_INSTALL_REQUIRED=ON pip install .
+
+      - name: Check formatting
+        run: |
+          make lint
+          make lint_test
+
+      - name: Run test suite in serial
+        run: |
+          pytest -v tests/test_fenicsx.py
8 changes: 5 additions & 3 deletions Makefile
@@ -1,9 +1,9 @@
 MPI_EXEC = ${PETSC_DIR}/${PETSC_ARCH}/bin/mpiexec
 lint:
-	pylint --disable=C0412,C0103,C0415,C0321,E1101,E0611,R1736,R0401,R0801,R0902,R1702,R0913,R0914,R0903,R0205,R0912,R0915,I1101,W0201,C0209 --variable-naming-style=camelCase --class-naming-style=PascalCase --argument-naming-style=camelCase --attr-naming-style=camelCase ngsPETSc
-	pylint --disable=C0412,C0103,C0415,C0321,C3001,E1101,E0611,R1736,R0401,R0801,R0902,R1702,R0913,R0914,R0903,R0205,R0912,R0915,I1101,W0201,W0406,W0212,C0209 --variable-naming-style=camelCase --class-naming-style=PascalCase --argument-naming-style=camelCase --attr-naming-style=camelCase ngsPETSc/utils
+	pylint --disable=C0412,C0103,C0415,C0321,E0401,E1101,E0611,R1736,R0401,R0801,R0902,R1702,R0913,R0914,R0903,R0205,R0912,R0915,I1101,W0201,C0209 --variable-naming-style=camelCase --class-naming-style=PascalCase --argument-naming-style=camelCase --attr-naming-style=camelCase ngsPETSc
+	pylint --disable=C0412,C0103,C0415,C0321,C3001,E0401,E1101,E0611,R1736,R0401,R0801,R0902,R1702,R0913,R0914,R0903,R0205,R0912,R0915,I1101,W0201,W0406,W0212,C0209 --variable-naming-style=camelCase --class-naming-style=PascalCase --argument-naming-style=camelCase --attr-naming-style=camelCase ngsPETSc/utils
 lint_test:
-	pylint --disable=C0412,C0103,C0415,C0321,E1101,E0611,R1736,R0401,R0914,R0801,R0902,R1702,R0913,R0903,R0205,R0912,R0915,I1101,W0201,C0209 --variable-naming-style=camelCase --class-naming-style=PascalCase --argument-naming-style=camelCase --attr-naming-style=camelCase tests
+	pylint --disable=C0412,C0103,C0415,C0321,E0401,E1101,E0611,R1736,R0401,R0914,R0801,R0902,R1702,R0913,R0903,R0205,R0912,R0915,I1101,W0201,C0209 --variable-naming-style=camelCase --class-naming-style=PascalCase --argument-naming-style=camelCase --attr-naming-style=camelCase tests
 test:
 	pytest tests/test_env.py
 	pytest tests/test_vec.py
@@ -13,6 +13,7 @@ test:
 	pytest tests/test_pc.py
 	pytest tests/test_eps.py
 	pytest tests/test_snes.py
+	pytest tests/test_fenicsx.py
 test_mpi:
 	$(MPI_EXEC) --allow-run-as-root -n 2 pytest --with-mpi tests/test_env.py
 	$(MPI_EXEC) --allow-run-as-root -n 2 pytest --with-mpi tests/test_vec.py
@@ -22,6 +23,7 @@ test_mpi:
 	$(MPI_EXEC) --allow-run-as-root -n 2 pytest --with-mpi tests/test_pc.py
 	$(MPI_EXEC) --allow-run-as-root -n 2 pytest --with-mpi tests/test_eps.py
 	$(MPI_EXEC) --allow-run-as-root -n 2 pytest --with-mpi tests/test_snes.py
+	$(MPI_EXEC) --allow-run-as-root -n 2 pytest --with-mpi tests/test_fenicsx.py
 doc:
 	rm docs/src/notebooks/*.rst
 	jupyter nbconvert --to rst docs/src/notebooks/*.ipynb
25 changes: 23 additions & 2 deletions ngsPETSc/__init__.py
@@ -2,13 +2,34 @@
 ngsPETSc is a NGSolve/Netgen interface to PETSc
 '''
 import warnings

 from ngsPETSc.plex import *
-from ngsPETSc.utils.firedrake import *

+#Firedrake
+try:
+    import firedrake
+except ImportError:
+    firedrake = None
+
+if firedrake:
+    from ngsPETSc.utils.firedrake import *
+
+#FEniCSx
+try:
+    import dolfinx
+except ImportError:
+    dolfinx = None
+
+if dolfinx:
+    from ngsPETSc.utils.fenicsx import *
+
+#Netgen
 try:
     import ngsolve
 except ImportError:
     warnings.warn("No NGSolve installed, only working with Netgen.")
     ngsolve = None

 if ngsolve:
     from ngsPETSc.mat import *
     from ngsPETSc.vec import *
@@ -17,6 +38,6 @@
     from ngsPETSc.eps import *
     from ngsPETSc.snes import *

-VERSION = "0.0.3"
+VERSION = "0.0.4"

 __all__ = ["Matrix","VectorMapping","MeshMapping","KrylovSolver","EigenSolver","FiredrakeMesh"]
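
With the guarded imports above, ngsPETSc re-exports a backend's utilities only when that backend is actually importable. A minimal sketch of how a downstream script could probe which backends were picked up; it assumes the star imports re-export FiredrakeMesh (from utils.firedrake) and GeometricModel (from utils.fenicsx, which defines no __all__):

# Minimal sketch: check which optional backends this ngsPETSc import picked up.
import ngsPETSc

# hasattr() is used so nothing raises when a backend is missing.
for name in ("FiredrakeMesh", "GeometricModel"):
    status = "available" if hasattr(ngsPETSc, name) else "not installed"
    print(f"{name}: {status}")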
81 changes: 81 additions & 0 deletions ngsPETSc/utils/fenicsx.py
@@ -0,0 +1,81 @@
'''
This module contains all the functions related to wrapping Netgen meshes to FEniCSx.
We adopt the same docstring conventions as the FEniCSx project, since this part of
the package will only be used in combination with FEniCSx.
'''
import typing

import dolfinx
import numpy as np

from mpi4py import MPI as _MPI

from ngsPETSc import MeshMapping

# Map from Netgen cell type (integer tuple) to GMSH cell type
_ngs_to_cells = {(2, 3): 2, (2, 4): 3, (3, 4): 4}

class GeometricModel:
    """
    This class is used to wrap a Netgen geometric model to a DOLFINx mesh.

    Args:
        geo: The Netgen model
        comm: The MPI communicator to use for mesh creation
    """
    def __init__(self, geo, comm: _MPI.Comm):
        self.geo = geo
        self.comm = comm

    def model_to_mesh(self, hmax: float, gdim: int = 2,
                      partitioner: typing.Callable[
                          [_MPI.Comm, int, int, dolfinx.cpp.graph.AdjacencyList_int32],
                          dolfinx.cpp.graph.AdjacencyList_int32] =
                      dolfinx.mesh.create_cell_partitioner(dolfinx.mesh.GhostMode.none),
                      transform: typing.Any = None,
                      routine: typing.Any = None) -> dolfinx.mesh.Mesh:
        """Given a Netgen model, take all physical entities of the highest
        topological dimension and create the corresponding DOLFINx mesh.

        This function only works in serial, at the moment.

        Args:
            hmax: The maximum diameter of the elements in the triangulation
            gdim: Geometrical dimension of the mesh
            partitioner: Function that computes the parallel
                distribution of cells across MPI ranks
            transform: PETSc DMPlex transform to be applied to the mesh
            routine: Function to be applied to the mesh after generation;
                it takes the mesh and the Netgen model as input and returns the
                same objects after the routine has been applied.

        Returns:
            A DOLFINx mesh for the given Netgen model.
        """
        # First we generate a mesh
        ngmesh = self.geo.GenerateMesh(maxh=hmax)
        # Apply any Netgen routine post meshing
        if routine is not None:
            ngmesh, self.geo = routine(ngmesh, self.geo)
        # Apply any PETSc transform by round-tripping through a DMPlex
        if transform is not None:
            meshMap = MeshMapping(ngmesh)
            transform.setDM(meshMap.plex)
            transform.setUp()
            newplex = transform.apply(meshMap.plex)
            meshMap = MeshMapping(newplex)
            ngmesh = meshMap.ngmesh
        # We extract topology and geometry
        if ngmesh.dim == 2:
            V = ngmesh.Coordinates()
            T = ngmesh.Elements2D().NumPy()["nodes"]
            T = np.array([list(np.trim_zeros(a, 'b')) for a in list(T)]) - 1
        elif ngmesh.dim == 3:
            V = ngmesh.Coordinates()
            T = ngmesh.Elements3D().NumPy()["nodes"]
            T = np.array([list(np.trim_zeros(a, 'b')) for a in list(T)]) - 1
        # Reuse the GMSH helpers in DOLFINx to build the UFL domain and to
        # permute the cell connectivity into DOLFINx ordering
        ufl_domain = dolfinx.io.gmshio.ufl_mesh(_ngs_to_cells[(gdim, T.shape[1])], gdim)
        cell_perm = dolfinx.cpp.io.perm_gmsh(dolfinx.cpp.mesh.to_type(str(ufl_domain.ufl_cell())),
                                             T.shape[1])
        T = T[:, cell_perm]
        mesh = dolfinx.mesh.create_mesh(self.comm, T, V, ufl_domain, partitioner)
        return mesh
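
Because model_to_mesh wires a supplied transform into the DMPlex round trip itself (it calls setDM and setUp before apply), a caller only creates the transform and picks its type. A minimal usage sketch, under the assumption that the petsc4py build is recent enough to expose DMPlexTransform and the REFINEREGULAR transform type:

# Sketch: mesh the unit square with Netgen, then uniformly refine it through
# a PETSc DMPlex transform before the mesh is handed to DOLFINx.
# Assumes petsc4py exposes DMPlexTransform / DMPlexTransformType.REFINEREGULAR.
from mpi4py import MPI
from petsc4py import PETSc
from netgen.geom2d import SplineGeometry

import ngsPETSc.utils.fenicsx as ngfx

geo = SplineGeometry()
geo.AddRectangle((0, 0), (1, 1))
model = ngfx.GeometricModel(geo, MPI.COMM_WORLD)

# model_to_mesh calls transform.setDM(...) and transform.setUp() itself,
# so only the transform type needs choosing here.
tr = PETSc.DMPlexTransform().create(comm=PETSc.COMM_WORLD)
tr.setType(PETSc.DMPlexTransformType.REFINEREGULAR)

msh = model.model_to_mesh(hmax=0.3, transform=tr)
print("cells:", msh.topology.index_map(msh.topology.dim).size_local)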
2 changes: 2 additions & 0 deletions ngsPETSc/utils/firedrake.py
@@ -1,5 +1,7 @@
 '''
 This module contains all the functions related to wrapping NGSolve meshes to Firedrake
+We adopt the same docstring conventions as the Firedrake project, since this part of
+the package will only be used in combination with Firedrake.
 '''
 try:
     import firedrake as fd
2 changes: 1 addition & 1 deletion tests/test_env.py
@@ -38,4 +38,4 @@ def test_ngsPETSc():
     '''
     Testing that ngsPETSc can be imported correctly
     '''
-    assert ngsPETSc.VERSION == "0.0.3"
+    assert ngsPETSc.VERSION == "0.0.4"
68 changes: 68 additions & 0 deletions tests/test_fenicsx.py
@@ -0,0 +1,68 @@
'''
This module tests the utils.fenicsx module
'''
import pytest

def test_square_netgen():
    '''
    Testing the FEniCSx interface with Netgen by generating a square mesh
    '''
    try:
        from mpi4py import MPI
        import ngsPETSc.utils.fenicsx as ngfx
        from dolfinx.io import XDMFFile
    except ImportError:
        pytest.skip("DOLFINx unavailable, skipping FEniCSx test")

    from netgen.geom2d import SplineGeometry
    geo = SplineGeometry()
    geo.AddRectangle((0, 0), (1, 1))
    geoModel = ngfx.GeometricModel(geo, MPI.COMM_WORLD)
    domain = geoModel.model_to_mesh(hmax=0.1)
    with XDMFFile(domain.comm, "XDMF/mesh.xdmf", "w") as xdmf:
        xdmf.write_mesh(domain)

def test_poisson_netgen():
    '''
    Testing the FEniCSx interface with Netgen by solving Poisson on a square mesh
    '''
    try:
        import numpy as np
        import ufl
        from dolfinx import fem, mesh
        from dolfinx.fem.petsc import LinearProblem
        from ufl import dx, grad, inner
        from mpi4py import MPI
        from petsc4py.PETSc import ScalarType
        import ngsPETSc.utils.fenicsx as ngfx
    except ImportError:
        pytest.skip("DOLFINx unavailable, skipping FEniCSx test")

    from netgen.geom2d import SplineGeometry
    geo = SplineGeometry()
    geo.AddRectangle((0, 0), (np.pi, np.pi))
    geoModel = ngfx.GeometricModel(geo, MPI.COMM_WORLD)
    msh = geoModel.model_to_mesh(hmax=0.1)
    V = fem.FunctionSpace(msh, ("Lagrange", 2))
    facetsLR = mesh.locate_entities_boundary(msh, dim=(msh.topology.dim - 1),
                                             marker=lambda x: np.logical_or(np.isclose(x[0], 0.0),
                                                                            np.isclose(x[0], np.pi)))
    facetsTB = mesh.locate_entities_boundary(msh, dim=(msh.topology.dim - 1),
                                             marker=lambda x: np.logical_or(np.isclose(x[1], 0.0),
                                                                            np.isclose(x[1], np.pi)))
    facets = np.append(facetsLR, facetsTB)
    dofs = fem.locate_dofs_topological(V=V, entity_dim=1, entities=facets)
    bc = fem.dirichletbc(value=ScalarType(0), dofs=dofs, V=V)
    u = ufl.TrialFunction(V)
    v = ufl.TestFunction(V)
    x = ufl.SpatialCoordinate(msh)
    f = ufl.exp(ufl.sin(x[0])*ufl.sin(x[1]))
    a = inner(grad(u), grad(v)) * dx
    L = inner(f, v) * dx
    problem = LinearProblem(a, L, bcs=[bc],
                            petsc_options={"ksp_type": "cg", "pc_type": "qr"})
    problem.solve()

if __name__ == "__main__":
    test_square_netgen()
    test_poisson_netgen()