diff --git a/.bumpversion.cfg b/.bumpversion.cfg deleted file mode 100644 index b3bbe38..0000000 --- a/.bumpversion.cfg +++ /dev/null @@ -1,3 +0,0 @@ -[bumpversion] -current_version = 0.0.3 -files = setup.py pydiso/__init__.py diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index aca8f6b..0000000 --- a/.coveragerc +++ /dev/null @@ -1,5 +0,0 @@ -[run] -source = pydiso -plugins = Cython.Coverage -omit = - */setup.py diff --git a/.git_archival.txt b/.git_archival.txt new file mode 100644 index 0000000..b1a286b --- /dev/null +++ b/.git_archival.txt @@ -0,0 +1,4 @@ +node: $Format:%H$ +node-date: $Format:%cI$ +describe-name: $Format:%(describe:tags=true,match=*[0-9]*)$ +ref-names: $Format:%D$ \ No newline at end of file diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..e834cfc --- /dev/null +++ b/.gitattributes @@ -0,0 +1,11 @@ +.git_archival.txt export-subst +# Excluding files from an sdist generated by meson-python + +.azure-pipelines/* export-ignore +.ci/* export-ignore +tests/* export-ignore + +.flake8 export-ignore +.git* export-ignore +*.yml export-ignore +*.yaml export-ignore \ No newline at end of file diff --git a/.github/workflows/python-package-conda.yml b/.github/workflows/python-package-conda.yml index ca841fc..925d788 100644 --- a/.github/workflows/python-package-conda.yml +++ b/.github/workflows/python-package-conda.yml @@ -14,61 +14,71 @@ on: jobs: build-and-test: - name: Testing (${{ matrix.python-version }}, ${{ matrix.os }}) + name: Testing (Python ${{ matrix.python-version }}, on ${{ matrix.os }}, with MKL ${{ matrix.mkl-version }}) runs-on: ${{ matrix.os }} defaults: run: shell: bash -l {0} strategy: - fail-fast: True + fail-fast: False matrix: - os: [ubuntu-latest, macOS-latest, windows-latest] - python-version: ["3.8", "3.9", "3.10", "3.11"] + os: [ubuntu-latest, macos-latest, windows-latest] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + mkl-version: ['2023'] # currently 2024 fails building for some reason... + include: + - os: ubuntu-latest + python-version: "3.12" + coverage: ${{ true }} + - os: ubuntu-latest + python-version: "3.12" + mkl-version: '2024' + - os: windows-latest + python-version: "3.12" + mkl-version: '2024' steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Setup Conda - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: - auto-update-conda: true - activate-environment: dev python-version: ${{ matrix.python-version }} - - name: Install Env + mamba-version: '*' + channels: conda-forge, defaults + channel-priority: true + activate-environment: dev + + - name: Conda information run: | conda info conda list conda config --show - conda install --quiet --yes pip numpy scipy cython mkl pytest pytest-cov mkl-devel - - name: Install Our Package - if: ${{ matrix.os != 'windows-latest' }} + - name: Create environment run: | - echo $CONDA_PREFIX - export TEST_COV=1 - pip install -v -e . - conda list + mamba install --quiet --yes pip numpy scipy cython mkl=${{ matrix.mkl-version }} pytest \ + mkl-devel pkg-config meson-python meson ninja setuptools_scm \ + ${{ matrix.coverage && 'coverage' || ''}} - - name: Install Our Package on Windows - if: ${{ matrix.os == 'windows-latest' }} + - name: Install Our Package run: | - echo $CONDA_PREFIX - export MKLROOT="$CONDA_PREFIX\Library" - echo $MKLROOT - pip install -v -e . + python -m pip install --no-build-isolation --verbose --editable . 
\
+            --config-setting=compile-args=-v \
+            ${{ matrix.coverage && '--config-settings=setup-args="-Dcy_coverage=true"' || ''}}
           conda list

       - name: Run Tests
         run: |
-          pytest --cov-config=.coveragerc --cov-report=xml --cov=pydiso -s -v
+          ${{ matrix.coverage && 'coverage run -m' || '' }} pytest -s -v
+          ${{ matrix.coverage && 'coverage xml' || '' }}

       - name: Upload coverage
-        if: ${{ matrix.os == 'ubuntu-latest' }} and {{ matrix.python-version == '3.8' }}
-        uses: codecov/codecov-action@v2
+        if: ${{ matrix.coverage }}
+        uses: codecov/codecov-action@v4
         with:
           verbose: true # optional (default = false)

   distribute:
-    name: Distributing from 3.8
+    name: Distributing from 3.11
     needs: build-and-test
     if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
     runs-on: ubuntu-latest
@@ -77,29 +87,31 @@ jobs:
         shell: bash -l {0}

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Setup Conda
-        uses: conda-incubator/setup-miniconda@v2
+        uses: conda-incubator/setup-miniconda@v3
         with:
-          auto-update-conda: true
+          python-version: 3.11
+          mamba-version: '*'
+          channels: conda-forge, defaults
+          channel-priority: true
           activate-environment: dev
-          python-version: 3.8

       - name: Install Env
         run: |
           conda info
           conda list
           conda config --show
-          conda install --quiet --yes pip numpy scipy cython mkl mkl-devel

-      - name: Install Our Package
+      - name: Create environment
         run: |
-          echo $CONDA_PREFIX
-          pip install -v -e .
+          mamba install --quiet --yes pip numpy scipy cython mkl=2023 \
+            mkl-devel pkg-config meson-python meson ninja setuptools_scm \
+            python-build

       - name: Generate Source Distribution
         run: |
-          python setup.py sdist
+          python -m build --no-isolation --skip-dependency-check --sdist .

       - name: pypi-publish
         uses: pypa/gh-action-pypi-publish@v1.4.2
diff --git a/.gitignore b/.gitignore
index f107869..72fc51b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,7 +1,12 @@
 build
+dist
+coverage_html_report
 *.so
 pydiso.egg-info
 __pycache__
 mkl_solver.c
+mkl_solver.c.dep
 .coverage
 coverage.xml
+
+.idea/
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index 9bdc9a3..0000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,3 +0,0 @@
-include *.rst LICENSE
-global-include *.pyx
-global-include *.pxd
diff --git a/README.md b/README.md
index 814e90c..f003fee 100644
--- a/README.md
+++ b/README.md
@@ -1,36 +1,29 @@
 # pydiso

 Pydiso is a python wrapper for the pardiso solver. It is currently implemented for the
-Intel MKL's version of pardiso. It's goal is to expose the full functionality of pardiso
+Intel MKL's version of pardiso. Its goal is to expose the full functionality of pardiso
 to python, allowing the user to customize it as necessary for their use cases.

 # Installation
-### Installing from source
+## Installing from source

 The wrapper is written in cython and links to the mkl libraries dynamically. Therefore,
 it needs to find the necessary header files associated with the MKL installation to compile.
-For conda users, these headers can be installed with `mkl-devel` package that is available
-on the default channel, conda-forge channel, the intel channel, or others, e.g.
+The meson build backend uses pkg-config to identify the locations of the MKL header files
+and dynamic libraries. Most development installations of MKL should provide the
+necessary pkg-config files for this. For example, conda users can install the necessary
+configuration information with the `mkl-devel` package, which is available on the default
+channel, the conda-forge channel, the intel channel, and others, e.g.

 `conda install mkl-devel`

-Most of the time, your respective numpy installations will then be intelligent enough to
-identify the location of the installed MKL. However if you run into issues automatically
-finding the library headers, you will need to set the `MKLROOT` environment variable to
-point to the correct location. On Linux and MacOS the `mkl-rt` library and `mkl.h` are
-usually in the same folder, and thus the root should point to that directory. On Windows
-the `mkl-rt.h` and `mkl-rt.lib` are not in the same folder but seperated a level into
-`Library` and `Include` directories, and in this case `MKLROOT` would point to the folder
-containing them.
+If you have installed the configuration files to a non-standard location, you will need to set
+`PKG_CONFIG_PATH` to point to that location.

-After the necessary MKL files are accessible, you should be able to do the standard install
-script common to python packages by running either

-`python setup.py install`
-
-or, equivalently
+After the necessary MKL files are accessible, you should be able to install by running

 `pip install .`
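For a conda-based environment, the install-from-source steps above might look like the following sketch. It assumes the pkg-config files from `mkl-devel` land in `$CONDA_PREFIX/lib/pkgconfig` (the usual Linux/macOS layout) and uses the `mkl-sdl`/`mkl-dynamic-*` module names that the meson build queries; adjust paths and names to your installation.

```bash
# install MKL development files plus the build tooling
conda install mkl-devel pkg-config meson-python meson ninja cython numpy setuptools_scm

# confirm pkg-config can see MKL (the build looks up e.g. mkl-sdl or mkl-dynamic-lp64-seq)
pkg-config --modversion mkl-sdl

# only needed if the .pc files live somewhere non-standard
export PKG_CONFIG_PATH="$CONDA_PREFIX/lib/pkgconfig:$PKG_CONFIG_PATH"

# build and install pydiso against the detected MKL
pip install .
```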
diff --git a/meson.build b/meson.build
new file mode 100644
index 0000000..3303d66
--- /dev/null
+++ b/meson.build
@@ -0,0 +1,50 @@
+project(
+  'pydiso',
+  'c', 'cython',
+  # Note that the git commit hash cannot be added dynamically here
+  # (it is dynamically generated through setuptools_scm)
+  version: run_command('python',
+    [
+      '-c',
+      '''
+from setuptools_scm import get_version
+print(get_version())'''
+    ],
+    check: true
+  ).stdout().strip(),
+
+  license: 'MIT',
+  meson_version: '>= 1.1.0',
+  default_options: [
+    'buildtype=debugoptimized',
+    'b_ndebug=if-release',
+  ],
+)
+
+# https://mesonbuild.com/Python-module.html
+py_mod = import('python')
+py = py_mod.find_installation(pure: false)
+py_dep = py.dependency()
+
+cc = meson.get_compiler('c')
+cy = meson.get_compiler('cython')
+# generator() doesn't accept compilers, only found programs - cast it.
+cython = find_program(cy.cmd_array()[0])
+
+_global_c_args = cc.get_supported_arguments(
+  '-Wno-unused-but-set-variable',
+  '-Wno-unused-function',
+  '-Wno-conversion',
+  '-Wno-misleading-indentation',
+)
+add_project_arguments(_global_c_args, language : 'c')
+
+# We need -lm for all C code (assuming it uses math functions, which is a safe
+# assumption here). For C++ it isn't needed, because libstdc++/libc++ is
+# guaranteed to depend on it.
+m_dep = cc.find_library('m', required : false)
+if m_dep.found()
+  add_project_link_arguments('-lm', language : 'c')
+endif
+
+subdir('pydiso')
\ No newline at end of file
diff --git a/meson.options b/meson.options
new file mode 100644
index 0000000..f3968b5
--- /dev/null
+++ b/meson.options
@@ -0,0 +1,10 @@
+option('cy_coverage', type : 'boolean', value : false)
+
+option('use-sdl', type: 'boolean', value: true,
+       description: 'Use the single dynamic library.')
+
+option('use-ilp64', type: 'boolean', value: false,
+       description: 'Use ILP64 (64-bit integer) BLAS and LAPACK interfaces')
+
+option('mkl-threading', type: 'string', value: 'auto',
+       description: 'MKL threading method, one of: `seq`, `iomp`, `gomp`, `tbb`')
\ No newline at end of file
diff --git a/pydiso/__init__.py b/pydiso/__init__.py
index 759868c..4e72450 100644
--- a/pydiso/__init__.py
+++ b/pydiso/__init__.py
@@ -1,4 +1,19 @@
-__version__ = "0.0.3"
 __author__ = "SimPEG Team"
 __license__ = "MIT"
 __copyright__ = "2021, SimPEG Developers, http://simpeg.xyz"
+
+from importlib.metadata import version, PackageNotFoundError
+
+# Version
+try:
+    # - Released versions are just the tag: 0.8.0
+    # - GitHub commits add .dev#+hash: 0.8.1.dev4+g2785721
+    # - Uncommitted changes add timestamp: 0.8.1.dev4+g2785721.d20191022
+    __version__ = version("pydiso")
+except PackageNotFoundError:
+    # If it was not installed, then we don't know the version. We could throw a
+    # warning here, but this case *should* be rare. pydiso should be
+    # installed properly!
+    from datetime import datetime
+
+    __version__ = "unknown-" + datetime.today().strftime("%Y%m%d")
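The options declared in `meson.options` can be passed through at install time via meson-python's `setup-args` passthrough, the same mechanism the workflow above uses for `cy_coverage`. A hedged sketch follows; the option names are the ones declared above, and repeating `--config-settings` for several options requires a reasonably recent pip.

```bash
# build against the ILP64 (64-bit integer) MKL interface instead of LP64
pip install . --config-settings=setup-args="-Duse-ilp64=true"

# pick an explicit threading layer instead of the sequential fallback,
# linking the component libraries rather than the single dynamic library
pip install . \
    --config-settings=setup-args="-Duse-sdl=false" \
    --config-settings=setup-args="-Dmkl-threading=iomp"

# editable install with Cython line tracing, as the coverage CI job does
pip install --no-build-isolation --editable . \
    --config-settings=setup-args="-Dcy_coverage=true"
```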
diff --git a/pydiso/meson.build b/pydiso/meson.build
new file mode 100644
index 0000000..fd7f01c
--- /dev/null
+++ b/pydiso/meson.build
@@ -0,0 +1,128 @@
+# NumPy include directory
+# The try-except is needed because when things are
+# split across drives on Windows, there is no relative path and an exception
+# gets raised. There may be other such cases, so add a catch-all and switch to
+# an absolute path. Relative paths are needed when for example a virtualenv is
+# placed inside the source tree; Meson rejects absolute paths to places inside
+# the source tree.
+# For cross-compilation it is often not possible to run the Python interpreter
+# in order to retrieve numpy's include directory. It can be specified in the
+# cross file instead:
+#   [properties]
+#   numpy-include-dir = /abspath/to/host-pythons/site-packages/numpy/core/include
+#
+# This uses the path as is, and avoids running the interpreter.
+incdir_numpy = meson.get_external_property('numpy-include-dir', 'not-given')
+if incdir_numpy == 'not-given'
+  incdir_numpy = run_command(py,
+    [
+      '-c',
+      '''import os
+import numpy as np
+try:
+    incdir = os.path.relpath(np.get_include())
+except Exception:
+    incdir = np.get_include()
+print(incdir)
+    '''
+    ],
+    check: true
+  ).stdout().strip()
+else
+  _incdir_numpy_abs = incdir_numpy
+endif
+inc_np = include_directories(incdir_numpy)
+np_dep = declare_dependency(include_directories: inc_np)
+
+# MKL-specific options
+mkl_dep_name = 'mkl-dynamic'
+
+use_ilp64 = get_option('use-ilp64')
+if use_ilp64
+  mkl_dep_name += '-ilp64'
+else
+  mkl_dep_name += '-lp64'
+endif
+
+# MKL-specific options
+_threading_opt = get_option('mkl-threading')
+if _threading_opt == 'auto'
+  # openmp.pc not included with conda-forge distribution (yet)
+  mkl_dep_name += '-seq'
+else
+  mkl_dep_name += '-' + _threading_opt
+endif
+
+mkl_version_req = '>=2023.0'
+
+use_sdl = get_option('use-sdl')
+
+if use_sdl
+  mkl_dep = dependency('mkl-sdl', required: true)
+else
+  # find mkl
+  mkl_dep = dependency(mkl_dep_name,
+    required: false,  # may be required, but we need to emit a custom error message
+    version: mkl_version_req,
+  )
+
+  mkl_may_use_sdl = not use_ilp64 and _threading_opt in ['auto', 'iomp']
+
+  # Insert a second try with MKL, because we may be rejecting older versions
+  # or missing it because no pkg-config installed. If so, we need to retry
+  # with MKL SDL, and drop the version constraint.
+  if not mkl_dep.found() and mkl_may_use_sdl
+    mkl_dep = dependency('mkl-sdl', required: true)
+  endif
+
+endif
+
+# Deal with M_PI & friends; add `use_math_defines` to c_args or cpp_args
+# Cython doesn't always get this right itself (see, e.g., gh-16800), so
+# explicitly add the define as a compiler flag for Cython-generated code.
+is_windows = host_machine.system() == 'windows'
+if is_windows
+  use_math_defines = ['-D_USE_MATH_DEFINES']
+else
+  use_math_defines = []
+endif
+
+numpy_nodepr_api = '-DNPY_NO_DEPRECATED_API=NPY_1_9_API_VERSION'
+c_undefined_ok = ['-Wno-maybe-uninitialized']
+
+cython_c_args = [numpy_nodepr_api, use_math_defines]
+
+cython_file = 'mkl_solver.pyx'
+cython_file_full_path = meson.current_source_dir() / cython_file
+
+if get_option('cy_coverage')
+  # tell cython to enable line tracing
+  add_project_arguments(['--directive', 'linetrace=true'], language : 'cython')
+  # tell the C compiler to define CYTHON_TRACE_NOGIL
+  add_project_arguments(['-DCYTHON_TRACE_NOGIL=1'], language : 'c')
+
+  # compile the .c file from the .pyx file in its directory.
+ # These should include the default options passed to the cython compiler + run_command(cython, '-M', '--fast-fail', '-3', '--directive', 'linetrace=true', cython_file_full_path) +endif + +module_path = 'pydiso' + +py.extension_module( + 'mkl_solver', + cython_file, + include_directories: incdir_numpy, + c_args: cython_c_args, + install: true, + subdir: module_path, + dependencies : [py_dep, np_dep, mkl_dep], +) + +python_sources = [ + '__init__.py', +] + +py.install_sources( + python_sources, + subdir: module_path +) \ No newline at end of file diff --git a/pydiso/mkl_solver.pyx b/pydiso/mkl_solver.pyx index e1d8c7f..5d0a64e 100644 --- a/pydiso/mkl_solver.pyx +++ b/pydiso/mkl_solver.pyx @@ -1,6 +1,6 @@ #cython: language_level=3 -#cython: linetrace=True cimport numpy as np +import cython from cython cimport numeric from cpython.pythread cimport ( PyThread_type_lock, @@ -15,9 +15,10 @@ import numpy as np import scipy.sparse as sp import os -ctypedef long long MKL_INT64 -ctypedef unsigned long long MKL_UINT64 -ctypedef int MKL_INT +cdef extern from 'mkl.h': + ctypedef long long MKL_INT64 + ctypedef unsigned long long MKL_UINT64 + ctypedef int MKL_INT ctypedef MKL_INT int_t ctypedef MKL_INT64 long_t @@ -41,15 +42,15 @@ cdef extern from 'mkl.h': int mkl_get_max_threads() int mkl_domain_get_max_threads(int domain) - ctypedef int (*ProgressEntry)(int_t* thread, int_t* step, char* stage, int_t stage_len) except? -1; + ctypedef int (*ProgressEntry)(int* thread, int* step, char* stage, int stage_len) except? -1; ProgressEntry mkl_set_progress(ProgressEntry progress); ctypedef void * _MKL_DSS_HANDLE_t - void pardiso(_MKL_DSS_HANDLE_t, const int*, const int*, const int*, - const int *, const int *, const void *, const int *, - const int *, int *, const int *, int *, - const int *, void *, void *, int *) nogil + void pardiso(_MKL_DSS_HANDLE_t, const int_t*, const int_t*, const int_t*, + const int_t *, const int_t *, const void *, const int_t *, + const int_t *, int_t *, const int_t *, int_t *, + const int_t *, void *, void *, int_t *) nogil void pardiso_64(_MKL_DSS_HANDLE_t, const long_t *, const long_t *, const long_t *, const long_t *, const long_t *, const void *, const long_t *, @@ -58,11 +59,11 @@ cdef extern from 'mkl.h': #call pardiso (pt, maxfct, mnum, mtype, phase, n, a, ia, ja, perm, nrhs, iparm, msglvl, b, x, error) -cdef int mkl_progress(int_t *thread, int_t* step, char* stage, int_t stage_len): +cdef int mkl_progress(int *thread, int* step, char* stage, int stage_len): print(thread[0], step[0], stage, stage_len) return 0 -cdef int mkl_no_progress(int_t *thread, int_t* step, char* stage, int_t stage_len) nogil: +cdef int mkl_no_progress(int *thread, int* step, char* stage, int stage_len) nogil: return 0 MATRIX_TYPES ={ @@ -111,7 +112,7 @@ def _ensure_csr(A, sym=False): if sym and sp.isspmatrix_csc(A): A = A.T else: - warnings.warn("Converting %s matrix to CSR format, will slow down." + warnings.warn("Converting %s matrix to CSR format." 
%A.__class__.__name__, PardisoTypeConversionWarning) A = A.tocsr() return A @@ -190,12 +191,17 @@ cdef class MKLPardisoSolver: cdef int_t mat_type cdef int_t _factored cdef size_t shape[2] - cdef int_t _initialized cdef PyThread_type_lock lock cdef void * a cdef object _data_type - cdef object _Adata #a reference to make sure the pointer "a" doesn't get destroyed + cdef object _Adata # a reference to make sure the pointer "a" doesn't get destroyed + + def __cinit__(self, *args, **kwargs): + self.lock = PyThread_allocate_lock() + + for i in range(64): + self.handle[i] = NULL def __init__(self, A, matrix_type=None, factor=True, verbose=False): '''ParidsoSolver(A, matrix_type=None, factor=True, verbose=False) @@ -254,15 +260,11 @@ cdef class MKLPardisoSolver: >>> np.allclose(x, x_solved) True ''' - self._initialized = False n_row, n_col = A.shape if n_row != n_col: raise ValueError("Matrix is not square") self.shape = n_row, n_col - # allocate the lock - self.lock = PyThread_allocate_lock() - self._data_type = A.dtype if matrix_type is None: if np.issubdtype(self._data_type, np.complexfloating): @@ -295,18 +297,17 @@ cdef class MKLPardisoSolver: #set integer length integer_len = A.indices.itemsize - self._is_32 = integer_len == sizeof(int_t) + #self._is_32 = integer_len == sizeof(int_t) + self._is_32 = sizeof(int_t) == 8 or integer_len == sizeof(int_t) + if self._is_32: self._par = _PardisoParams() self._initialize(self._par, A, matrix_type, verbose) - elif integer_len == 8: + else: self._par64 = _PardisoParams64() self._initialize(self._par64, A, matrix_type, verbose) - else: - raise PardisoError("Unrecognized integer length") - self._initialized = True - if(verbose): + if verbose: #for reporting factorization progress via python's `print` mkl_set_progress(mkl_progress) else: @@ -343,6 +344,13 @@ cdef class MKLPardisoSolver: self._set_A(A.data) self._factor() + cdef _initialized(self): + cdef int i + for i in range(64): + if self.handle[i]: + return 1 + return 0 + def __call__(self, b): return self.solve(b) @@ -443,8 +451,13 @@ cdef class MKLPardisoSolver: return self.iparm[17] cdef _initialize(self, _par_params par, A, matrix_type, verbose): + + if _par_params is _PardisoParams: + int_dtype = f'i{sizeof(int_t)}' + else: + int_dtype = 'i8' par.n = A.shape[0] - par.perm = np.empty(par.n, dtype=np.int32) + par.perm = np.empty(par.n, dtype=int_dtype) par.maxfct = 1 par.mnum = 1 @@ -452,6 +465,9 @@ cdef class MKLPardisoSolver: par.mtype = matrix_type par.msglvl = verbose + for i in range(64): + par.iparm[i] = 0 # ensure these all start at 0 + # set default parameters par.iparm[0] = 1 # tell pardiso to not reset these values on the first call par.iparm[1] = 2 # The nested dissection algorithm from the METIS @@ -486,15 +502,8 @@ cdef class MKLPardisoSolver: par.iparm[55] = 0 # Internal function used to work with pivot and calculation of diagonal arrays turned off. 
par.iparm[59] = 0 # operate in-core mode - if _par_params is _PardisoParams: - indices = np.require(A.indices, dtype=np.int32) - indptr = np.require(A.indptr, dtype=np.int32) - else: - indices = np.require(A.indices, dtype=np.int64) - indptr = np.require(A.indptr, dtype=np.int64) - - par.ia = indptr - par.ja = indices + par.ia = np.require(A.indptr, dtype=int_dtype) + par.ja = np.require(A.indices, dtype=int_dtype) cdef _set_A(self, data): self._Adata = data @@ -505,24 +514,28 @@ cdef class MKLPardisoSolver: cdef int_t phase=-1, nrhs=0, error=0 cdef long_t phase64=-1, nrhs64=0, error64=0 - if self._initialized: + if self._initialized(): PyThread_acquire_lock(self.lock, 1) if self._is_32: pardiso( self.handle, &self._par.maxfct, &self._par.mnum, &self._par.mtype, - &phase, &self._par.n, self.a, NULL, NULL, NULL, &nrhs, self._par.iparm, + &phase, &self._par.n, NULL, NULL, NULL, NULL, &nrhs, self._par.iparm, &self._par.msglvl, NULL, NULL, &error ) else: pardiso_64( self.handle, &self._par64.maxfct, &self._par64.mnum, &self._par64.mtype, - &phase64, &self._par64.n, self.a, NULL, NULL, NULL, &nrhs64, + &phase64, &self._par64.n, NULL, NULL, NULL, NULL, &nrhs64, self._par64.iparm, &self._par64.msglvl, NULL, NULL, &error64 ) PyThread_release_lock(self.lock) err = error or error64 if err!=0: - raise PardisoError("Memmory release error "+_err_messages[err]) + raise PardisoError("Memory release error "+_err_messages[err]) + for i in range(64): + self.handle[i] = NULL + + if self.lock: #dealloc lock PyThread_free_lock(self.lock) @@ -537,8 +550,10 @@ cdef class MKLPardisoSolver: self._factored = False err = self._run_pardiso(22) + if err!=0: raise PardisoError("Factor step error, "+_err_messages[err]) + self._factored = True cdef _solve(self, void* b, void* x, int_t nrhs_in): @@ -550,6 +565,7 @@ cdef class MKLPardisoSolver: if err!=0: raise PardisoError("Solve step error, "+_err_messages[err]) + @cython.boundscheck(False) cdef int _run_pardiso(self, int_t phase, void* b=NULL, void* x=NULL, int_t nrhs=0) nogil: cdef int_t error=0 cdef long_t error64=0, phase64=phase, nrhs64=nrhs @@ -559,11 +575,10 @@ cdef class MKLPardisoSolver: pardiso(self.handle, &self._par.maxfct, &self._par.mnum, &self._par.mtype, &phase, &self._par.n, self.a, &self._par.ia[0], &self._par.ja[0], &self._par.perm[0], &nrhs, self._par.iparm, &self._par.msglvl, b, x, &error) - PyThread_release_lock(self.lock) - return error else: pardiso_64(self.handle, &self._par64.maxfct, &self._par64.mnum, &self._par64.mtype, &phase64, &self._par64.n, self.a, &self._par64.ia[0], &self._par64.ja[0], &self._par64.perm[0], &nrhs64, self._par64.iparm, &self._par64.msglvl, b, x, &error64) - PyThread_release_lock(self.lock) - return error64 + PyThread_release_lock(self.lock) + error = error or error64 + return error \ No newline at end of file diff --git a/pydiso/setup.py b/pydiso/setup.py deleted file mode 100644 index f951490..0000000 --- a/pydiso/setup.py +++ /dev/null @@ -1,49 +0,0 @@ -import os -from os.path import join, abspath, dirname - -base_path = abspath(dirname(__file__)) - - -# Enable line tracing for coverage of cython files conditionally -ext_kwargs = {} -if os.environ.get("TEST_COV", None) is not None: - ext_kwargs["define_macros"] = [("CYTHON_TRACE_NOGIL", 1)] - - -def configuration(parent_package="", top_path=None): - from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs - import numpy.distutils.system_info as sysinfo - config = Configuration("pydiso", parent_package, top_path) - - try: - from Cython.Build import 
cythonize - cythonize(join(base_path, "mkl_solver.pyx")) - except ImportError: - pass - - # get information about mkl location - mkl_root = os.environ.get('MKLROOT', None) - if mkl_root is None: - mkl_info = sysinfo.get_info('mkl') - else: - mkl_info = { - 'include_dirs': [join(mkl_root, 'include')], - 'library_dirs': [join(mkl_root, 'lib'), join(mkl_root, 'lib', 'intel64')], - 'libraries': ['mkl_rt'] - } - - mkl_include_dirs = mkl_info.get('include_dirs', []) - mkl_library_dirs = mkl_info.get('library_dirs', []) - mkl_libraries = mkl_info.get('libraries', ['mkl_rt']) - - config.add_extension( - "mkl_solver", - sources=["mkl_solver.c"], - libraries=mkl_libraries, - include_dirs=get_numpy_include_dirs() + mkl_include_dirs, - library_dirs=mkl_library_dirs, - extra_compile_args=['-w'], - **ext_kwargs - ) - - return config diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..0178128 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,124 @@ + +[build-system] +build-backend = 'mesonpy' +requires = [ + "meson-python>=0.14.0", + "Cython>=0.29.35", # when updating version, also update check in meson.build + "setuptools_scm[toml]>=6.2", + + # This package automatically provides all of the numpy pinning for different python + # versions and runtime requirements. + "oldest-supported-numpy", + + # The following is taken from scipy's pyproject.toml file to handle + # building against the proper numpy API + + # When numpy 2.0.0rc1 comes out, we should update this to build against 2.0, + # and then runtime depend on the range 1.22.X to <2.3. No need to switch to + # 1.25.2 in the meantime (1.25.x is the first version which exports older C + # API versions by default). + + # default numpy requirements + # "numpy==1.22.4; python_version<='3.10' and platform_python_implementation != 'PyPy'", + # "numpy==1.23.2; python_version=='3.11' and platform_python_implementation != 'PyPy'", + + # For Python versions which aren't yet officially supported, we specify an + # unpinned NumPy which allows source distributions to be used and allows + # wheels to be used as soon as they become available. 
+ # "numpy>=1.26.0b1; python_version>='3.12'", + # "numpy; python_version>='3.8' and platform_python_implementation=='PyPy'", +] + +[project] +name = 'pydiso' +dynamic = ["version"] +description = "Wrapper for intel's pardiso implementation in the MKL" +readme = 'README.md' +requires-python = '>=3.8' +authors = [ + {name = 'SimPEG developers', email = 'josephrcapriotti@gmail.com'}, +] +keywords = [ + 'sparse', 'solver', 'wrapper', +] + +# Note: Python and NumPy upper version bounds should be set correctly in +# release branches, see: +# https://scipy.github.io/devdocs/dev/core-dev/index.html#version-ranges-for-numpy-and-other-dependencies +dependencies = [ + # TODO: update to "pin-compatible" once possible, see + # https://github.com/mesonbuild/meson-python/issues/29 + "numpy>=1.22.4", + "scipy>=1.8", +] +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python", + "Programming Language :: Cython", + "Topic :: Scientific/Engineering", + "Topic :: Scientific/Engineering :: Mathematics", + "Topic :: Scientific/Engineering :: Physics", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX", + "Operating System :: Unix", + "Operating System :: MacOS", + "Natural Language :: English", +] + +[project.license] +file = 'LICENSE' + +[project.optional-dependencies] +test = [ + "pytest", + "pytest-cov", +] + +build = [ + "meson-python>=0.14.0", + "meson", + "ninja", + "numpy>=1.22.4", + "cython>=0.29.35", + "setuptools_scm", +] + +[project.urls] +Homepage = 'https://simpeg.xyz' +#Documentation = 'https://discretize.simpeg.xyz' +Repository = 'https://github.com/simpeg/pydiso.git' + +[tool.setuptools_scm] + +[tool.coverage.run] +branch = true +plugins = ["Cython.Coverage"] +source = ["pydiso", "tests"] + +[tool.coverage.report] +ignore_errors = false +show_missing = true +# Regexes for lines to exclude from consideration +exclude_also = [ + # Don't complain about missing debug-only code: + "def __repr__", + "if self\\.debug", + + # Don't complain if tests don't hit defensive assertion code: + "raise AssertionError", + "raise NotImplementedError", + "AbstractMethodError", + + # Don't complain if non-runnable code isn't run: + "if 0:", + "if __name__ == .__main__.:", + + # Don't complain about abstract methods, they aren't run: + "@(abc\\.)?abstractmethod", +] + +[tool.coverage.html] +directory = "coverage_html_report" \ No newline at end of file diff --git a/setup.py b/setup.py deleted file mode 100644 index ac83b4c..0000000 --- a/setup.py +++ /dev/null @@ -1,70 +0,0 @@ -from distutils.core import setup -from setuptools import find_packages - -import sys - -def configuration(parent_package="", top_path=None): - from numpy.distutils.misc_util import Configuration - - config = Configuration(None, parent_package, top_path) - config.set_options( - ignore_setup_xxx_py=True, - assume_default_configuration=True, - delegate_options_to_subpackages=True, - quiet=True, - ) - - config.add_subpackage("pydiso") - - return config - -metadata = dict( - name='pydiso', - version='0.0.3', - python_requires=">=3.8", - setup_requires=[ - "numpy>=1.8", - "cython>=3.0", - ], - install_requires=[ - 'numpy>=1.8', - 'scipy>=0.13', - ], - author='SimPEG developers', - author_email='josephrcapriotti@gmail.com', - description="Wrapper for intel's pardiso implementation in the MKL", - keywords='sparse, solver, wrapper', - url='https://www.simpeg.xyz', - 
download_url='https://github.com/jcapriot/pydiso-mkl', - platforms=['Windows', 'Linux', 'Solaris', 'Mac OS-X', 'Unix'], - license='MIT License' -) - -if len(sys.argv) >= 2 and ( - "--help" in sys.argv[1:] - or sys.argv[1] in ("--help-commands", "egg_info", "--version", "clean") -): - # For these actions, NumPy is not required. - # - # They are required to succeed without Numpy, for example when - # pip is used to install discretize when Numpy is not yet present in - # the system. - try: - from setuptools import setup - except ImportError: - from distutils.core import setup -else: - if (len(sys.argv) >= 2 and sys.argv[1] in ("bdist_wheel", "bdist_egg")) or ( - "develop" in sys.argv - ): - # bdist_wheel/bdist_egg needs setuptools - import setuptools - - from numpy.distutils.core import setup - - # Add the configuration to the setup dict when building - # after numpy is installed - metadata["configuration"] = configuration - - -setup(**metadata)
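Taken together, a local development loop that mirrors the updated CI might look like the following sketch; it assumes the build requirements listed in the `build` extra are already installed in the active environment, and that the package was installed with `-Dcy_coverage=true` as shown earlier.

```bash
# run the test suite with Cython line tracing collected by coverage,
# matching the 'Run Tests' step of the workflow
coverage run -m pytest -s -v
coverage xml    # report consumed by codecov in CI
coverage html   # local report, written to coverage_html_report/ per pyproject.toml

# build a source distribution the same way the 'distribute' job does
python -m build --no-isolation --skip-dependency-check --sdist .
```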