diff --git a/.github/workflows/push_pr.yml b/.github/workflows/push_pr.yml
index f05b272..0e85c55 100644
--- a/.github/workflows/push_pr.yml
+++ b/.github/workflows/push_pr.yml
@@ -12,7 +12,7 @@ jobs:
     strategy:
       matrix:
         os: ["ubuntu-latest"]
-        python-version: [ "3.9", "3.10", "3.11", "3.12"]
+        python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13"]
     steps:
     - uses: actions/checkout@v4
       with:
@@ -38,6 +38,6 @@ jobs:
     - name: Run pytest
       shell: bash -l {0}
       run: |
-        conda install pytest h5py
-        pip install .
-        pytest -k "not orientation" tests
+        conda install pytest h5py pillow
+        pip install --no-deps .
+        pytest -m "not orientation" tests
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e8c49f4..1c66664 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,11 @@
 This document contains the Spec2nii release history in reverse chronological order.
 
+0.8.5 (Thursday 31st October 2024)
+----------------------------------
+- Add special case handling for DICOM dkd_svs_mslaser_msspnav sequence
+- More GE HBCD sequence adjustments.
+- Python 3.13 compatibility and testing, scipy dependency now >=1.13
+
 0.8.4 (Monday 23rd September 2024)
 -------------------------------
 - GE HBCD sequence adjustments.
diff --git a/setup.py b/setup.py
index 61871f0..b1e1972 100644
--- a/setup.py
+++ b/setup.py
@@ -31,6 +31,7 @@
                    "Programming Language :: Python :: 3.10",
                    "Programming Language :: Python :: 3.11",
                    "Programming Language :: Python :: 3.12",
+                   "Programming Language :: Python :: 3.13",
                    "License :: OSI Approved :: BSD License",
                    "Operating System :: OS Independent"],
       python_requires='>=3.9',
diff --git a/spec2nii/bruker.py b/spec2nii/bruker.py
index 9bf2d0e..a3367f7 100644
--- a/spec2nii/bruker.py
+++ b/spec2nii/bruker.py
@@ -7,7 +7,7 @@ Copyright (C) 2021 Institute of Scientific Instruments of the CAS, v. v. i.
 """
 import os
-import pkg_resources
+import importlib.resources as importlib_resources
 import warnings
 from datetime import datetime
 
@@ -64,40 +64,43 @@ def yield_bruker(args):
        2/ Directory - function yields data and properties and data of all datasets compliant to the queries
     """
 
-    # get location of the spec2nii Bruker properties configuration file
-    bruker_properties_path = pkg_resources.resource_filename('spec2nii', 'bruker_properties.json')
-    bruker_fid_override_path = pkg_resources.resource_filename('spec2nii', 'bruker_fid_override.json')
-
     # get a list of queries to filter datasets
     queries = _get_queries(args)
 
-    # case of Bruker dataset
-    if os.path.isfile(args.file):
-        d = Dataset(
-            args.file,
-            property_files=[bruker_fid_override_path, bruker_properties_path],
-            parameter_files=['method'])
-        try:
-            d.query(queries)
-        except FilterEvalFalse:
-            raise ValueError(f'Bruker dataset {d.path} is not suitable for conversion to mrs_nifti')
-        yield from _proc_dataset(d, args)
-
-    # case of folder containing Bruker datasets
-    elif os.path.isdir(args.file):
-
-        # process individual datasets
-        for dataset in Folder(args.file, dataset_state={
-            "parameter_files": ['method'],
-            "property_files": [bruker_properties_path]
-        }).get_dataset_list_rec():
-            with dataset as d:
+    # get location of the spec2nii Bruker properties configuration file
+    ref1 = importlib_resources.files('spec2nii') / 'bruker_properties.json'
+    ref2 = importlib_resources.files('spec2nii') / 'bruker_fid_override.json'
+
+    with importlib_resources.as_file(ref1) as bruker_properties_path:
+        with importlib_resources.as_file(ref2) as bruker_fid_override_path:
+
+            # case of Bruker dataset
+            if os.path.isfile(args.file):
+                d = Dataset(
+                    args.file,
+                    property_files=[bruker_fid_override_path, bruker_properties_path],
+                    parameter_files=['method'])
                 try:
                     d.query(queries)
                 except FilterEvalFalse:
-                    continue
+                    raise ValueError(f'Bruker dataset {d.path} is not suitable for conversion to mrs_nifti')
                 yield from _proc_dataset(d, args)
 
+            # case of folder containing Bruker datasets
+            elif os.path.isdir(args.file):
+
+                # process individual datasets
+                for dataset in Folder(args.file, dataset_state={
+                    "parameter_files": ['method'],
+                    "property_files": [bruker_properties_path]
+                }).get_dataset_list_rec():
+                    with dataset as d:
+                        try:
+                            d.query(queries)
+                        except FilterEvalFalse:
+                            continue
+                        yield from _proc_dataset(d, args)
+
 
 def _get_queries(args):
     """