From 548e054a9d7e1509a301f84cbe6bbd03b446565f Mon Sep 17 00:00:00 2001 From: MikeG Date: Tue, 14 May 2024 09:58:04 +0200 Subject: [PATCH] V4 (#1111) * Mixed subtree processing (#981) * Refactor tests for test_mixed.py (#1027) * Remove deprecated modules and functions/classes & warnings (#1026, #1032) * Use readonly morphio Morphology by default (#979) * Morphology level radial distance features use the soma as reference point (#1030) * Expose subtree processing from the morph_stats api (#1034) * Remove pyXX prefix for lint, docs, and coverage (#1038) * Fix tutorials and add tutorial testenv (#1039) * Add isort for formatting/linting (#1040) * Add testing of example scripts (#1041) * Make documentation/docstrings testable (#1035) * Add black to neurom, format everything, and add to lint (#1042) * Fix load_morpholgies to always resolve paths (#1047) * allow Morphology objects to be either mut or immut (#1049) * Replace iter_* methods by properties in core objects and improve iter_segments (#1054) * Decouple Morphology constructor from io (#1120) * Move soma methods to functions (#1118) --- CHANGELOG.rst | 18 + doc/Makefile | 220 - doc/clean.sh | 2 - doc/make.bat | 263 - doc/source/conf.py | 81 +- doc/source/examples.rst | 26 +- doc/source/features.rst | 46 +- doc/source/heterogeneous.rst | 236 + doc/source/images/heterogeneous_neurite.png | Bin 0 -> 8611 bytes doc/source/images/heterogeneous_neuron.png | Bin 0 -> 32564 bytes doc/source/index.rst | 1 + doc/source/migration.rst | 128 +- doc/source/quickstart.rst | 11 +- doc/source/validation.rst | 8 +- examples/boxplot.py | 3 +- examples/density_plot.py | 98 +- examples/end_to_end_distance.py | 49 +- examples/extract_distribution.py | 15 +- examples/features_graph_table.py | 15 +- examples/get_features.py | 111 +- examples/histogram.py | 34 +- examples/iteration_analysis.py | 87 +- examples/nl_fst_compat.py | 31 +- examples/plot_somas.py | 17 +- examples/radius_of_gyration.py | 40 +- examples/section_ids.py | 13 +- 
examples/soma_radius_fit.py | 13 +- neurom/__init__.py | 13 +- neurom/apps/__init__.py | 3 +- neurom/apps/annotate.py | 26 +- neurom/apps/cli.py | 119 +- neurom/apps/morph_check.py | 3 +- neurom/apps/morph_stats.py | 71 +- neurom/check/__init__.py | 2 + neurom/check/morphology_checks.py | 75 +- neurom/check/morphtree.py | 18 +- neurom/check/neuron_checks.py | 8 - neurom/check/runner.py | 22 +- neurom/core/__init__.py | 5 +- neurom/core/dataformat.py | 1 + neurom/core/morphology.py | 438 +- neurom/core/neuron.py | 8 - neurom/core/population.py | 56 +- neurom/core/soma.py | 465 +- neurom/core/types.py | 134 +- neurom/features/__init__.py | 105 +- neurom/features/bifurcation.py | 65 +- neurom/features/bifurcationfunc.py | 8 - neurom/features/morphology.py | 313 +- neurom/features/neurite.py | 379 +- neurom/features/population.py | 34 +- neurom/features/section.py | 27 +- neurom/features/sectionfunc.py | 8 - neurom/geom/__init__.py | 7 +- neurom/geom/transform.py | 10 +- neurom/io/utils.py | 56 +- neurom/morphmath.py | 47 +- neurom/stats.py | 12 +- neurom/utils.py | 12 + neurom/view/__init__.py | 5 +- neurom/view/dendrogram.py | 11 +- neurom/view/matplotlib_impl.py | 247 +- neurom/view/matplotlib_utils.py | 167 +- neurom/view/plotly_impl.py | 72 +- neurom/viewer.py | 136 - pylintrc | 4 +- pyproject.toml | 21 + tests/apps/__init__.py | 0 tests/apps/test_annotate.py | 10 +- tests/apps/test_cli.py | 77 +- tests/apps/test_config.py | 10 +- tests/apps/test_morph_stats.py | 246 +- tests/check/__init__.py | 0 tests/check/test_morphology_checks.py | 182 +- tests/check/test_morphtree.py | 51 +- tests/check/test_runner.py | 176 +- tests/conftest.py | 92 + tests/core/__init__.py | 0 tests/core/test_iter.py | 199 +- tests/core/test_neurite.py | 4 +- tests/core/test_neuron.py | 73 +- tests/core/test_population.py | 56 +- tests/core/test_section.py | 75 +- tests/core/test_soma.py | 227 +- tests/core/test_types.py | 8 +- .../mixed/expected_morphology_features.json | 5269 
+++++++++++++++++ .../data/mixed/expected_neurite_features.json | 430 ++ .../mixed/expected_population_features.json | 71 + tests/data/mixed/mixed.h5 | Bin 0 -> 10240 bytes tests/data/swc/heterogeneous_morphology.swc | 25 + tests/features/test_bifurcation.py | 2 +- tests/features/test_features.py | 63 + tests/features/test_get_features.py | 934 +-- tests/features/test_morphology.py | 455 +- tests/features/test_neurite.py | 100 +- tests/features/test_population.py | 57 + tests/features/test_section.py | 189 +- tests/geom/__init__.py | 0 tests/geom/test_geom.py | 18 +- tests/geom/test_transform.py | 143 +- tests/io/__init__.py | 0 tests/io/test_io_utils.py | 180 +- tests/io/test_neurolucida.py | 195 +- tests/io/test_swc_reader.py | 72 +- tests/test_examples.py | 3 +- tests/test_import.py | 1 + tests/test_mixed.py | 1142 ++++ tests/test_morphmath.py | 548 +- tests/test_stats.py | 70 +- tests/test_utils.py | 15 +- tests/test_viewer.py | 139 - tests/view/__init__.py | 0 tests/view/conftest.py | 2 + tests/view/test_dendrogram.py | 20 +- tests/view/test_matplotlib_impl.py | 93 +- tests/view/test_matplotlib_utils.py | 29 +- tests/view/test_plotly_impl.py | 13 +- tox.ini | 40 +- 118 files changed, 12664 insertions(+), 3919 deletions(-) delete mode 100644 doc/Makefile delete mode 100755 doc/clean.sh delete mode 100644 doc/make.bat create mode 100644 doc/source/heterogeneous.rst create mode 100644 doc/source/images/heterogeneous_neurite.png create mode 100644 doc/source/images/heterogeneous_neuron.png delete mode 100644 neurom/check/neuron_checks.py delete mode 100644 neurom/core/neuron.py delete mode 100644 neurom/features/bifurcationfunc.py delete mode 100644 neurom/features/sectionfunc.py delete mode 100644 neurom/viewer.py create mode 100644 tests/apps/__init__.py create mode 100644 tests/check/__init__.py create mode 100644 tests/conftest.py create mode 100644 tests/core/__init__.py create mode 100644 tests/data/mixed/expected_morphology_features.json create mode 100644 
tests/data/mixed/expected_neurite_features.json create mode 100644 tests/data/mixed/expected_population_features.json create mode 100644 tests/data/mixed/mixed.h5 create mode 100644 tests/data/swc/heterogeneous_morphology.swc create mode 100644 tests/features/test_features.py create mode 100644 tests/features/test_population.py create mode 100644 tests/geom/__init__.py create mode 100644 tests/io/__init__.py create mode 100644 tests/test_mixed.py delete mode 100644 tests/test_viewer.py create mode 100644 tests/view/__init__.py diff --git a/CHANGELOG.rst b/CHANGELOG.rst index cae764200..d7e606ffa 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,24 @@ Changelog ========= +Version 4.0.0 +------------- + +- Morphology class accepts only morphio objects, not files anymore. (#1120) +- Replace ``iter_*`` methods by properties in core objects and improve ``iter_segments``. (#1054) +- NeuriteType extended to allow mixed type declarations as tuple of ints. (#1071) +- All features return built-in types (#1064) +- Morphology class also allows mutable morphio objects to be passed explicitly. (#1049) +- Morphology class uses morphio immutable class by composition, istead of inheritance. (#979) +- Morphology level radial distance features use the soma as reference point. (#1030) +- Make ``neurom.core.Population`` resolve paths. Symlinks are not resolved. (#1047) +- Mixed subtree processing can be used in morph_stats app via the use_subtrees flag. (#1034) +- ``neurom.view.[plot_tree|plot_tree3d|plot_soma|plot_soma3D]`` were hidden from the + neurom.view module. They can still be imported from neurom.view.matplotlib_impl. (#1032) +- Mixed subtree processing. (#981) +- Deprecated modules and classes were removed. 
(#1026) + + Version 3.2.3 ------------- diff --git a/doc/Makefile b/doc/Makefile deleted file mode 100644 index c3dff82b8..000000000 --- a/doc/Makefile +++ /dev/null @@ -1,220 +0,0 @@ -# Copyright (c) 2015, Ecole Polytechnique Federale de Lausanne, Blue Brain Project -# All rights reserved. -# -# This file is part of NeuroM -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# 3. Neither the name of the copyright holder nor the names of -# its contributors may be used to endorse or promote products -# derived from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY -# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. 
-SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = build - -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source - -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " applehelp to make an Apple Help Book" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to 
make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - @echo " coverage to run coverage check of the documentation (if enabled)" - -clean: - rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/NeuroM.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/NeuroM.qhc" - -applehelp: - $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp - @echo - @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." - @echo "N.B. 
You won't be able to view it unless you put it in" \ - "~/Library/Documentation/Help or install it in your application" \ - "bundle." - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/NeuroM" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/NeuroM" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." 
- -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -coverage: - $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage - @echo "Testing of coverage in the sources finished, look at the " \ - "results in $(BUILDDIR)/coverage/python.txt." - -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." - -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
diff --git a/doc/clean.sh b/doc/clean.sh deleted file mode 100755 index 49f8ce18d..000000000 --- a/doc/clean.sh +++ /dev/null @@ -1,2 +0,0 @@ -rm -Rf neurom -rm -Rf _build diff --git a/doc/make.bat b/doc/make.bat deleted file mode 100644 index 2137681ff..000000000 --- a/doc/make.bat +++ /dev/null @@ -1,263 +0,0 @@ -@ECHO OFF - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set BUILDDIR=build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source -set I18NSPHINXOPTS=%SPHINXOPTS% source -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% - set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% -) - -if "%1" == "" goto help - -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. singlehtml to make a single large HTML file - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. devhelp to make HTML files and a Devhelp project - echo. epub to make an epub - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. text to make text files - echo. man to make manual pages - echo. texinfo to make Texinfo files - echo. gettext to make PO message catalogs - echo. changes to make an overview over all changed/added/deprecated items - echo. xml to make Docutils-native XML files - echo. pseudoxml to make pseudoxml-XML files for display purposes - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - echo. 
coverage to run coverage check of the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - - -REM Check if sphinx-build is available and fallback to Python version if any -%SPHINXBUILD% 2> nul -if errorlevel 9009 goto sphinx_python -goto sphinx_ok - -:sphinx_python - -set SPHINXBUILD=python -m sphinx.__init__ -%SPHINXBUILD% 2> nul -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -:sphinx_ok - - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. - goto end -) - -if "%1" == "singlehtml" ( - %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the JSON files. - goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - if errorlevel 1 exit /b 1 - echo. 
- echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. - goto end -) - -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\NeuroM.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\NeuroM.ghc - goto end -) - -if "%1" == "devhelp" ( - %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. - goto end -) - -if "%1" == "epub" ( - %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub file is in %BUILDDIR%/epub. - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdf" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf - cd %~dp0 - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdfja" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf-ja - cd %~dp0 - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "text" ( - %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The text files are in %BUILDDIR%/text. - goto end -) - -if "%1" == "man" ( - %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The manual pages are in %BUILDDIR%/man. 
- goto end -) - -if "%1" == "texinfo" ( - %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. - goto end -) - -if "%1" == "gettext" ( - %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The message catalogs are in %BUILDDIR%/locale. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - if errorlevel 1 exit /b 1 - echo. - echo.The overview file is in %BUILDDIR%/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - if errorlevel 1 exit /b 1 - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - if errorlevel 1 exit /b 1 - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. - goto end -) - -if "%1" == "coverage" ( - %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage - if errorlevel 1 exit /b 1 - echo. - echo.Testing of coverage in the sources finished, look at the ^ -results in %BUILDDIR%/coverage/python.txt. - goto end -) - -if "%1" == "xml" ( - %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The XML files are in %BUILDDIR%/xml. - goto end -) - -if "%1" == "pseudoxml" ( - %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
- goto end -) - -:end diff --git a/doc/source/conf.py b/doc/source/conf.py index 3e956b5cd..362abdb2c 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -49,7 +49,7 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) +# sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ @@ -60,13 +60,15 @@ 'sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.napoleon', + 'sphinx.ext.doctest', ] + # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # General information about the project. project = u'NeuroM' @@ -86,13 +88,13 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -#language = None +# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -100,24 +102,24 @@ # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). 
-#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False +# keep_warnings = False suppress_warnings = ["ref.python"] autosummary_generate = True @@ -127,7 +129,7 @@ } autosummary_mock_imports = ['plotly'] -#autoclass_content = 'both' +# autoclass_content = 'both' # -- Options for HTML output ---------------------------------------------- @@ -140,7 +142,7 @@ # documentation. html_theme_options = { "repo_url": "https://github.com/BlueBrain/NeuroM/", - "repo_name": "BlueBrain/NeuroM" + "repo_name": "BlueBrain/NeuroM", } @@ -149,7 +151,7 @@ html_title = 'NeuroM' # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. @@ -158,7 +160,7 @@ # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, @@ -168,7 +170,7 @@ # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. 
@@ -176,90 +178,89 @@ # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. html_show_sourcelink = False # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -#html_search_language = 'en' +# html_search_language = 'en' # A dictionary with options for the search language support, empty by default. 
# Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} +# html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' +# html_search_scorer = 'scorer.js' # -- Options for LaTeX output --------------------------------------------- # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- # If true, show URL addresses after external links. -#man_show_urls = False - +# man_show_urls = False # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' diff --git a/doc/source/examples.rst b/doc/source/examples.rst index 979a8bcfc..33eff6525 100644 --- a/doc/source/examples.rst +++ b/doc/source/examples.rst @@ -35,34 +35,32 @@ Examples started *with the virtualenv activated*. That gives access to the ``neurom`` installation. 
-Fast analysis with :py:mod:`neurom` -*********************************** +Analysis with :py:mod:`neurom` +****************************** Here we load a morphology and obtain some information from it: -.. code-block:: python +.. doctest:: [examples] >>> import neurom as nm - >>> m = nm.load_morphology('some/data/path/morph_file.swc') + >>> m = nm.load_morphology("tests/data/swc/Neuron.swc") >>> ap_seg_len = nm.get('segment_lengths', m, neurite_type=nm.APICAL_DENDRITE) >>> ax_sec_len = nm.get('section_lengths', m, neurite_type=nm.AXON) -Morphology visualization with the :py:mod:`neurom.viewer` module -**************************************************************** +Morphology visualization with the :py:mod:`neurom.view` module +************************************************************** Here we visualize a morphology: -.. code-block:: python +.. doctest:: [examples] >>> # Initialize m as above - >>> from neurom import viewer - >>> fig, ax = viewer.draw(m) - >>> fig.show() - >>> - >>> fig, ax = viewer.draw(m, mode='3d') # valid modes '2d', '3d', 'dendrogram' - >>> fig.show() + >>> from neurom.view import plot_morph, plot_morph3d, plot_dendrogram + >>> plot_morph(m) + >>> plot_morph3d(m) + >>> plot_dendrogram(m) Advanced iterator-based feature extraction example ************************************************** @@ -89,7 +87,7 @@ Getting Log Information They are emitted in the ``neurom`` namespace, and can thus be filtered based on this. An example of setting up a handler is: -.. code-block:: python +.. doctest:: >>> import logging >>> # setup which namespace will be examined, and at what level diff --git a/doc/source/features.rst b/doc/source/features.rst index afc49ed45..82382474b 100644 --- a/doc/source/features.rst +++ b/doc/source/features.rst @@ -48,18 +48,22 @@ only to a morphology or a morphology population. An example for ``neurite``: -.. code-block:: python +.. 
testcode:: - from neurom import load_morphology, features - from neurom.features.neurite import max_radial_distance + from neurom import load_morphology, features + from neurom.features.neurite import max_radial_distance + + m = load_morphology("tests/data/swc/Neuron.swc") + + # valid input + rd = max_radial_distance(m.neurites[0]) + + # invalid input + # rd = max_radial_distance(m) + + # valid input + rd = features.get('max_radial_distance', m) - m = load_morphology('path/to/morphology') - # valid input - max_radial_distance(m.neurites[0]) - # invalid input - max_radial_distance(m) - # valid input - features.get('max_radial_distance', m) The features mechanism assumes that a neurite feature must be summed if it returns a number, and concatenated if it returns a list. Other types of returns are invalid. For example lets take @@ -69,33 +73,39 @@ Calling it on a morphology population will return a list of ``number_of_segments within the population. -.. code-block:: python +.. testcode:: + + from neurom import load_morphology, load_morphologies, features - from neurom import load_morphology, features + m = load_morphology("tests/data/swc/Neuron.swc") - m = load_morphology('path/to/morphology') # a single number features.get('number_of_segments', m.neurites[0]) + # a single number that is a sum for all `m.neurites`. features.get('number_of_segments', m) - pop = load_morphology('path/to/morphology population') + pop = load_morphologies("tests/data/valid_set") + # a list of numbers features.get('number_of_segments', pop) if a list is returned then the feature results are concatenated. -.. code-block:: python +.. 
testcode:: + + from neurom import load_morphology, load_morphologies, features - from neurom import load_morphology, features + m = load_morphology("tests/data/swc/Neuron.swc") - m = load_morphology('path/to/morphology') # a list of lengths in a neurite features.get('section_lengths', m.neurites[0]) + # a flat list of lengths in a morphology, no separation among neurites features.get('section_lengths', m) - pop = load_morphology('path/to/morphology population') + pop = load_morphologies("tests/data/valid_set") + # a flat list of lengths in a population, no separation among morphologies features.get('section_lengths', pop) diff --git a/doc/source/heterogeneous.rst b/doc/source/heterogeneous.rst new file mode 100644 index 000000000..60e427f7d --- /dev/null +++ b/doc/source/heterogeneous.rst @@ -0,0 +1,236 @@ +.. Copyright (c) 2022, Ecole Polytechnique Federale de Lausanne, Blue Brain Project + All rights reserved. + + This file is part of NeuroM + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + 3. Neither the name of the copyright holder nor the names of + its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY + DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +.. _heterogeneous: + +Heterogeneous Morphologies +************************** + +.. image:: images/heterogeneous_neuron.png + +Definition +---------- + +A heterogeneous morphology consists of zero or more homogeneous and at least one heterogeneous neurite tree extending from the soma. +A heterogeneous neurite tree consists of multiple sub-neurites with different types (ie: basal and axon). + +A typical example of a heterogeneous neurite is the axon-carrying dendrite, in which the axon sprouts from the basal dendrite. + + +Identification +-------------- + +Heterogeneous neurites can be identified using the ``Neurite::is_heterogeneous`` method: + +.. testcode:: [heterogeneous] + + from neurom import load_morphology + from neurom.core.morphology import iter_neurites + + m = load_morphology('tests/data/swc/heterogeneous_morphology.swc') + + print([neurite.is_heterogeneous() for neurite in m.neurites]) + +.. testoutput:: [heterogeneous] + :hide: + + [False, True, False] + +which would return ``[False, True, False]``, meaning the 2nd neurite extending from the soma contains multiple neurite types. + + +sub-neurite views of heterogeneous neurites +-------------------------------------------- + +Default mode +~~~~~~~~~~~~ + +NeuroM does not take into account heterogeneous sub-neurites by default. +A heterogeneous neurite is treated as a homogeneous one, the type of which is determined by the first section of the tree. +For example: + +.. 
testcode:: [heterogeneous] + + basal, axon_carrying_dendrite, apical = list(iter_neurites(m)) + + print(basal.type, axon_carrying_dendrite.type, apical.type) + +.. testoutput:: [heterogeneous] + + NeuriteType.basal_dendrite NeuriteType.basal_dendrite NeuriteType.apical_dendrite + +In other words, the axon-carrying dendrite would be treated as a basal dendrite. + +For feature extraction and checks, the axon-carrying dendrite is treated as a basal dendrite. +Features, for which an axon neurite type is passed, do not have access to the axonal part of the neurite. +For instance, the number of basal and axon neurites will be two and zero respectively. +A feature such as ``total_volume`` would include the entire axon-carrying dendrite, without separating between basal and axon types. + +subtree mode +~~~~~~~~~~~~ + +The ``Population``, ``Morphology`` and ``Neurite`` objects have a boolean attribute named ``process_subtrees`` which is set to ``False`` by default. +The value of this attribute can be set to ``True`` in order to take into account heterogeneous sub-neurites. + +.. testcode:: [heterogeneous] + + m.process_subtrees = True + + basal, axon_carrying_basal, apical = list(iter_neurites(m)) + + print(basal.type, axon_carrying_basal.type, apical.type) + +.. testoutput:: [heterogeneous] + + NeuriteType.basal_dendrite NeuriteType.axon_carrying_dendrite NeuriteType.apical_dendrite + +In the example above, two views of the axon-carrying dendrite have been created: the basal dendrite view and the axon view. + +.. image:: images/heterogeneous_neurite.png + +Given that the topology of the morphology is not modified, the sub-neurites specify as their ``root_node`` the same section of the homogeneous neurite. +So, in this case, both the basal and axon views start at the same section but then the filters used in iterators are different. +This also means that the sub-trees have no 'trunk', as a trunk is defined as a section connected to the soma. 
+ + +Extract features from heterogeneous morphologies +------------------------------------------------ + +Neurite +~~~~~~~ + +Neurite objects have been extended to include a ``process_subtrees`` flag, which can be used to apply a feature on a heterogeneous neurite. + +.. testcode:: [heterogeneous] + + from neurom import NeuriteType + from neurom.features.neurite import number_of_sections + + axon_carrying_dendrite = m.neurites[1] + + axon_carrying_dendrite.process_subtrees = True + total_sections = number_of_sections(axon_carrying_dendrite) + basal_sections = number_of_sections(axon_carrying_dendrite, section_type=NeuriteType.basal_dendrite) + axon_sections = number_of_sections(axon_carrying_dendrite, section_type=NeuriteType.axon) + + print(total_sections, basal_sections, axon_sections) + +.. testoutput:: [heterogeneous] + + 9 4 5 + +Not specifying a ``section_type`` is equivalent to passing ``NeuriteType.all`` and it will use all sections as done historically, even if ``process_subtrees`` is set to ``True``. + +Morphology +~~~~~~~~~~ + +Morphology objects have been extended to include the ``process_subtrees`` flag, which allows to use the sub-neurites. + +.. 
testcode:: [heterogeneous] + + from neurom.features.morphology import number_of_neurites + + m.process_subtrees = False + total_neurites_wout_subneurites = number_of_neurites(m) + m.process_subtrees = True + total_neurites_with_subneurites = number_of_neurites(m) + + print("A:", total_neurites_wout_subneurites, total_neurites_with_subneurites) + + m.process_subtrees = False + number_of_axon_neurites_wout = number_of_neurites(m, neurite_type=NeuriteType.axon) + m.process_subtrees = True + number_of_axon_neurites_with = number_of_neurites(m, neurite_type=NeuriteType.axon) + + print("B:", number_of_axon_neurites_wout, number_of_axon_neurites_with) + + m.process_subtrees = False + number_of_basal_neurites_wout = number_of_neurites(m, neurite_type=NeuriteType.basal_dendrite) + m.process_subtrees = True + number_of_basal_neurites_with = number_of_neurites(m, neurite_type=NeuriteType.basal_dendrite) + + print("C:", number_of_basal_neurites_wout, number_of_basal_neurites_with) + +.. testoutput:: [heterogeneous] + + A: 3 3 + B: 0 1 + C: 2 2 + +In the example above, the total number of neurites is the same when the subtrees are enabled (see ``A`` in the print out.) +This is because the axonal and basal parts of the axon-carrying dendrite are counted as one neurite. + +Specifying a ``neurite_type``, allows to consider sub-neurites. +Therefore, the number of axons without subtrees is 0, whereas it is 1 when subtrees are enabled (see ``B`` in the print out.) +However, for basal dendrites the number does not change (2) because the axon-carrying dendrite is perceived as basal dendrite in the default case (see ``C``.) + +features.get +~~~~~~~~~~~~ + +``features.get`` can be used with respect to what has been mentioned above for neurite and morphology features. + +.. 
testcode:: [heterogeneous] + + from neurom import features + + m.process_subtrees = True + n_neurites = features.get("number_of_neurites", m) + n_sections = features.get("number_of_sections", m, neurite_type=NeuriteType.axon) + + print(f"Neurites: {n_neurites}, Sections: {n_sections}") + +.. testoutput:: [heterogeneous] + + Neurites: 3, Sections: 5 + +.. warning:: + The ``features.get`` function can be used with either the ``neurite_type`` or the ``section_type`` parameter, depending on what type of object the feature is applied. + When the feature is applied to a ``Population`` or to a ``Morphology`` object, only the ``neurite_type`` parameter is accepted. + While the feature is applied to a ``Neurite`` or to a list of ``Neurite`` objects, only the ``section_type`` parameter is accepted. + +Conventions & Incompatibilities +------------------------------- + +Heterogeneous Forks +~~~~~~~~~~~~~~~~~~~ + +A heterogeneous bifurcation/fork, i.e. a section with children of different types, is ignored when features on bifurcations are calculated. +It is not meaningful to calculate features, such as bifurcation angles, on transitional forks where the downstream subtrees have different types. + +Incompatible features with subtrees +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The following features are not compatible with subtrees: + +* trunk_origin_azimuths +* trunk_origin_elevations +* trunk_angles + +Because they require the neurites to be rooted at the soma. +This is not true for sub-neurites. +Therefore, passing a Neurite object with a ``process_subtrees`` flag set to ``True`` will result in an empty list. diff --git a/doc/source/images/heterogeneous_neurite.png b/doc/source/images/heterogeneous_neurite.png new file mode 100644 index 0000000000000000000000000000000000000000..ed4d08114e8d4bfd448bc936e05c45b08c02784c GIT binary patch literal 8611 zcmXYX2UHWy_jYJOF!Wvm8tJ`BFDjuo>74|mNC#;FLNgYSCSCfAG(+zoC4^Uj7o;e? 
zC{^i5A}#bU{{G*dGrKc)@7(+B-80Y3oI44Rjdf_KIjI2v0F9n5)C>S1F(=knZvlz- z5OpgI@j)K+NYCOH@j~8mjVHFL0(5PI0027YzlG$3Mx`fF$Puh%6KwA99vtTU%ncA0 z7AE28=NT{h!@;d}5P$F%K*fLO#bd@t43DU! zUoa{^q|(iyWgy{-hhjw?@`P?d985DspT|!~=u1*-=Lu=OAtxg#=eYAg_ZB}D|BIH8 z8yoL;QLmKvH%CTPnm13wi^5NoWSdW&HeH*>7Gd$ zG(KKV6vYqP7}SoiuT0T_mI-r!M)&kP%{D#agN>(=o?7_>f%+s%hMWoiGw_}v@X~__ z*_sHM#g}oAO0V>-gQ-kbw##F{tHE~t3JBzfvlQRtnVe#XYC5k-u70(!6@FRbsWHYP zjR+twdHutfAXzYNSOhy#mZtmrZ$#$hQ|S1TA3@7(Iha99DPGrc|G(QO8Pu`00XX$%YSr0IJht-8!k)-Hxr;CyDn-8fSj-S!u_kU1ndVYe_ zH;`Z!Zg{ZnxjDSXjG4jtMAXm4sBmdMFN;l&;O#sz8jNp%iF;kl z2_Dmj0&2)AeunqhWZFT?lsNYC^#TlzR+L3Kx!nWo1(eu$4?zQ51>RDff~Ulg>H>R{ zUz`s56)b*%$Dq1sHgfpCT7$+E2;F^Vt?`b~G8)uAXhtt!xgS)a{YS-xVQkkUprFsd zjp|4oRsH@5?bQ`OmD*PiVYq_y^|U8NO17j#P9+}jx&#EuQ86=Z=>V;{_TYKaLJ%~2 zQgWGG^U}04ig%z##ldnJ-q6SI=36pZN)vn>(Ju;_sM|6s=A!aCRbIYG=s4im?_L^T z=@yD&&Db&r!lbv%$8EHr$}FE=jI2CYUJ5$?u$13d!Q!MbwQ_987_|gXs`>+&XaJ49 zd2tK{ADoSLZ0^pFz1MAu8RPrL0UXQ|Xo%M>l~q(^oOM9)?*@;ntQYl;sh+Z+Y;%p# zUub|+_jnl6cx2}k%^AOU9Lh!@dwC+dhJQoqj1UD#KW|?6MbW;E6 ziuOy=9d<9Ap+K6GTb8vqBZDW>lv57Hv>Q-wzvdLi+06kPmshNtIb~%)3!}OqXh)0n z3o$p`!|&CK6i05T@u@!;2qzG7ETfcy@5Hh0OqEIDI62pGxT(-6=NJme7sSex= zw)3C^=boL*!v{$X>+xz@a&5*t8(E4o%DEVd?gYVuD{86wfm0?1G%pG)yRPCbKO-kw zy?wj2^R+I2)rLV&H}@dveJ_RkHrzJCn5bsfQ+m(TzqY?%b@0cP-odFMLiZhws=lc~ zMa*QfI)w=GU;4=v%Cy_(Wl|=oaoWkh@0t+@>6Ce3NyeyG@YVhr;X!hpxeixZ!UF_wYWwx~-ew`x=9Ef|0?DEKxus0lLx~;ANEBl>k$p6xQy#p57 z+{H5|pAu93Z!3{cG(k_i!nij491gZSW@f7yigm+2xQc{WUd%~0?Uk-c#_*%Yz}1USu)$l-j{IPUV7Cbavj zzuu@3zMSHeAwMcyMTDK0&1VGC>so#J~8Cy;5XM3 z2&EApn?BI9jHy*L-nNB8AVwmx)-i1%rK6MZmm}|{S6kyvp=GNaN7C=U{ct-2g(e4S zY&myT>!yCllhk8@(*|fF7YnPd|5H?V!kt*B@evt^fjRQHUBM=jVf=?`%FPECRS0BM z&}!wz#A3F$H+~btunD%PCJ-a?s&6}u8Md{fZwB;sJBhMu9=Q7^UQQj=D%m{4@>TxWaaIH=Rl+8r6Ek}J}wX5vgFp1j1R+`MV$M=( zj6bCdyTgWbJKb>~Bl_nOlJTu}%l}|@$sMR2k|bMtG^CP?Bz*}Wu;9RKEM2U)%(xMG z+wMTmvou4?(VSPNxDdVi6n~ZKoz%}vGpd}Gc{i3Tj4RUcgONiRDJJXe`W=E6E)*b+ zyAwd$V47A(O!2+Oq^=6E2^5m5CE1M*KiS9edXJYJFtLz?d*F*Vz+ull4 z<>yEqvOd2qV)*-E4;~$5$ 
z1|1j(-w!z=8z39(KZQ|TQ`Mky9AIQ^3A52u4zC5<)#4QJ_nj}uZb(A^9lVmTT`j6# z-TkUgv&TK|W>CSlme6szK!G&GQDNU;>0(obvJC4<#u88c=nS=0#@+;*REQt7gS$6R z5aT909QKs7Sc8}q{*89rXS){RB6b0?{Qe-yZ}QtGg9e*Dcvt1ShYPw3#qR=yrMM83 zl4zKYr`LWD7L$8g^jy$A))vc|JL~tpCDqEh>P!>QNIEXl6uUz zLfUN+j@US^T>q$mPlq5{)w(98?Xi#a!5aP^P8V-e7M;iYRQ9#k_&Z%74pjc6LJ?re zWAOz4Lz&ipy%X20RO37rpXOj=*s|(=@cNs#A)?it8L&6r>ykiSA0_J)DWK)Uf@C8^%?DAadhP@EP}Xr5 zYnJnRi0wwUCEMfM-NPC@gagGGxpH?hoU2@8;f2SdVV^;Rd8{d-^@0VdRkw~Ox2su* zVOmyp2Q4}Lk#njdhe@t-BIhj2C%hW-V8N8vgS9rJ8q^4xp#2zAugD(8`jdm#3gX?G z+o{6|goD+`EAZ~j-e?t#_W0m|71><$`~1wF&=A!a&&bW$c^(Tt828Ke8!!N=e0ZrH zNQ*eQGDQsiKFmD@k%x{Wdr9}{!WN25P9pp@g&S!;Fqq3>}63P9gwtbi?WLjo$uJ zRLnijyR`HX0$~KDB$X*mx?Gm`y){f|?at(fKz@PF`>D^}as7zjTR?9~7LuuaNtznK z!5WxlMKcu;nzXJ~tf~oxipapCmR+o-=uz$K7d+4~W%3k9lr7)QY!=zNi#|LYC+iY*FhccE-nB(qBznF!wxQ!Dnc{@{C4h?ZPtyeO8k48fqvPK^K}rNS7!Q@MJSR6 z`+6_3EPf6BRR8Qr+%kh`$LmkqOFTa|$!?~T56j4aT{@6L=d&M_TX*D`Mc5+N^P~1j zM?QycmJ(0q!&ZQBMKPTg8@GoW^DappyyC7CgN*LQ&^Gtn;Pk3X1Cy25i zpZ(n5s6ZhKH@@VFt@+%jOUJUgVe)wp=a}H%tgKoDqa)Bb(R^f>6AAkLAL94J6=nE7 z;`;Fl9X5)vHb4~j83cWb6V6ocHd#;b@#yAMXIkFuMEC{XeWep;y}lcW%_Ht&k*OcvwN6Fw?Cw-yFaC%J6sbn!^z_Q6u2Ds5sxtg$ z7^lNBA-{POV8>%lyH#j}x%Sy!9!X%2{;U%fLhaB_{#6`S9jk}8d41ldq0loo^O0~+ zJScK~t6M(w+0KKq2ocL-_4iX_NwAx?8B6%)r!_AOH0}+&*vVK8*FizqlT6@=mP|*w2ZhiG96xL4y|{ zSwk>!#{OW61bhKZ;si3f&y;Yznd-9XWc{wntp2B5@z&0CDVsMm$lH>_dz$FXk8)1s znA;{8u65qGXnav=Y#5Gw5p1$Xb*Xr%c?#~fyCTJD>ho{j#*LM5AcP~umfWeA(4iKe zPj5L{`j00O!pBN74JENEZA%H>F^OjR+h>G3eCCB?hk9?e?~@G z#~{u+-o$8@T!o&vhupaipRaCK&=1s(r?T2DJGr z`=vSSzORGuW@|#-!I7*^od~|V)4RA+>Oe7U^{9zkQ9k`?G<^rqf7X%9#I=76jb@n4 zW*FDYeDz2zL9jUsABdb6QNgNqH`m%Y7RTN^{3tn=pZ)QqDR941_8^LeH$#QtU@zis z-F>BK%6qZS`0rqZN?q{3|L_J-H7EvL(&_u+!@pAA#5@xyw@Z%KJ?22(i@+CuS#`KH z$7}o-&Ruv1lKgpvH_gz*5Nwde^7_$!=3lN;LOG}DZq5t7uDaj#CDz`I@-9RiyYgJO zqP-%eeE3cEBN{z$eEiRXK1S)qp%IVbUqexZpNtG?3o`6X3Bgt2CecpZv8mDDsvbkk zB7vt+nbAqsL4y^|4NRIceOijx?Y+0kx3-l~tsqvYjH5<~s%0>>`fpp!lCqN8#Bm4C zx61U}94KBbNKgA=m_jlM;qiw;b4thri}m|`p1iz1Gob&khV^ZQpZ*rlf7%4vVznHF 
z^*LYk&FPkn+b?SM=Fbm270Q$=Szb$gY>**sGqN(TTk*q@xsu_ECQ6{3sy43Oh{g~c zFP1mrb$;+p(jtTMVqLSOvoob$SMwt|jP!9{eBYH47p)~Xb%+IV=IHd6a#98pW3$bP z8ik~dwN6|+kN$R*W7dDRl0#TBm>>t$CA&t1?ATPf0+u+cN*+%fS@mm-cnhRxDmzOl zTD_7vETsOkVy53E9xvB(I~GP<%PYGFnDR|%gkArxkm9C!nO|031lMwwHJ|nQdeDjGFE1(JuD1R5 zBh;w*pgDyBU_7IqiW1^`Nr(B-Bu9_(es2*GLneey}?wh zx&3SRr;2&v?~hUsxdUkpl|SS7GX_xfxVU;BgzVr`oUTASTr{A@qd`!qE}?8 zE7h86IYPj+&U2Cg-<>;lxqWYdryO&yj~b&#-jI5w@&;jx02gX=!f+ViBDgTRrYrVv zH(1h>FM+82fU#!_ZYD7Z*wCP_atU}vjwnz~~@dt9lV zI&;bQ$W_x6vmK<_N%mfjEAA%dJfI}wS}Px+!&PM8zODYFX@;Se*5|elc+mMleUpMw ze`Vt}M+YUftT%6tZgjz$lXWyF>KJrAr&qe6nT@ooKPJ0Som2XTvE$irXi&joXxm&C zY|7_b9}O{ge?Ed>oEJ412PazI=S8yyfEp00SIjs?^^HG{-R)PNE32e7Zxk0j$YMvn`gOXm74qEET9o}NyAVr1_wGjHz;gx3K%@Ff$kqmKhQ}?^Mzb}-ui`*x| zHX(#(ZXZsG3YnVcpGeOh7!>u(RetO4($LhGB^@*bQkNpM{qmwN7YDY9&r!hZbNFX$ z;ns2cR8~vZA`?v*vhQii@$U5NnkM((r45Ola@1rfw=Fa|!Buofqw2*Pje#m;KBH5n zZ^~vU5j1lWA$O98;bndNz~(QnlLI_8l5-^RcCG1Nf~YOv-M*{{%9n zpY1RUF7@AvD=H=@B+TT#;twRkHnDjcYYdFU)U=$3%F!VOLS8_QiKbZ&rhRRJ23*`; z0bw;879+H`X?Wxa>1Ppzf2i6#gB^Nc*)}&6q3N$|^+Y*ttG-o^X9l&&cl^83(`2?~ z0Mz&|CjaUE`DbvQI5n$m3ypaBSyRh*f&3F+vjI+|>N(19BcyEpU5RDaExv+jd|44e zeXE`2vGbKzt-AO%X?H_*qSnxdzL#m&=+<{$nF1;I2gI0wq2m1w6_cg=vKJbcl0aC^ zl0&#~qdNQv_IFB3l~3igBi^of%2eNE9^TMM0Cg(C#e3e&tq}^Yh~#JH3%n7ZelDIfcI{7nma(_konOT4G&Uug9 z&2T+ufeYr_KYY#<4ddD>JCtrWRK-9Q9-m)Ce`ze(88Q{nUJjgT*o$L6aFA_nJ>4GD z+{T76JNHAAZA+k@_Z0!@a;^+gQyWVX!=hQxy!N!b#pjRbV@5v0V@Oq)4K=^*3}+9r zeO3bdIJ^u!SX*~Qj^JZTN&;BGv`Lji`jx;Q&;Yxk7}rd2*BN8*1kQ)7ffr>YeqnNP zcP<%zQQAsUH4>rav8sTTpuA9+`!_UwlereX(m3y=F=;bJ0@{Kr$M*p)ZmZecM@$gwT(ei zy{s6?6>eQ@zqx_Ba8c_*un z`^y(NQgxuHbKajr} zy6hLS`+7$b=k)W>i$?lAVdNT?z2WZ9(330{Y+GOCgy@yT8?e@L0<^lEX}2c!fFT4v zeCK^ww8mk7PGWINI4}pzC&J&xFxLogUt6~lwRH2)O4t)jgXbfw8frQCzp{y3)|foM zqW3YFGM%D7A-mRkKY>0|{Dx;)fW9t^*U5W%Ou7g<2*H4T>?nO-n&YX+^2<{?y=$y3 zMhCxz)@(j%pnN}S{xS(@0T2MdYBr5%Lgf){wV7~Uk_(wR z8Fc9tAJ?sus0Qm{mm=nJ^ZOz9<0Jv&=C6H~MyvzQ?dJtAZ4XJUasO&*#w8$rCF2v}f+zqc7vI*7{Ed~$&~dG9 
z9VHjCsz@XMXRoT1?^tI(KK=<)?#J;NpU9OR<-mS&o#Wa49t>GI7MofZJJG?Ncbde>+SHbu zgL&26WFu?m35JqckIH3Gtuzv;pRF!`sya91ScX4jL0OJkzwR7c@cZI`yqsK70ajZb zVkSgAdPmD?dtOU* zo?jPIkif57WA?QnxxW!51s`@890;d-sjgGgj*4KHe+v|_xdWgISx%CFs* zC?3~Z)1OgdHX`+4@3uLiDs~Hq^K^|Bld%`q|L0Ja7oF4R7r!YnEzT7!x#Ne+@(cbRZZ20s`V*hb28BikUadB#H~FI3O5I4yt4FK@ z7Z#EJXFi?>2f#>+E~AzOP_{H7`;0K{ve&Ow-Wt&%23cf))uIW3dB9McoeyPkxpbDw zb&s;n9-`FD2irfqZ|U@(=Y2`BQA$2#%b$p4B~$6Hne^nI%R}GG0J=29^=SbQJkrNh z4+Bf{D^uz@M?74wlnxI^`> zZZD&0`gi&p6IJFpp%m|ypkW=XCN>$HO!3YsRgo*}Y^+v;1oLCeo^`FaR3TOve_V}q zUEyEJB{i(o{4nvgMj?O`r`8R=wuJKvsTRqJdBQBFPh|*Nnev;Vk6rBGxbASBi7=W_ znOa^u(~1|$qsY~){nDRRXH!kLeB|bi2$BSKoA#LxZ#iXQEIMDafsX-F=33nE@jPEa->lsW~#+}gu+yk_C zY|7?sVaQ+TwdJki5|$&b#D_5`b@cjw_YRe=-PMCcW_|a~=&S%2o&qIKFSA!Otl$lP z6;&ElhqBPle^XL-FmaA`j8!e)>nE##yAa=W98ak>+KS@(nfLik&c@d4<~ zuhC}d3P5l71tUsg8vD-(@cC#;>S7wYlmRVr81>JFjtn?gnP*5vMI(KiSxS6O;>-mz zU&OJOPUH`jUEZE^V5rZ|)4wU#MVnar#3Ehr+{ZLSLC<}2hBUwCIC#*#lHd8toD69K z?!Aew=MQ#tiCL9*$sGKSVYf(~NGaVRDBU343QGyXg3^mfcXuoZh#)21NbZu-%~I0cy(l4F()BI= zzxO>a=WurUotfXZX`0{{SgCD1EP006xR06>#?1VlZ-E0m8% zePFuCDS;oMetaK&{EYe=#}TCK0ss)2Jp7^+Nf%n79#Xo#e($Q~VD9Q+;%o-+@bKWa zvbS+DHE}fKc5t>x+n1mM02lyDuVlfV>AOoFzWP&hh@+JTI;XRWj89J)Oh_K(+FH(q z@Ea*JxGyH6+p*?G%H@ork;rWoWjM3eZREPg z#oo2l^JiSyN6D2D#j%!m9yDx;fB&R!rkiRgv%QuK_mkYMs@#~F<}cuzCt}M7A02<^ z4%Ad$4<@?#0+~aWMpSYtvuW=N%4jRIAqw-J)!06Kg*~zggvV1PpFg@CBD55&gE7Six5J1>YlAc)Qc#ucgT2@%fn&K zd1!D=_MH!1BLJV4?l-ir`f&Nk4#?b6I`+-`66<+vgU0~cq`{EIm73T@IoKOZ)cpFz z=w0X!>FXMqJV$~U?^;e5njx-!2>#s5uByJ1n(8M!}^09`+MQN$~j>22lc+T<~KDu(ybGA{I&qz zTa}&%O(eN!1$DXCzfh0x;u4-w{qcSRbTb0Jy|- zf*g$dVVS=e(EgsD*7PfYRt3Jy?)Y%G44M$G^4lZYEo-lAAMpyLVsiSLu!&#sWOo&% z%Jd!QqVN1Kt4mP3PlYO*J^xq%e@Ynr8&oLpN3yi=I5=&{Mo*Phrqq)uZAN(pG_ekB z-O<@h`nC6NAo-n(E>JrjCBBzB3IkvxSpJm};Q=N@Qp1|BuIIfkxVFqfOQ0To^r{0W3!vkV|E>~cnh zlP+6>0ox*3p>m7gH?*)qEAfgt#;o>Y$s3+G1y_3phQ_2V*+4gwynGEf?my|rKPuc) ztHz{>lHAOBRJy);QSQWp3q&LSIJCih?@Bez(LM+W1x!QQ_)_@F*M7|_i^>MVhpyQj 
z9-!W@UjtsUUKO@?3wx=zL*Hp`d$U2-&r-SS!i(D8P2&=M9Z&%9POtONd+xGg@aO4l zcyr+c63bg;r_LR#-cSz44l(%!Z&D&%8W$nJ8 z?2+5+e+o+rJsLnVGyE|Lc#Cl~A>d>mF*nC`Rjf2T&?VZWInslHPs45V!ks^XqtZ_; ze?0QJ(A1XVTe4TMvVVy7`Br4K)&3Df1r2X8CTFg|-T9n8y?=I!Qp7k34?`L|etv}kG6HhGnsWPamQ%%b_`^5fKha}`K9+EG5V=BhV$K1l--8UGm)#e4Bc zV=s_%I{W3gZ&^Fdmd?FY$o4{&8&`hghkRf_v;NRk5vlTN$iM!~C)3y8Vz)F@@Xv-R zj#_-Yo0X=R{lvnz6XsZdXK*f)?T##M%Viy(R65b-ds|A_li(mTSaH9DJswa^TEZ52 zS>BzA{~l5Vy*6cb-<=z~OSE6@w2il;nm#$QZ&$N=eD+r?l&9}!P5gGwp^tppg|^Ye zgT6-#c)dg0jR26G$2Wyk?{6CRqpFgJ(D2MdUJ%M@r7pi8_;dYlV3U#2V~z(K zKsWl(_t2W+)`wR*K}gH$kj|FD-@xKpzodOg(Vp=)+O{p(fj#fL#;MDEN2P-DoXHon zol86~D|jC;i2s1WTRAT6BiVl4fYtW(4f5WY&#Q<(9I-!;{&d9}bOd6QLCcnJW| zw>@(O{RDD2WN$Gdb${DbMQOa=kqfx$|~DvXfHn~eu^g} z#HuuZo1Ct14=RmTx}U=%+gAdSrvDbpz^}AGA6SZpx1m@QIjzT@pdLh^snRz-YOQP* z_pn|3?f6a8tYK0;8jX^UOfh*&7FjdnGd6;8B3L4QrBW$*$HIP+g_@LPm_;s;k<=yN z|CXA>n&Q%gYs%u1`MhGbFlaCI&ZLZX|3u8im2?i7yHmha0R|IbT z*T+?&nSq`(f^A{)M3vboBt(93+t`M}PcaL(5WGRVw$F9A^00r^9}FV9oImTqq`fA- zHB-1Bp24=x3P&)mfCg`V<*JOweJFI}0ilHdT8hyvhh|;_1+wdUW+nu?J~&|a2fpMr zo|yX%ma62XxG2%TRgX7&3m5)}3)+aeC#qC8CM}Z7oi{kMM#E<1CHdQPcx37@RWZON zBUWkv{PE`pm44Uy(k4+Y)-2rfGDd_{WUf@o3U+7q^Kk)6SQ5Qyl$(S1uIifT6#Kg{UN zbt>Ar({(yQ8L^7%T)69qJ9}*#b!}O;uZ;j@Hd6d5l3m9L%BgQH97;Dtv+C2k^wzLe zj))K~c48?)Jm-Pv*SaxkCT6?SOM&4CF^noE?zn!&e=_w!(zt9*D-xFW#M26L{Vt+P z=4Sk+jpO!?*%HtcLlf!4OGsKE&$&gP6k)tTOKS?FdxbIi{X6PJfwSt`R;3vf4ggN= z$)c^g2W8xiHh*V>^ zI6%$=L79xuQ@zhC<`B-n5QiDQvzl}gb(mDeu}4dxz8~+e%6KNVQ=g^3f*0VQWdzd8pbzvB zE&Zkbwrt`YJ+HxVo}i7}m&kBiquve*W(ER#eUBg4R7jybg}+>ovL-)>Oqi_}^}mHv z27UwJ%3!T0AVL8(na?}9`otZzQuJ=hC;5Dp1fOeLfAei$k~W3BexbhWJrh8X9EsV{ zgiK``uLhI@cs|)XP~oH|Z2yDmGqYeCu*5t&4*rMz*c&EZIs~Mw$ZRmc#aJr;<<|r& z&z`2KNTCD$S@JDI<1kl8paYZuE4HGM|2au}aB+P&82~E77l^y6X(_EM>hh(TfE z3{him+D!5WJ_b|OCVbEPqL(s!`Dnit$Fr;syUFPMTcAKJ82WePS{Mq16hP0fHQwZl z<)gvbz0WHlqk4nL{76H0vVxjoze3XixUQ%ze`jjv!p@I%E>4(8O8*;|mI9NO3|%nJ zbDJHC7I@VyU1uWq$JNgXwzbVo$L~w5AaADt$}?);z0KZTqYs+DUN6Qo6vO}R;dUz$ 
zp+L}8c1M&}zGd6CHCq|WNj0|Q-|}|k_$`K|)n1>=76)O|vy0`LWRvrw;Xt8e?~i|| zZIFMJz&yR~qF@u3_C?`97p`54ycF^NV?23aBlblS^v%jn28}rB1sx3S7(-UWy9w&S z|5hizar~`jl_nO8!p%$M8Wp@bD)mB=Xb0b10bjciYk^uc%k^aK2djYz;*jf~R_Vf2sS)7q+a;HuV(Cf0U0CT<-^w6fI zM9K?4{Hb@YS6R!7Rl0)ng&+9>LsanGsd;5}1N2o+U(YqxZcB6e5+uk zUR>3&@#dE-3K>J3$5KrkGF%P>rXXCIP+%4P?ba5dc{{SlNo{pRhtb57>_F9wfiNIE zYZi*E7n*f=bU+0`pQWdm#k%PuYsIX-NWRL!yDSdL0iFZ^#jz-KgRF#g%$dTiAOZGB zdwQRk_Q^olaYgjti&psxV~lXEb^{BfY;Zt!dpcl)p@KYa`w$SQ_)8tgEx^0fbDiET z-v42q^K4aIw_eS1X2@4$5K_*UJm9x03$d?u^d=35wZDlrQCUDrRcIj$HcZbj?gtQO zbbVn5X6>PWKi5QF6hm=2&j^7BfCy3i-AuZTBk#np&6<^QYlOEit(Qb3belmHHMQJ9R&eL#x z>N+9Y-vPpv##+YUeny%;ET1_*2IL+^f?vy`9Je<9v{x=nN7bd_Ux_;Vt;C6E`*YVK zzQORMy0WYoq-GHwA2{!HxlE1lX)7z^;7Va8^8hMPJhd$oZ&~7YaB#*4=AZ9(@xO9r zYIG)_AAH>U%xsqgIG@q*Q>a+U;Pe*?KTnLlWYoO=(ryH7=1+lt3(`aC3QE6&-kV%+ z)_Z!;5A!-Y;ZLm)D9tacXkOVNo7&wmw566;-M~O&e)Vn;jr|Wd#8<$OU;viL zsHXEI348Dx(vczaK8dWgVy^gUw7SWO=Bn;{s#)p+-m7t>TbkN0V)|p`&Negz5KZ!)iB zn8cE;d%tU4Y0qZW*w+Gmn~ETH~x`qe3@)q+csxh zcwm9#g_y94xb-aPuR9$aR?F`b25EWtKG|JbxNAOB1JFBgh#K$QA!Im)K2oRmCZFR; z;Ga1Lrf^c>**fI#rGG~{>(N^p|0J4`p9v_4?IsRNQy*YOx_zW-uc(u6x6&AX7rSwX zEa?FZs95>%_`Xrup~6)otD+7JlN@Z3+RGgI2&co6`jPy1rj%v5@^FsgXho1{WdiGq z=!bV`?{@(W!kd$CC3!q%-rc@B6xEcoz8Pbsk+C+f4sfpy4he>mLsz~%eq)RlC0x-4 zZ4wmGzLY_^nQfZ#>pbH9vO>)M^Zx4k_`Y%ezHFrQ*Cjn47KGIFZJ#w8$?Y^U=TqCM z|0+E>WNX*ybutoqFL4t+QEq0oICKjDuG&TYMK_vk_s3Ohatmt!X8A5q5qyDv&%==PZ8t&ph!b)=}Vc);7yDTrixr~Hm$;bs}^%fv)_SLTn3Y% z@37d541~52m}htI(z~agk7`dIVw52_HCT534lWEArs*t=qTpcYdUM#DKw*tLW6nmgg0atFsu7aZFUDs;x=|=vTJ_cRqoVArmxAj7crPUoF9ni~O@yua1xL`$0i}xH7!{mH zexhK*y|lZP8Etp&OJ%i8G)wTP%^nxZ)p0##I?!HF$N+F;H+?t5zX4?f8$UX8hV+>K z`Yk>$92{gYfu`FUwS6Cye!679ZTUubmU6bQba|^bK`$A&sK_qj2b}Fh5DK9-jVcCM zJ6>Yf7%+cW*mR@*K~OzDbFDco8?Ihej``-}pbvx|bGVGf{}CwKEBeb$pmp%2X+=5I zhKq)R$zM%Ur54U|my&~*5rSt^(@w;GfVB)L!ym@sPoJ_S3Pa^-CSQ=8DnP>3ou_Sy z+LLHbXO9(9txW~@&@{S>6rpwaH>?ULujUG^K6hskcmfN;hA;6dJTQigF)1_qnVe{? 
zgl70s-{3}@A5zqUy{9Jl18dcVR~!BY;TM>2j_kXEpL#AW5u(?IBd=IO6A!92Lkj~r zVh^rrK%0FS_=7`!NAcUVs082J)WPI;h)gURvCna@A}^YdZ9wMs@-6$on`PEcRjlPB z2ylw=vA)Q{Npv5f55g|8`5Ng^0$!e=0Z;Kk;e zpeLi3;#YW3mhPDcz|H;+eeF8HcKOt;6YRBOv4`OnxxkzBm(O-)T;_vAoo-aJB#r7A@8iXCG%05z4OL`NSj0JTDz4m_);9-%Ts2M%E<#+_dB&-`s;Ows7>X!C=A8m5V*WTTPLPt2q%*pd79 z7MM!Mf5?_x+AaJdfTC;d^%qUI`P8ENf~$k=sbT#lsybmgQNPnJoKB2G_*G3 zz6i1ZGHOQV0*zj>LAl6H;zn$wv>2s%p+*bei~R)pIw+7QIcoH1*p`m)#~IVv>>A)X>KUWfmcc{Xu@DRJ4!y(V}TgfE!hhajc2#i3r3L*%4P~rC^kxfq#~ce*f3KR z%5NVMPAK}q9J76u0XNs#b!atb{c8cc4qW3O4WVrI6FX9t+iak*$|)+o%+h@7EneMBYtR0h^E$FApnRYuo}CvZ z_Ty#m{!BWVoj7))q_a~4pv$vawG{U1uw5K77grCTn)Xf%OGBc?X{}l@1z+9B3we}T z@WG>=iyk0~(?+C_IqYQaIv`cZ?s(oiFt_S5gc!;LXW)z4At8u3Ep}eSPVwHBzYA2X`!l^R^X#FGYHF540}e4-J&|& zm{c$#U zW4uO-vW8 z^R}T6D#^$PZO_Rp_PkMBabT4nepuwWE}KyPHKvGr=?K5FlnsyV@B`9i4~Dy3po@>E zeq_O(X;;?qgW>%xwLCyadIbE{gLN(Vq-}vI<&c+0_vZexM7`_t3OObu1uTE+5Ln1{ z7C%=~gjYLa{@N_^B7@Ex+Ad+W+Km}bXv%4yRH*^I&kJkdQn+Eyic{xY)}repH0?Sl z?nx|^JU0#FTQdzlug2!_WL~aKHk=p`Odh8sy||Bv6;?*Atv529lY_37KLS$(z$V6&v1G428pGfNI9oG=J zovWenv~7V(Phr_m1hb&!;<*>NM)3z=yiifw_~|={5A=zOrnNpK0PIlLTaXQPiZz8| z25#`-{^r{FM5L@pHM`icMd$vyBCEaG{E+aJt(-H*H7}$b9b0?!xG7TFsbN`C24;49 z2RNN2%5uBMJi`$1qFp{Dy4k}UM00SLR9nc;_~nR;eKqShaM%-c$>6~xHcZudB?V|Od}n3w4AF9 z1e_i_=5{aIvo8HC{D?U#5D3%mD&JoyE&#vzq%YYxB)kG91Ac?c`gH{kP1nSK2||%{ zL1sq@Vnp+QWtb5~*#N<^j0Isd7`&OK&*L z$8KsK-w=MyL;oZmd&w0)<^T1m%F-3jhpsj>8mt>iM34A@OoRGyu^V{ArSV<{SK8I{$#D7V7boB{F)|;HGqq2UjXs&b1J9tP7o_l0FMZ^-2 z;!KzfoYA#G&0>EBjphR$R>+q2nFJm{?Uo@3i1;Ry?xK!|@TuoSG| zI8>lD-7MVm;Gtb>MOU#d+7=YqdxLRgCpkDxx3RLi34r{N;&ru1v=1MS&pl3t=7nI# z>cn?);R#mMHvN9t{$qbxv8)i>hRPSRgOuWov488@>M|V(1n)_mX<;bNOf;ouUu@!qk&-*hNm0h@&GEWI>|8UOMO|7gb8F7Vx)S^X% zgj9CxFwqK@5!U_HRdsyDzeh6gQmWVvjd=KEk!Dw+JSAFp*KIL%c5G+C^m#m3*Vk91 z9TL3@NjjJOXek>0KeYgQiVw7Qz&R7zwoD$S6gAq%LprZz5CL5C_6y3oBkK#04uBoW z!McuUuRyGo&aB4I`&0IqB>XoWafc83Hv3(X;kihbo#Z4c-MkUYLe zJkNZFne8UIE*lnU7Dy|zQdT!jXb8A{4XJAGqN;-r%dDj$pDkN(ZNV-y~i%qsY^C 
z#FGfG(kdHWPxRSJXm{3*FTFV3`+Kq6jnF#k-Fr7@P*y#{LmHU~7HILKVKr2o3r}f` zg3dt0J;o%;Zry!sS;GE)4oR?gf_PpRgNHH5^la@`daAThY3N^hq3^`Yiv5Jga&p#t zsvQ0EpB18lU}QW*y((Ep#YSE+dVu%s*&lYI9>Do0y2c7U-(4Ae9shdDQ z?_k_-4VkRaQ9k9+Q^|SUk8=KWRlkN{@62*8k4Z`086v3Crek|Lo&!&!&53ppKHVL> zga@C9Dd%LR3Sx%UmSYh_!S@J_&=RWEyNBv67_P{Y~a$6IYv`z&y=zv|QuJcfr-t z;>ne}!k7Gw(|3COter}sq&7JV&F?W$K6$Go?w}`Sa00pB(HrNP<~f8eB>IT=U{~&Y zX?kUSbi)E7xz*quj0X2Uu&nQ}A^6AVO^WwR>zT@=tfjKrSZ25irMie?C$kcRnK>!9 zNF$+$jiG+N7tJm4;{jgPc5^_V>)G0N&Vc>*wZ`&y%IreF@Qe4n3|nUC!B z_1^17ozFAFb>yBnI%!I(y`xZm@m`O78kO}&C8jwB?vDwy2eizPpHE?#=NarFOgazF zV1sDqyMmEN3BzAWG?)aGyok{_&%UvNTWBL)0Tl!X$iJJ9kexI7hx50Vr8(QfP2tju zL31-q4fYHiM(?}b?MgYLOFvw|A#GFDYNxCmTSW$Dw1Q5kQkeGnM@Y!6U02YZAtUN)lv*v^EVlhA*S{&voFuR>WNwo%}IZ zEO-;lUjA_UpKnv#k{p63*EN($^SzN_2z$!iA$rxD94i(0$~YCO&PS8o#~z0(D=xXTn1)(6tgT0RvRQXU9TJN=Y!E62*v#P{<4&Pe>}&Jz+vJ3 zC1Eq9f%5wE2ZBf$wtGeW#C3dEOQ0>`U~g%0ySX)81?Kxeo1otK8kh(;Z<->uTM-l9 z!^CqL81bmd)eJ#p10LkK4l=)coLx(}Imc6dV?aNg`yZKz8m%=TcH3_y$^6c5(GQn@ z*xDmXHY^^D1$rns7^{MO=MLUpiNuWesGkbvRGsb8+S}PT=7Tj)4}$?o&rHkBzq=%A zivIsuQP(y2V5BcD1eao_H#JUK_&D~8AsLX^2sK0tqPC@_O!MtORe(;f+y=<_RNgPC2+`E!0ezS=f|^t z@&he<3^l-Hpem*Ux(ifbOhjg#?Y0xFOe%njdb6`NS&ZvIA*y9|@TBv{P`KI=jw5^A z2S@HhJV(TfOchwkKh|onq-h4Az5WRr4aHuv&PbRa%zSLEktK{g|G^rK<^vE0crXez znO=Lo$>&%2rxJeZ)C(aQZBe&xP4tg^Ygn+~7rmHjXVdzfbp?VnGF6cDMP~r@Q2`=v)|JZ4-t32ATN?EX3XZ8I_|!Xn78}{2ys$jwR6T(;ryeqj!IiAx90&*Es;%#Q-nO|ARyoQp^ywb#*doocNb!q#JGTW z#XWTYP8(yOQY#OC|F!qEbmZi;5jNh)u~34w;=k6Fslo7PA0SfWZ*h>!xbor)Gs;N7E{4TYhCR2zs6X%7k!6j zjnw-19dat#m65E_ zer5LdwakB_c%mly%rqOTeyDWcX0<9~mum9;urU9qKe(e)vxTrZ5>G|@sYwc08bw5* z#K2O!5&IVZWl$|o;oF7zRj`h;)VW7-gZ=WsAU_hj7Y}^>tW#4hy!sWY*y@3ftkzV! z@n^&=O2cSVT6l?<{N`K;@f@*cOQL-G73zfuSlsR_lLyb*=XsBAssvIcW7FUoVBVl_ zowljuJ=eSq{l_M2Ls~8r88fsyfM^zH$XT>rnnQN!+wP@kfb}>X?q=48ejrsn`E1{k zH1x(29*G(p`H$WHycD&S*bPW-e`(xzO^Fyn&~dnJHsaze zOA&k93Lfe+Q#AG)(to5HJqS4~b|1l5Yuf+dF}Q?iSWBXS{ZLa^@!56sXro9@t0Foz z!sR#GE(38LO1QpfN0|foIaRnc9g{tExHPq!t~Z1nf)%$zzBnusw99~K#l|7?0K8E! 
z2T}oc2}H+{;Q)eiZ)?^%CTblJ6)Ho2^&_{C6`Zve1g8<0Ir z!m;bKG#QgUDzJ=b+ zX8Hl0PZD^D77>An%{ko(q-SbB5Ywzl29$q$tU(N#aB%!8%{XBhgjp088s$TWV;zcK z9LSfm=_E`P6OegU^O-cquj#Rdx_oNpIfJ_HY!BA=>5eJ;1^*nomCyB`(>fTe*dk)) zcS>`)qS%@9-;&F)HRYb}laOzj$~9{3S^lQIf3NLpXOLerIwQ6Vkft-hQujHMnX1PFhmZHCtrB{u(Iqus1@AI6V>875Efgi&e zrUH%bY2s(*d0Y3J2>M3R7ln8d4y&YjwLb6!s(mK8CJ7>?_N$-IwwS77Ng|SBoWKFl zo0awA_Flr&;28Q4=sjZwKDoPV%)?ES8lXA+-t|=w@^7sc5R1 zSmh-1L{Sq>?3jD}BmD*aURrk{MiNs)#sJjG-iMO^L_;IE@re@SpI(DJccDWO$49XF z<+rpnRfPO^=A!{0D2VE#PT zD4L@cGjhiP4PX$G3&U8!KHFJaY{Mv%MIWh(Z5~aFg)Rhqadj4>?o+gLSwJguT(FLz znN*1>xF{<@^Zz1)w=<#S2ES<(pOHXPYD(*%Tmw5e z#Zj06!FGDlz&M6>p7Se=;8>okK-mq~Ho87yNlMrhChYREc0u}&t{=s5{yBD@#Wkqz zwq(>9`G{HL76%sf2Fe2OBR@=WUkkO1hvUgksxOC6iAk|r5bs=jv?C)g?tXju^M zb)o&_t9$WIu%dptdl1zmJ>x+Va#{fnJXQKwi5`4&%kx5;qmAxzRVV01#9Gdu*9#PQ!Be+l zv1lsJ$d(kqD6}@q1|qn8!u)*Gi&+~z@8lPC-nfas0=wBdQQrCoMUUILULWGMo~>`a zK20R`36q;WHHeki`y{(1(V+A_;iXITXvWLm7_s+l4Bh4en$Sv=C#6Q3yM;!c-Adx7 z`*`*h^KCX2jM66>TpGFk99J;}UCSnC$o|GOM|utqN8|U}NCkKat0)1a7En(=LYy}a znP!q^mAyNd2eq)gV6b9mfc{p?aZzPRz`paybLV#?g8@#(JQknsyAvI^V&G^LhDC`> zx_%XpMTm;*i-Rh{IVSdVewV%0&W*6*xVW=>1s@HP83M!|Loj`?s^F(@tK~qnw@7oN&VD``*x9X94b2b}^nOrbEl$)z}$oq1ADIsaFh$Rp`XGFV-01a)cTN(#b z)aWoXG-3DszIhh6Ft=cErodckLdK8QJqOAZ0offBBWOV-I!e|g(bO?-dCVE5pVYl_ zt{U0d@Roj^x>+1UFSYPYDE}R!*FqhGDU(C=S}^tmlQ4r7R#i-_V35iBPbQ(d=%&-o zm_C6A>fv?dMH>o$J4*){#H65};3B-j6W1`4HAR1jI;rh)LCAvP-4>DlBB{~>V@+Qc zL#O8YcGyR@xZISxbAOS&FsB7v5n@WKj=Vkc=4fS0>8}TX`H>RssA`$?;h&wY+6+wU zyPs7Zo$T}|qh-y#(p0w({A@If>7( zmpq8wZ}6j28n}3|=X1r8A3qPq@4DYYpA`Pi9pNGhyZoJdh;xiJ87tZgF1)n#na4(B zASNL(u>X;^EciBiBF%h&JciGnzR*uG^R5Jwl5GDGGw)6Ojo-c5>-H9}1Wk4=)zu+p z()@D9jO^nvwB97o+19+Drd-TfEK{QES#Qahqa}jyyj)qvUr7>E&{~C8jwlu|Nzqv; zOH=hRa#8(rk#(hjMNBa~NYUPloOY1{76vb`06FV#FBQe!OGK$MdJ)5ZFk?S+$)XQi zE0RRtrEUBv!n|Tj!2jq5EvM&9&Ut0rB3dkK(lczPv1eZqNGHuyZJaND&SBqH96QXj z)xY$chtX8Rh|+tP6}e)69j$aK_Yz^Xf7u+l*8NrPXR-wP78<=P_)-*hKZDa7c;^y> z1N>RPm&PReiKVG$U;bx7q$V%!JC4h-Z<5B<&+bQ>ezcu9iM<(} 
z`OX!EGA-#=4E}XCzXCY6D@Br+j9ar5k7W;Dkk)5J|3ydwab96FG}xQQ1iDheUaT^L zh|fo24Sr}T`L$~^2n?nNuVtw*2>o1TLAi!|ld_(f%bC zF`Gn)YC!5fc)ra(=COu!*kB_ut1S9~T?{}-7G6+8l`_XdME2qn@K0OCHPe?kSzt&T z$}sE4yd`YxA4&f_nW)$F#o1czcegie3F z?eHuJ~?5m^_M* z%jV9i{|R%9IKic?R_Kk|4FRluC8+E~fJea}Jk)E?CoTv`nXj%I6Wy29v^ch+C`BCg zS|}dYQRH>mCPhTpJd~Y&FdFZEaS0Cop>OHsk>@1_kslzAfv{7I-bP4D+#BT8!tnZb z2J+0;BHW*8+K2{D`Yvzzt_|$#FdyDL>QnNYp!Il*-Gdn%UZ}3NAh)?sXh6Z#T&1h8aRo!_L_t>#qZk)mYv#ae@!onSqX6G5xN4%bj2GHIdZSUen70MD1 zTe~xvtl#rf(4Q=`4iLq*gHn54^ZM?dGJ{&4!b}#a`(FFUo}rDjf)o+^yU{m$ZDDvK z^po+%(B_qmnAF!Ww96(hVym1-VpDsOu6a!amp&QMc`=yQG4$pRF;|Qf^%Tir)qu&6 z+BOCD?)89P#H~xLPLyM6599vDOss1n_MdwB6@p$HK|)Hm-_~_4G#8}{Qsi5lA_&Qs zk1wc?b1k`#QxCrQN~Lcb1y{wSpY+Jhi1iu%=0d*JoH(His{S(g%IKgDl>hjvqL}os zOWpEtC@Q?UHD)@}<;Fh!Ry6Qy7dklUj7Rh-#AoK9uB=lRVF0(tZ`8A@ux zcL~H~+j5tMVXxkXK2nvrbElUxXYoeEkvsoxVI`c#=(6lFgU*j}`BY$b&xi216OVb; zH~5TL+?H0al}1S#%`4!A((S$PSFlBGMhrL(QgQn@FZZ_$b2foO<197hgkVJ+xTh5h zge@Oa9b(qI@&~|dNw|Ek*x(X+(v300bh!=9PCybdTXMcOT1Zo>(HwkbUig@uD>(3m zeLlME2NzMYJbQpov;Msl44iHnb3c;pOWtG?d~O31D5ZfM4nff?;4??I}csU1#8Wkf-N-bdYoIK zHNR?(W2wg~2cr}s*s#l%z%v15$&ktH%q{yiMRd0q-{YtkfBvw`NYeH(5#q^ehW5vt z-+x1$ZO^CpP6YEd{>n8{j;l!v#%sPF_*Db#-f2UVC7EWR)rH6MRLRHiSWe+^81MHq zP5~#Jlwakf24d#-j7cT^eKo%sXa6PhUt#__swyCcB;h0m;z~F#TvJZK zRTuXq7x*nTSIUrM@1$knkHnqcM7uv7bz7L&0R27FF;&)Q%;^Bdpp$G2Q&gSx&k1>> zh^fEB+0~}gj6zki^D-9Y6GI^O_HfY9rDU&7%hP|=?5fTh5x%V_s%f~s;@fg0$x3)a1K|nl(P;D^+m=blM>Ww3{rdA_o@XS1UO8s{h0tH|z_T|{5Zuz}o%!_*l!ixcv zrE>w@xw56)%f2L}T>9X{T)#jIYuWoOi$2j4OLJxs3pmwf{@l!GXB?5ILlZnn5mx$o zoJ8!HKTe!ZVnseCDs1Vi0wnOhenAN24}YkcNO*DaY-~i1G@s$o6~%=^Jl5Q+xsvC5 zk+S1TV$>7DOD{o(Z8Uv``@4K>^3t6ML6a{ z)3XtwWGlNTu^_7tuSrZOz0!@D8O6*NI> zUqi(Ofxu^4xa#J=X{Pv@StZb5A^0AzsJ~zq^^D%#I~IwLB!cWNxUDu9F}6HKURK+Q z?41~Qmg{2?iKx4qp)S}altC{X3ei*PN4l))d zp^Wwlb>Ua~>AMr=Tn{-+*bDzX<^)>O-lVN}meSQx$~dX#o8MA4kgAlQCSCn-U{2JZ z*Mycs@#bo(CzxDvg=zDyf(v8khsHn-H_NJTG-CdX34+!1U^hpc(-f*No!7t2i81py z0AW9<6HLt_=OCG9;;Mhk)fF4^2$_ESNvZu^$6X*#c@2L*sObHN{NGik*Blh)kC6(O 
z5`%vyvy{awngd-MFKoKZ`h>qJti3)E!9MEr2JNQldx-|{RCMrX2|y($WGe6Q?)f-o`zjgZ9dRdO4Qt7#8 z%T{|c@%`aeFWgesHqD7PW`K2Kq;t3NvDLOTE+y`d*^DUHVU=fU+bJYv&n0P6BvwuG zSUhtkqOqWXGy+D&V3=qgtOg_`tY7@g_lQ?FJcYDLC=@b9k+FK*u| z0RJ3B{M8$8@7?j`aRgQrMQ@G>U%wSG{Rvn5H~i;(XAPD2#aUr<=lqV6j>K{I?@QEf z718yZ*46ezrvIr6$2N5EdHEIJt&EN`DOZG@%2&R9D|~Bv1AK|#!QV><*whDI!b`Uw zkj9b{<_!FQC!T`1tVnO05~~HS87KU#!+!ez%XL3JiIaW4?5+IwgjaYz2C(OZL_=nP zRKA(&SOcOxiUrhWpgI;cc^0E>HcCyEJp5tJ+nawebWs7dE$=sy=hfSrCi~4ngP|_C zj=XycT7S-4qXw-A_~Eh^_VO=;JpmWuyriIyb(UBr#t`$#5RJJ)`d+xaPU^QPmtr1D zPvQP!)`aKO=u8n~FYXqvD32*m?2sN$lb$ENaXD9~hCSUId@Uo;t*bImiLr~3PVOcP zK0}4fVgdw39s?FAVtYHRqf^z(^U5n(-wtH=OAFzLSQsN?x#pw+sJa#}xkY`kXb=gp z)Mo}|&_8tcLM0E^OHFdtNguB2aULJe%h~7!r~0_Wt_Io5{>0on2pWFN5eq%mK0x_$ z-C|erZ&8_VH}jLx%DcFCw_xjJ*vom;(o(g_wMKA9HZsh8G$A=8trP1+z*z^hvR#3& zXQ*m`?LiBMI;YG2mV|fTAMTl` z9=`tZ_&w?}QG9ZNm(e4z$w`{dd3Di1e3|GhgZWEFubP+wUZ zBrXL*RfLXMgs_E-FM6vp$JMklSyk0{tTr;Wnrw8 z4HBfo!+lrV6E~DlLiF`S^^~m9^_+fAPx(*AsE`Gp2Q+)n(2TIA6%3ywh*M?5U5!1? zhY)SKoS!x4lHjsJVuRq;1~f}_?yrpuX&oK!^UkFZr3yY-=iUC6C;nrotzZYq$9Ya+ zwC!#erFm<3ZBpQiuW=HAHHAA@npUf7I&E>ft{cg!o}tMWIvj z>cjkpI#*&$?f7oL&v;6opz(K3mdd=Ye!V4%tBt5z+*V~`!?m8+3q~meMJ-_khG-%M z>#iE1te^9Sr0>jT-BMmq)Jtp!{p~o1GWH6VwMEu*hS+mJo`|ixKgfs` zg;DD|AgrMKB;T2Z^07uKu;{W6eFbw^F4g-)Pj7FF6XW2OZl*s&uk>jy_vd~tR22@A z)1!?nne<@?l4gyc!h{4E8sm!oUrlcv)>iYp4w4N%-mar>p8@B3a?{yBRxb7uGK&d%I>@19o}a(Na{OzHj6 zlZG;{&M{U2tq9mvnW`qPacm}M2HVo^$|JsHq^#zKZvG4moRa^+YSfJ)`->4+oM05= zXX(7MMgb)Hi5d9iL|@B1h46p>{kOAGpuw>H6aKU-iTe}Dr6p3#Fe+>!8qD^@4yW@m zzmBGKLdI?VGYqqvHL)PGBg$xLcSubEXiTAj)b-FO9>PIWq6NQlQVkM=0~OVju`j=z zFOI#l{&3<*^Y6`uN*@+q;|PuNuTa`cW&CVzNOF6u(pBvyc#k8DE6LPE*Ce@y^c+{d z65IHe(Wt+S6!rTaN%(-z^YQHz>JSsM)e#Ne>9Js6Icw5S6w{5 z>NKmI`-Vkr>ZeaUsO(g9^SJZy1;5j%dG6}2p&jL`v~qOzj#7p{ zq{(TM{gHGBjb#hNODRAvEod0%zN77wwElDJ{pHIKJpTazsuaXLusO#gyf4C8?CyL$ zH3^(~Jw*`5{TzrxavyVGXU)W%>N6sDmCv-)N+pAaUxV5^6h>tZ_xG2=gSM7`Y@N!U z?k_hV4Hx9L$y~^6$;BFUTAqtuSAN5>FCGMNev^chK{obQh^Askca%gw 
z6UN1_52v3c1s1sUJ7#3&Di(&ko@tZ}D3#f)Ui^X}W!Yxg=AYBAOGiINb$~8*GFNS+ z>kytV%%_;XF>VKslaDXJN~VD`8n6W=0vhudolKqa=JczQq%5xdl4?TY0r4SbLNJOH zF~R)^75R$;$=)8|PtJJSDS~=TjcC-dZE+a?G(1;gMpZ*A!~k*Od~;H^{a%s12Z}$U z1ORfxdDgLmT8a-ig(SP_4bx)Y0?X6mK19Syi7dukT!(y#fR3N*!hxFmH$86;W_{i# zQ)GlmK}ASjZXUzEe0E2K&@*v|?7>>KlzcTxIRccos(1_2n*55mN?Xx31pKvY#(j~a zxL!F8y;8I()2#svW#d4Jp?qrO^X@+x@L9j-7hyII#Ubx2rk5lNkqA!xm+c=K_;sTt0XZs+W!HFelo#E> zS1Ll~2Jm49RoH>jY~0r>D~B`2@mv_b%=29hJ@{^v8J%x|vii97vHuz=QdV|a{cLe1 zqC4DX^m}!Rk#-WyQ~Ma!p7&2}2yMBE7(U7!$v!2OaIZapgL8&lPkNe?PYi)0cgnp|#?Z zKumWbDW%U6YIqe84fsL0tXF~9GFdjM&HY@hG#g#|o4m?TF$fyyiu3&bk<|jA--m&# z7#K*ct+6P@Iqm$@cANuofS2r6wN53?_B50`SYKga9?T{9+tZx4%Ic31%7c4D%)}eB zq1Jq^9B-y#S9iRX!6D%7i%yHGkB@>0FE!W>7JLf&RK`UEzF31xZ$03-JiMPMUuS>+ ziIrlMlmy$~VbSk@>6Msn+vPcN2wiXvD9LPjNy^|Kc7 zV|c81>LjLn=mg&i}EoN4G7sAmW6>~VM@=H5)HFMIb5l> zr`~Xy-r)p`tbk=w(kY<2(1>z}k*UYfM5hC+bjNhzavEpn`gJNCwx>VjfKwsn3u{x1 zsB`EIfY=W>IRUN5+rwD6^Z6CzWu=c)Yh}HnL{uZZn=};bj(*6Z-$z8*ROOA;t^fjJ zihlSibe8GqC;iMLpOH%|&geXwML*lwE73>>Pz_KgS{{9(Sr)%@H-c=ezxu?e7!cLy6tX{2jfNbn?11Nj$o_j*DP3XF0%W3NlyuT#rea7WP@H-(!;vnc2(5dO1s0iq#;&(f zSuOq8pV~DYUhSVp*8n(pbd9@B0_RrwQ!gyyL0rhms-1AB==~n;j$$Je+@+naXUklt zlIiA_DYJsd2^C^VUCIi=uKX9n&efw7$`8vy21B|C=iIr{2~XPhfe- z$IKJN^MwAS+fw7VJa1i2yI=lpg7XK8sF>0V-5cLN^9W_&B32VRP4bPB{t+~D{w&C; zL5qx(PjM&a*DG%fP48Cl)3;!b(8RR*E6n+9o2vqMEJygW$kTX!ov?Q z1GAsb&YV8aN#%*AUih5C{W^8KI%MrBxM6O@p)j>p5<}MMp z6(I%<+bSE9B^b{gZX=-IQLQD#z(qDt`Th~<N-;wgN8_H8_#AUNgr z5P7Y%kcU>jggf?bDhFzbx)r^1Iq@5b^VfhjRC#WOS-fEW32oNpv%=yAxs=Ha*}ma! 
zhp21X`oQ7&qD?5Q4IKonsCfO&9IAz9jclA4Bs#;nBmMh|`tz3P_!(^JSQ9Hbsl@@& zZ5C+*2s4Wg^2gtHMMI62uZ$HA|y_mO5MXg7AIU)rdRK{lirFl{w1 z3R{6T_Eg~jrs#%pIvA@|^Y`Yfds-rvXj>=Skd5(~XU)0TI<+md|tWsiI7+_}bA2CRjjq#U$S{{?+e_$m_{kA9t?xBu{aqpV=9 zgv*;rD8+Zxe8rjRn9OdhR5r@t`HcHj6GNAO3)+*WRCoPLI83w-dL&v!0oRz2W(U?& zYVA%~z=)xj!%u#+2sTs z%sSeYkxs>(6`Xnt+}lfY_${Gg8jNHIH7d|+O)-%ZHA^026f>((+Pu*0Zn zAD|6$i*fTcC%t{_8u+*y8fMbWv15qp0H_m-8;Q0G=n8+f`<7qZhf3Z_XX-NULze5M1y$w{=K6B%D zNJ``Up#?q}ym*VqbCTsjHS@5a3gm9$MlsPa5Zsyw%PONS1i3t(WI!Sr~*2` zo+qjvsr}GyURbqmocsnFu5wso%3F6FDf_aU7w6!e$>V{IiR8ADy^V{e6eA~KAklK# zWyd;6$YY+eorJnHJ@dqg%B)<-()4FFAr|0d@VN>yaG)Cy@XF&UMDO@HY6EHpAg-i< zKY$()D$gcf$>7C_`C3#3P65LD`~yVO(ir@Mt=?G^A?ti7WST|B=VhY9O|C=` z;^WMvbg3vg%Q>d(R zV!d?j*(6P}*eM>=E!j7Jr#m(L&eiQ0VzK%FC#7B3a{MKirQ z>(jZr2W`A(0?YxPbF?&HJ^=$ia&U4uYI6kJ97n?sv{>_ZWFTOdI=+61oATrKu{BA& z$aB1~57#-PeJzqC#6xW`Ud6KLz#yf(=s2LU`S?vKlui&1`(X!Cx|zBzf|8i%u(aAG zdl!%x>qt>5Z3)%%jgmpnQAH$G3omJZyrH!`2q9{TS-%+VZ<=S_emW;vpq2ieis~3i>oHPt*NGbz;_~2 z>ed|EP{->{u5HdHd7l(EyoWO5W~$urb`h~dY*ge_Qv|SP_~ZhUkGx(B*z+!`*fgu#mplp{nsP?xLBYY2L_-|0cZipGWWim9iz`ueVD2D;?$L+{E^%UF@ipK2 zo+h>~gl>9>d+4BMZY%63&e^W_ZFE=jU+}Gl>A6#PPg6ehAc(k;s6RE;&)>nyG8e+k znJ=m)$6CcK8Ts)>M51bV< zfA5~xdXq6J^!`^UonUUTf%C2$#kLdzDnl z`xg6>FKN}r9lfn^ro1%bOya5Ctgni4!tlN9`7uKKyU1f{aCs2#YGGyo+hgt~&W1ZB z`>>elgvw5oZkc!((ClCyY{@NHKyW5GrvEX z>i%;!50#fEQ~8a{2^#&@HhGTCvTYP~c*kCVb)!97GKEP(k%xqRd`3|zn_@be+ElrO zJA?@9Yjy%vA$nT+eMRu(JZ>)oPYRe%j z-j;k64rf#Hvv`}F=gq2@x$?zhk=zLvdtJe>+F{(*ABHDw<`hci00sPTY`$+M1w^e} zbJ9lZ1rA>bHkJ4TSiOYW z#s`(=z2SgP6^A_Y)tA?UK1#c1@9Adx2MtfAF8O#wiGN^Y`x7c`ZPH#??*_=82kk^f z{nZH#PZ8!V&l#V_OVk+Q*!*JnAK`vi6+$rG4Do){d6I(7 z)Dz>Tu&=d7Y4-xaD=Ie5ji2(_f7hBx%U$g$^FW$%1ZjbWv*5uC&%Uj}&A$)e_8~Q3 z5kMDEgVTcZ66*$-l7NoB-nZnumgp>aYH4@TXkEU|B5Ca%2AHJxrqk5`$>GuwTBVM_ z8@pYy$N(g9+f~{lLpj&We`!Acc&>T-wFq2v=4|iYvE$g=pPR6hgF9}6g5FTr)&3)b5QmU^Ca4%<(oMhK&y^s3^=TW zW8Dh#dDZDE#uKKbq9bzYwyC6}k>V_=o{=27;mhNUgyMMTeTOnP`X}M-fM8$~2b7Hd z4cviKB)-cU+K;#yC4}5}EkM)Rf?1?&&O1AlATucCCo{bk(iiwMU#>02aBR9|E1C-^ 
zQqo!Shzp2Xn2Bfii1$g~0;Cx~$BT*yKHuflS3nWT)H46_4;t1@xHv^U)HPKzMjD*_ z6iRNxTkWGqQRe2q)kBTX5i|6aXq6`Qw$g^$>CIfB+4!W~?VLKyR2e7{Y=lRp`*Gg| zHc67X*{0WR&i$upaz1)&t-e0VwByd|?GP8Q&X!u$S`#s+9Z;yNi)V}?bDB(v&178X+gi#RuQe38H?}vZiJQu&?(=DZA!C+kR|l4k(Z&ZzG(iJ1xbWJY^6D{3hWIR!OW}Y6Vo(Du&@p zGF(QZMLdZdL+ymX7th3*`&9KgLn-C-kA<`Lr?4f-^2M36XyvI+N3A(QZ@lWbHk=jr zdYUG3-pT50ItR-E7sZ58-`J7oF#%87nIfnjp-PiY9V9yCBK25llsC6nmkpk~_aVcex2BS{M;{Xo#v)U@Li~G5^T61XyTMG!x=tF7xKZ?xMs*$Q z{GA4<{A<-xeCgP}P2*m_f_+t9_L4-1aF5;Z;|0eKh1Woi%`T7{kAhl)HoX)F|=pSKOubARd6X+)0VG z3qhS61|ey*os+Awqa=-LIgyMys$=f9%1Uo@?>u_?`C@kNtW>~1y)BNNZW&J@`%vd! zMTsF1i}@BXps7a}q;JHX$GL#SVbG6@wgfrtj=j&{Ua1IK zgTcadp;%acbgl6TV+nJ2!91Ihxf2CzL51*yBAhmY#nhch{-_BU8_OGh%Z0oYpkmhV zNr71-yBr$}dI(hsWr|bL;5|fp!x)|TweE$djG$&u$xjE~P^=Zz4WHbT`xfkaw$oRf zsnhg{2rFm^J53s{xieHOGWH{NQU}F06-q;@FOWdIuQ8WU`^ECMvn|*lhfed=>|@t- zwjT_ndJ>a(0jd?Nn%}OEsS%Pp@Soi&8$Pz1x-iK|L z*)5l(MxA_Yv$YlYwEMgJ^H(uGEB93m8U40&J3Jxm{F5v4{9L;HHy;A%BH_|K9bH4B zti-9_1vpClh^8IMicq{``ggC!Mmfty2i%Kl(k;Hn!yPA29_MaMQJRx1Iz=5wLD!Lu zRa76uV&=?EJ1*(WXTPO7$IefBvC=zUSvzl{%Yw3?$*(w_&KXH|?>s#6lO57m?2b>{dQ9*wgKzq+bkZbcn`ZnVG zmguR0HD5+W@eTP8zE4`H!CJG8n>tr?TJW1kiS!Q)pptZYNk(R77$aC_o=od}CpGi& zP-Xxr_~?S__!VsjFSzX2HFu5#t+;EQL2Umu)r;8V%@n@G2<_mIFeeR6wiW669F50! 
z0pK&Z-8KYQq!@VvXEHh3IV?+-OK^5FrZa$Vh)Ka#e7v&vs!dGOI#snq>uNv&3Wg_h z$dt*wEBRCsCu+V!VB<6`lMuM{J8vZSl^SI2RfsM&%jMIzo9$=gL}hgtBtSS};rAWP z-%P*XhYn=E&}W2q3i`ZZl-pnTn;b4UXY1XF)-}KV3%39^$3la;#XHibfAe4drwzqM z^3WX(P|~fRJ{D}!^Y$>~ZAfd#@k}b=)cZae102&i!v*?Y1SxaUw0TjsAJj?C0HDk=aYlB)@v;w6y>vDaA7Y z-}siQ0UPbj&_VO4ob#l-z{7)?li|V^( zGTyKCPK}*EFQ+|Ma1*2VABWr0gH$~1Oa8NnE%p*3n_a}uW3)dRKR{HRS(W5fItXXM zg_(E-|2{dk7u7D6!rr?!(oE`gh%3ZVHud`hxyc-1O`HfUz)@SOb5Zl*Uom>pBJ(`@ zkLFkXXVNGU_){Mx*$rY+tn)&_S}*JDP}EiIe}{Ue1rOcj4ldQ^l?TgzV&EQfBm>0^ZGq} zJ1`55>uH(j6OYyLqBbK0rG`zWnpIIiZE1L8sIh04Te3pTle=|11Y~jtEWD2c+GJ8o zGDjzj%6Hb%*A4Y{KHB#~GMx6+utZF5`XQj_iCsRWjRn2GoIyQKW}!Pc&k7&dEa)S*toA+p_U;EbN#AO2>!g+>!h9 zAVd~Yu9CJ93-s)?&OFjB<2!Fsgf2>+&FJ}pNtw5Kq+HT|LBo@f-_vjs@5>b@Rc*~f++8X#P?8G!Q zB2@e9;j?j=vbjdiD8*YQWx__Ht-l&IEgzAWy#7a|^NYLJ3; zcZ{K<_-n>E@oC$Pc@Og=JfuM$36Lq~(HwFGX$&*+H=`zL@`rrJ!dDFKY6X*4eg5pxH+pZ*I<^_s>(8EE?zGcqQLhq0_S=isK4h-E zlX*4Vy>^>LRFk-`#m@AdaWl*-zIQGFXC7FStj)(NNpdN*$zer%E@tZ;0{C(o7^es@ za`H)hi4$rU`^qtOu^FPUxiL5ON(NQm`9b9oS}(E~c>n&a$2x(q3lF-&DU%8i4&mo? z+Oqrgsk!TEU^kI68LD-kjD)D z-1wute>+V(uV7{@Jj<0-$fo=~Kz^Jv%&rbC_RBxScb^Q9Khz@rWc%r5J=&RrG6O&c zd}aK(yv2^qd~f3N>=4?X5EekQ$hY@vNnH3)1+~{8nVR0!{CnS&tx8zRFu{ozC53^m zLWwN~()#cTZOm8`l)T?Sf>NObdgJQH8EC--pK1UZaU9{B_<+5by^|L7_XLCj(YfrC zIi)&_V_i_q!wU{37C@ZCx{m(<<*-VZFh=wk=Az#^Lp+xU zlZ2gYnRu9o1g_eJbi*kM3YxYUGBEJHk;D|uGr_kpT+sK8%{o?d0qVm;K@#A^HzLQR z;M7w&ilh{D2jZo$_Ui|{(?b&NN7lsoJhnM=fyw|7Wv#l}afpMhGeqxq3Pj=Lt*Gej z{v(z|Jvm@7JJgH~LLJ3=Ns~1gV&{GXhWt18)$WKLo_(&EiCH8@Xt&6$2}O~=^5)%e z{7;q^^@9Qqf0;j6?6`_=ruvD`(8(uD0?!iPwB8H#q-yU<^89K+4ql{}vz$6id}K4Z z*?(Y8|AC+n=k-gk7$deZdomD~!tj6O{S`73TNz`LXDDCs_i0udhF(H?TNRyN&`C!n z@|ZcMxxarc>^-&r^OK_EWo@U}ykkfgxL{ALSfl~#Dajj^v7+Q|5k&e+oPyxeKmyCM78lRXs54r&@-m5KdT87w5LdV=OqJCrS4;Z(XQQ)nnVu6u;ORb=hL-HFOB4G zVu}$G!r(YE^^M+K`k#-q-v)84o}_0UTFB}3zc^(S5=H*5{;UjE^I@aTS53kRr!gP? 
zw9ej4ADZ*)iO2KZcXh${F4mki1MfPxEbQzJOJ?}GxI;c)D1AQ-^KA+TPMn%sn$T3f zPXx?y-X=Lp%=Yw{wbWhOOf#}Tr6^=$L-ZZ>oD1|bmuHu!EiCn%#F^q6Y7%(?Z(|5| z{NZlT-?fE>ew`_^osGx)4Q~Dtbw<_;!Olex%7yccMsE40LUk&I*KGp31YQTZ-4g;9=9@q zOwq1Kq^Ya32hoa52zF^og(YNe&*AWf9vjl0bBJ$*eICYb0-kywX2<)@t8X%T#p*nM z9UbDrWml-Q_7xtBWUs?4jyWVqIp+i~<7_jb_%RSjgz#!8l5f?GFVIgWHZF+$>d~Ch zjIalj5HHeEF0%QsHajFo9(5nrhS&@7c#-$B;6qr1yf^lSJwsyT-pOySvcT5(-p@9f z4Nn?=6MJqBa0{e0RQ)iK`sqWI=gPTV3rRas947qh-6%d?l$FzBLsO+i1U56rmyObi z7{h;O7YlFPVdhT7%})w5x6*3HvP|h!wb2d8M(-kEi@fcCQx&iZNyjmk zKWHQNJT^AUD&u`bU_o9JSZj*!@1wom&araa08%c?O^1~y{0Jejes0q^JGxK2-h|r} zRpsW;f!){|wyK85CZaVaw3AvUeA)|;W|C+hsgF7|I68_jzcG z$xC_hf+|~|K-ZsfQe^RW#CZ1XRCUfLhi9d%OP;9-#?@{&!vg>5Pks+K+2@Cv5c)`) z)`iV9N{ytoLlSW+gp)qBwz$gzBqy=Z<*fz&Jchye{C%t0?d{>ELiprLW{AO z{|oN&mpAQ@cqP4tu^jChFy4Bcd1p1U&R90^A(Q+jou@A^&P--;xb# z$?L|6cJ!#87|BBRIVsGVBSyA=kjp}JRrSteW=2vs6R1ngr}1u*3s8t`9Kzhh zuyGU%6|EJE{_ayTLB*jzKwbW^Xc47EOyBsQv`^k+C{sn(jWaZ?{{JK}RHHcH3^+-z zf%l-W!5UM6e{~R58P_`Qh>;9a++h5Skt8ZijYOEVerKGiJpX<+_xdH8&hF;T{D!11 z(=lzg%R4(cbbwuY-L4JXc_(c+$rlraJ(9-R!o{r@@A1hK70}Or43+Ps@9eXmUh3qg zfd3qRiZQmdgg21P^F?M_K$pC`=;;(_FRaMvtS(GvL{B=-r(qEJioUPLC=uZmnOy$s z{_=9kop!b&Yu#PTazF13$70K^81v3pKVe1qT097hO$9NOy{H(Ydf4oIu^(mCNrZl) z9n-rIMzd@Q93~z5l@P`Nd`s8YvtILct?lni`yWc3GWLteV;g$Ra<-2HGjoh1mmBsX zkdMR>jBAGb9c$gtaRE07I}roYkX9rB|eA~C^zGlQS;*AC)0*m|)Z?E86b0C+y# zcTOR`eJ-X8o=m)}U^`&u71y!K+SzXBUBY_b>?N60BZl8LM**iugE{zIOB_AIG#rJI zS=2m14qhu6LuANJ3cd@@Sgi0tRCHIPnH|n+%VAuiIt9)GCW1g5alFy)o-OADM@G=O zFbSwu#@>P2?r9xm6JVR{mOLktuBU12s?j$rvGy39Uf73-3!qv{!|DRJGwzSab;SY=3KtuwH53!TS(dW{qlQ8qWo` zRvWS^&SPWu#Ye-UEp84=_c=KQ+L(ieYK2bne}@ex($y0Yo4>G0S6VHYFFkK&AeNew zEm%0T54(O_)Yo%z2zjAwO`Ax#VA&PE(%U+tE830cP629#(K`q$wIS%Y>5LyewIz@g zzm+GIwqhLM-pSjtSPq)D@~8S`CCjOxccs7Q z0*N?pVQ($M)lz&`3=LrCJ+j}wRMxo#?w$=ZIA+-~&Hk33E!*^%rasNk)oEj{)e5ch z=q^!|LDO{RLbkg>kVo#U8)eatUtvkeERwIg*UmIRx=^4Es*N zs}x|(|6OCuxc3Gk+CNt__g5x^7}4OHwD<8%8#yS4QJCzdJR~wiWU^@O8L@DxHw}#M z%*Ko_Ma+exS*)MVn9oO&`H~|m=Ny;s{4B{%%d6w4fUW 
zZTLN6X1We52;&$w2Nvcs{B%l6z!)^faIXVUic4Vh0GOO(Yu0lQBlF~u4}YNN!$12l z)zlDKXtcN7Rk}v-QDM1nO;4iI-pXXFo`h3BooTKDvaOsZ#$DTA=)!$27laKiZ4~2- zVeB8IREj`7hh`@HF0~p8g4zNbfzDqoO3(e?IKUOcq*IJ|ro<8>XokBlWvS~oLLFDJ z5SIyc<53X~PmHaf)>6Mwn>!lGAYKB3vIjpvB{g`mFW(S@ibG>(3lUD`)H(e^o2fYj z=WqBHB3}L!Jx$jzJct}a&`yPM(~No8@MX(7@7%()m5BbRLs@E=mSZv zV`se(fGUlQ{+2#_s?&<6FXHWHBUX-EWHvp`)q2@0{1A|poRrnOo#2hf{ez^a>rV%+ zhB-BqvK&pe$0=wwR3cRC`Rd*7`FAe|1$BuDH;g-fCVo~C{pKmBe@t(X0I(zCd9T|A z3R3ZfMG1J6L()@j&|Sx9k=C>L8}}!vFX~f3ezu>zf8P z8+KIGc)K~r%dBwYX-d9B@q>~8O@;_MZWgCsIT8+fLm#WxxY2(Ge&$cTEcP%cLWG1H z-^_7idVz9j#O0Mn%7tlv>)Z5-90cj+*#h3_=kWLh7#ZTmE6o&wGz{X4a(lDZCScDj z*L|<>$pJW>m*aYyv&jF|(vE4OJ%>Cz`j0z~+f3)-`lRs0xy>ABw%r2JsZ-zQ&}W>U zj@JaC?b=@7DYC3&r%Bx_b^*EnD(4OU&CQ>VIZTQFKKHZ;e`d<+!OYjagb68{DELU3rt&e| z?tftbLmB5{w8dHs(#R(v<7zrN08^Gn06qWoQ%fhno;XkcigRhSp%QV>CAB4%H0s9C znr^%^$$Ju7(-?wzM2L!>T}k|C{8KZHFD_AV1{37@UmiYC1pBd*ar2O|kto)lUj&OO zy*L-r^IDM@-z_Y07K&}*)Verg8q5*!l9T9ccB*j{Q(0^b``%%k8fF3`sc(GB3mAKvc}(~7 zeu?g|Kh(+DvTlO_mBKtD^eSHHP@>|j8o)&iCTt&epSSW(g{PZ@eeW=ziRt7P!Zg`+ zk6znQ9qSOL5t1WKyQPKX5;DJ85;A0C?EA~NZJ<9ib^TJY`{!&GX!|wn@#^DHqHQ|; z%APbEyh2=Q@ZZO-c-s*$kh}d~J2>tXz_ao?HKTvO|CTUcZTZn&*I%h)I`6mq=eAJ; zKUL&tY|gLlAHmYm-<76PK&>22)*BbEDILj(3h^FzFH_MzC_T!f3vw%#;3r8T*52tn zaiL;4z%2FSAoN8J!P#&|AtQi>SdCDg`ql`n`+;FnPe@xx-&<4ihzt-k`2MPFEKCCB z2h{_N5e1G2m-@|Z-9w9sY$%dwJQ>$a;<>+NP~Vb*why|mgU$I79k>8JpgcHaGXa!l-=uI{f-o`zy8d#MOg;#Plmj$IA7<3=35hLyk$y7#hR-@970 zlnA2ZVh)O49J~%B|5^;y#bqRX!@A>aqFo&44Wv#OwpGK2m?rkI^HJ@0BvB@Ab~Y(v|a7fuBa z>qQT=$?xx3pdO)SHNW|Jl@o}agnOWH%w;`VYtrEy7gi`+XyF+Cf}U>Dg4}-SS35Xb zPf1%aYdTDa>7}0eWNs>8EB-Veq;mbWr>V%k_%(59+?_S=oGj3ukd)Xl^`SSs$mUIOj8#Ftn)SkIEXd+Sj5y?U20p&>$zDEy*!jREef^O5v+vnIVvJ)NRL@%t(h=aW1VI7J_hy5a+U*y= z_Ow7?qiGGFb(lJ!iX?l!x+lcq)Nt}g8r3T{bs~2X{(n;SPO`-<4e!TpO|#-KS=-NY z8E>IuauwdLpD%?b(mX2fi1_mF!qKnLZWy-F%?&nhWT?@?5_R}tvBiUkis+E7T76`K 
zapA8pmh(Fa0H|0v2#Yw1xvVdOv^68Rjp~$khNC8eI_qma=jMpVU0cJGknuj3z?0?jW2I{=rMbZar2K&Pe3~F9Yu-%WJhH)s19^MPxBK+xzZicoMWpUSuv9h=?#qetnUdh{%3)WJH#Iusn?<0)t2r`ab}PF4 z$09~0BLWF4)-dUmNeDwUVT}*W3$(X_LrM?;b$xZU>&mgMjqb>SNe}*sZ!hB#V*Jce zvHmtXpiirxYm<_dn6RjY0fW$$^5*mPs@NY*T@Ol~3)5^vdhZ>d+s;aO zH&!1pMm5yYj$<}HBg&UQyx*v3*GNuO;PpwKY0^}z(R}v6){{|siNw$M_$+vS{3na2 zV>UfTKah@qro3cibajhC7JnL(2(q20Et~b;sM~+$h5-|?ts@f#Y#jz)2oJr`-r#gi z40~M?>p@Clz>nDWC|qxwF{Xl!Zbi#yI_pXOf?J-$YHptZN~5Ok0B6S!XZ2;J%R|Ujg|ffNa&d+`{P06 zsKn=0n_erc7`Fs`c~TSqj_?HF_`yw;UF zrk(oF*$<|Exs*sQgXuj5kqOn7EN?pgbUp7Rg<*?ZEjT!wlfH|?rNR9YMF2|By>MKv z%2%|{v$2ly5FVR8#-G1^w%OK}L@tw%P{=Dxwu?)$7@@1YxWj?r7Vsu~-^9%?|Ew>a z`cp&RvI?1#_wqGD5>XqT;Kd3D880J8olj`*oP|*x{jFusLC+8SkcX*bh=ahLn7X`o z`>*e%P2amO^;kwnC1be99HkB%h&QD(3RrwyY2`dIzQC69)&78j%>*?3L-o_#?9Tro zL|y~1+RY~r`gqF;5?BgMP|?wA;iwZQu&_EdGhKZt6?*#|IX4>z#TFmAE8-iWf~;TnR<^1%cvM}W8b z%(!lcWMgzq&m^*3Brzh)ydhr-nPzm9Jfl<@pdeGPNf_BCdm;TuY0&@bU?7P`^|D#q1Ti z)IZZlmn=&-@og$FelFCYGP~ux!BK6R;~i;d9-${zhW*7zwxMH6Z)p@Jp;Q2#ZZ2t% zIthQ;;d|KWY#QkCesjy>nJ>@@;d65*vrB%OL70(gJ%`zYd`R%X_1w!Harl9I@jwj?(oY@~EUcONJ1wL}4^Z@u zlF&@#1|Ltx5^J_BZ8TgdWNCh|livOz5ffpZ15Jtbq77MX{y|WfYubMU!Aa)l4QuEv z1^~c>ORBNNQ~$C5YVO6W{NHs-IzKeK`|tg%Sn5Ai3-c>bQ<<3zF(5C$aKsm^42Z?3(Wrihq0Z* z!s$?H+Wh}BslS3iKwm4hiQu$JyW0DIPpsg=lmM0J;hFU8^L`__. \ No newline at end of file +For more documentation on that topic refer to ``__. 
diff --git a/examples/boxplot.py b/examples/boxplot.py index b3bcab3dd..c9c58f049 100644 --- a/examples/boxplot.py +++ b/examples/boxplot.py @@ -73,8 +73,7 @@ def boxplot(neurons, feature, new_fig=True, subplot=111): def main(): - - morphology_directory = Path(PACKAGE_DIR, "tests/data/valid_set") + morphology_directory = Path(PACKAGE_DIR, "tests/data/valid_set") neurons = load_morphologies(morphology_directory) boxplot(neurons, "section_lengths") diff --git a/examples/density_plot.py b/examples/density_plot.py index b8df29909..59249a2e9 100644 --- a/examples/density_plot.py +++ b/examples/density_plot.py @@ -42,30 +42,41 @@ PACKAGE_DIR = Path(__file__).resolve().parent.parent -def extract_density(population, plane='xy', bins=100, neurite_type=NeuriteType.basal_dendrite): +def extract_density(population, plane="xy", bins=100, neurite_type=NeuriteType.basal_dendrite): """Extracts the 2d histogram of the center - coordinates of segments in the selected plane. + coordinates of segments in the selected plane. 
""" segment_midpoints = np.array( - get_feat('segment_midpoints', population, neurite_type=neurite_type) + get_feat("segment_midpoints", population, neurite_type=neurite_type) ) - horiz = segment_midpoints[:, 'xyz'.index(plane[0])] - vert = segment_midpoints[:, 'xyz'.index(plane[1])] + horiz = segment_midpoints[:, "xyz".index(plane[0])] + vert = segment_midpoints[:, "xyz".index(plane[1])] return np.histogram2d(np.array(horiz), np.array(vert), bins=(bins, bins)) -def plot_density(population, # pylint: disable=too-many-arguments, too-many-locals - bins=100, new_fig=True, subplot=111, levels=None, plane='xy', - colorlabel='Nodes per unit area', labelfontsize=16, - color_map='Reds', no_colorbar=False, threshold=0.01, - neurite_type=NeuriteType.basal_dendrite, **kwargs): +def plot_density( + population, # pylint: disable=too-many-arguments, too-many-locals + bins=100, + new_fig=True, + subplot=111, + levels=None, + plane="xy", + colorlabel="Nodes per unit area", + labelfontsize=16, + color_map="Reds", + no_colorbar=False, + threshold=0.01, + neurite_type=NeuriteType.basal_dendrite, + **kwargs, +): """Plots the 2d histogram of the center - coordinates of segments in the selected plane. + coordinates of segments in the selected plane. 
""" fig, ax = matplotlib_utils.get_figure(new_fig=new_fig, subplot=subplot) - H1, xedges1, yedges1 = extract_density(population, plane=plane, bins=bins, - neurite_type=neurite_type) + H1, xedges1, yedges1 = extract_density( + population, plane=plane, bins=bins, neurite_type=neurite_type + ) mask = H1 < threshold # mask = H1==0 H2 = np.ma.masked_array(H1, mask) @@ -73,44 +84,67 @@ def plot_density(population, # pylint: disable=too-many-arguments, too-many-loc colormap = plt.get_cmap(color_map).copy() colormap.set_bad(color='white', alpha=None) - plots = ax.contourf((xedges1[:-1] + xedges1[1:]) / 2, - (yedges1[:-1] + yedges1[1:]) / 2, - np.transpose(H2), # / np.max(H2), - cmap=colormap, levels=levels) + plots = ax.contourf( + (xedges1[:-1] + xedges1[1:]) / 2, + (yedges1[:-1] + yedges1[1:]) / 2, + np.transpose(H2), # / np.max(H2), + cmap=colormap, + levels=levels, + ) if not no_colorbar: cbar = plt.colorbar(plots) cbar.ax.set_ylabel(colorlabel, fontsize=labelfontsize) - kwargs['title'] = kwargs.get('title', '') - kwargs['xlabel'] = kwargs.get('xlabel', plane[0]) - kwargs['ylabel'] = kwargs.get('ylabel', plane[1]) + kwargs["title"] = kwargs.get("title", "") + kwargs["xlabel"] = kwargs.get("xlabel", plane[0]) + kwargs["ylabel"] = kwargs.get("ylabel", plane[1]) return matplotlib_utils.plot_style(fig=fig, ax=ax, **kwargs) -def plot_neuron_on_density(population, # pylint: disable=too-many-arguments - bins=100, new_fig=True, subplot=111, levels=None, plane='xy', - colorlabel='Nodes per unit area', labelfontsize=16, - color_map='Reds', no_colorbar=False, threshold=0.01, - neurite_type=NeuriteType.basal_dendrite, **kwargs): +def plot_neuron_on_density( + population, # pylint: disable=too-many-arguments + bins=100, + new_fig=True, + subplot=111, + levels=None, + plane="xy", + colorlabel="Nodes per unit area", + labelfontsize=16, + color_map="Reds", + no_colorbar=False, + threshold=0.01, + neurite_type=NeuriteType.basal_dendrite, + **kwargs, +): """Plots the 2d histogram of the 
center - coordinates of segments in the selected plane - and superimposes the view of the first neurite of the collection. + coordinates of segments in the selected plane + and superimposes the view of the first neurite of the collection. """ _, ax = matplotlib_utils.get_figure(new_fig=new_fig) ref_neuron = population[0] matplotlib_impl.plot_tree(ref_neuron.neurites[0], ax) - return plot_density(population, plane=plane, bins=bins, new_fig=False, subplot=subplot, - colorlabel=colorlabel, labelfontsize=labelfontsize, levels=levels, - color_map=color_map, no_colorbar=no_colorbar, threshold=threshold, - neurite_type=neurite_type, **kwargs) + return plot_density( + population, + plane=plane, + bins=bins, + new_fig=False, + subplot=subplot, + colorlabel=colorlabel, + labelfontsize=labelfontsize, + levels=levels, + color_map=color_map, + no_colorbar=no_colorbar, + threshold=threshold, + neurite_type=neurite_type, + **kwargs, + ) def main(): - morphology_directory = Path(PACKAGE_DIR, "tests/data/valid_set") neurons = load_morphologies(morphology_directory) diff --git a/examples/end_to_end_distance.py b/examples/end_to_end_distance.py index 4e2adeb3e..2b22a6ec7 100755 --- a/examples/end_to_end_distance.py +++ b/examples/end_to_end_distance.py @@ -42,8 +42,7 @@ def path_end_to_end_distance(neurite): """Calculate and return end-to-end-distance of a given neurite.""" trunk = neurite.root_node.points[0] - return max(morphmath.point_dist(l.points[-1], trunk) - for l in neurite.root_node.ileaf()) + return max(morphmath.point_dist(l.points[-1], trunk) for l in neurite.root_node.ileaf()) def mean_end_to_end_dist(neurites): @@ -56,10 +55,10 @@ def make_end_to_end_distance_plot(nb_segments, end_to_end_distance, neurite_type plt.figure() plt.plot(nb_segments, end_to_end_distance) plt.title(neurite_type) - plt.xlabel('Number of segments') - plt.ylabel('End-to-end distance') + plt.xlabel("Number of segments") + plt.ylabel("End-to-end distance") # uncomment to show - #plt.show() + # 
plt.show() def calculate_and_plot_end_to_end_distance(neurite): @@ -67,37 +66,47 @@ def calculate_and_plot_end_to_end_distance(neurite): an increasingly larger part of a given neurite. Note that the plots are not very meaningful for bifurcating trees.""" + def _dist(seg): """Distance between segmenr end and trunk.""" return morphmath.point_dist(seg[1], neurite.root_node.points[0]) end_to_end_distance = [_dist(s) for s in nm.iter_segments(neurite)] - make_end_to_end_distance_plot(np.arange(len(end_to_end_distance)) + 1, - end_to_end_distance, neurite.type) + make_end_to_end_distance_plot( + np.arange(len(end_to_end_distance)) + 1, end_to_end_distance, neurite.type + ) def main(): # load a neuron from an SWC file - filename = Path(PACKAGE_DIR, 'tests/data/swc/Neuron_3_random_walker_branches.swc') + filename = Path(PACKAGE_DIR, "tests/data/swc/Neuron_3_random_walker_branches.swc") m = nm.load_morphology(filename) # print mean end-to-end distance per neurite type - print('Mean end-to-end distance for axons: ', - mean_end_to_end_dist(n for n in m.neurites if n.type == nm.AXON)) - print('Mean end-to-end distance for basal dendrites: ', - mean_end_to_end_dist(n for n in m.neurites if n.type == nm.BASAL_DENDRITE)) - print('Mean end-to-end distance for apical dendrites: ', - mean_end_to_end_dist(n for n in m.neurites - if n.type == nm.APICAL_DENDRITE)) - - print('End-to-end distance per neurite (nb segments, end-to-end distance, neurite type):') + print( + "Mean end-to-end distance for axons: ", + mean_end_to_end_dist(n for n in m.neurites if n.type == nm.AXON), + ) + print( + "Mean end-to-end distance for basal dendrites: ", + mean_end_to_end_dist(n for n in m.neurites if n.type == nm.BASAL_DENDRITE), + ) + print( + "Mean end-to-end distance for apical dendrites: ", + mean_end_to_end_dist(n for n in m.neurites if n.type == nm.APICAL_DENDRITE), + ) + + print("End-to-end distance per neurite (nb segments, end-to-end distance, neurite type):") for nrte in m.neurites: # plot 
end-to-end distance for increasingly larger parts of neurite calculate_and_plot_end_to_end_distance(nrte) # print (number of segments, end-to-end distance, neurite type) - print(sum(len(s.points) - 1 for s in nrte.root_node.ipreorder()), - path_end_to_end_distance(nrte), nrte.type) + print( + sum(len(s.points) - 1 for s in nrte.root_node.ipreorder()), + path_end_to_end_distance(nrte), + nrte.type, + ) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/examples/extract_distribution.py b/examples/extract_distribution.py index 1288e5e5a..9595b8bc6 100755 --- a/examples/extract_distribution.py +++ b/examples/extract_distribution.py @@ -47,9 +47,9 @@ def find_optimal_distribution(population_directory, feature): """Loads a list of morphologies, extracts feature - and transforms the fitted distribution in the correct format. - Returns the optimal distribution, corresponding parameters, - minimun and maximum values. + and transforms the fitted distribution in the correct format. + Returns the optimal distribution, corresponding parameters, + minimun and maximum values. 
""" population = nm.load_morphologies(population_directory) @@ -60,17 +60,14 @@ def find_optimal_distribution(population_directory, feature): def main(): - population_directory = Path(PACKAGE_DIR, "tests/data/valid_set") result = stats.fit_results_to_dict( - find_optimal_distribution(population_directory, "section_lengths") + find_optimal_distribution(population_directory, "section_lengths") ) - print(json.dumps( - result, indent=2, separators=(',', ': '), cls=NeuromJSON - )) + print(json.dumps(result, indent=2, separators=(",", ": "), cls=NeuromJSON)) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/examples/features_graph_table.py b/examples/features_graph_table.py index 8d934a31b..9de66ec20 100755 --- a/examples/features_graph_table.py +++ b/examples/features_graph_table.py @@ -39,10 +39,9 @@ def stylize(ax, name, feature): - """Stylization modifications to the plots - """ + """Stylization modifications to the plots""" ax.set_ylabel(feature) - ax.set_title(name, fontsize='small') + ax.set_title(name, fontsize="small") def histogram(neuron, feature, ax, bins=15, normed=True, cumulative=False): @@ -73,8 +72,7 @@ def histogram(neuron, feature, ax, bins=15, normed=True, cumulative=False): def plot_feature(feature, cell): - """Plot a feature - """ + """Plot a feature""" fig = pl.figure() ax = fig.add_subplot(111) @@ -88,7 +86,6 @@ def plot_feature(feature, cell): def create_feature_plots(morphologies_dir, feature_list, output_dir): - for morph_file in get_morph_files(morphologies_dir): m = nm.load_morphology(morph_file) @@ -102,10 +99,10 @@ def create_feature_plots(morphologies_dir, feature_list, output_dir): def main(): create_feature_plots( morphologies_dir=Path(PACKAGE_DIR, "tests/data/valid_set"), - feature_list=["section_lengths"], - output_dir=".", + feature_list=["section_lengths"], + output_dir=".", ) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/examples/get_features.py b/examples/get_features.py index 
1fee0f167..a3a9739a2 100755 --- a/examples/get_features.py +++ b/examples/get_features.py @@ -50,12 +50,14 @@ def stats(data): dicitonary with length, mean, sum, standard deviation,\ min and max of data """ - return {'len': len(data), - 'mean': np.mean(data), - 'sum': np.sum(data), - 'std': np.std(data), - 'min': np.min(data), - 'max': np.max(data)} + return { + "len": len(data), + "mean": np.mean(data), + "sum": np.sum(data), + "std": np.std(data), + "min": np.min(data), + "max": np.max(data), + } def pprint_stats(data): @@ -64,16 +66,15 @@ def pprint_stats(data): def main(): - - filename = Path(PACKAGE_DIR, 'tests/data/swc/Neuron.swc') + filename = Path(PACKAGE_DIR, "tests/data/swc/Neuron.swc") # load a neuron from an SWC file m = nm.load_morphology(filename) # Get some soma information # Soma radius and surface area - print("Soma radius", nm.get('soma_radius', m)) - print("Soma surface area", nm.get('soma_surface_area', m)) + print("Soma radius", nm.get("soma_radius", m)) + print("Soma surface area", nm.get("soma_surface_area", m)) # Get information about neurites # Most neurite data can be queried for a particular type of neurite. @@ -84,35 +85,49 @@ def main(): # to warm up... 
# number of neurites - print('Number of neurites (all):', nm.get('number_of_neurites', m)) - print('Number of neurites (axons):', - nm.get('number_of_neurites', m, neurite_type=nm.NeuriteType.axon)) - print('Number of neurites (apical dendrites):', - nm.get('number_of_neurites', m, neurite_type=nm.NeuriteType.apical_dendrite)) - print('Number of neurites (basal dendrites):', - nm.get('number_of_neurites', m, neurite_type=nm.NeuriteType.basal_dendrite)) + print("Number of neurites (all):", nm.get("number_of_neurites", m)) + print( + "Number of neurites (axons):", + nm.get("number_of_neurites", m, neurite_type=nm.NeuriteType.axon), + ) + print( + "Number of neurites (apical dendrites):", + nm.get("number_of_neurites", m, neurite_type=nm.NeuriteType.apical_dendrite), + ) + print( + "Number of neurites (basal dendrites):", + nm.get("number_of_neurites", m, neurite_type=nm.NeuriteType.basal_dendrite), + ) # number of sections - print('Number of sections:', - nm.get('number_of_sections', m)) - print('Number of sections (axons):', - nm.get('number_of_sections', m, neurite_type=nm.NeuriteType.axon)) - print('Number of sections (apical dendrites):', - nm.get('number_of_sections', m, neurite_type=nm.NeuriteType.apical_dendrite)) - print('Number of sections (basal dendrites):', - nm.get('number_of_sections', m, neurite_type=nm.NeuriteType.basal_dendrite)) + print("Number of sections:", nm.get("number_of_sections", m)) + print( + "Number of sections (axons):", + nm.get("number_of_sections", m, neurite_type=nm.NeuriteType.axon), + ) + print( + "Number of sections (apical dendrites):", + nm.get("number_of_sections", m, neurite_type=nm.NeuriteType.apical_dendrite), + ) + print( + "Number of sections (basal dendrites):", + nm.get("number_of_sections", m, neurite_type=nm.NeuriteType.basal_dendrite), + ) # number of sections per neurite - print('Number of sections per neurite:', - nm.get('number_of_sections_per_neurite', m)) - print('Number of sections per neurite (axons):', - 
nm.get('number_of_sections_per_neurite', m, neurite_type=nm.NeuriteType.axon)) - print('Number of sections per neurite (apical dendrites):', - nm.get('number_of_sections_per_neurite', - m, neurite_type=nm.NeuriteType.apical_dendrite)) - print('Number of sections per neurite (basal dendrites):', - nm.get('number_of_sections_per_neurite', - m, neurite_type=nm.NeuriteType.apical_dendrite)) + print("Number of sections per neurite:", nm.get("number_of_sections_per_neurite", m)) + print( + "Number of sections per neurite (axons):", + nm.get("number_of_sections_per_neurite", m, neurite_type=nm.NeuriteType.axon), + ) + print( + "Number of sections per neurite (apical dendrites):", + nm.get("number_of_sections_per_neurite", m, neurite_type=nm.NeuriteType.apical_dendrite), + ) + print( + "Number of sections per neurite (basal dendrites):", + nm.get("number_of_sections_per_neurite", m, neurite_type=nm.NeuriteType.apical_dendrite), + ) # OK, this is getting repetitive, so lets loop over valid neurite types. # The following methods return arrays of measurements. We will gather some @@ -120,42 +135,42 @@ def main(): # Section lengths for all and different types of neurite for ttype in nm.NEURITE_TYPES: - sec_len = nm.get('section_lengths', m, neurite_type=ttype) - print('Section lengths (', ttype, '):', sep='') + sec_len = nm.get("section_lengths", m, neurite_type=ttype) + print("Section lengths (", ttype, "):", sep="") pprint_stats(sec_len) # Segment lengths for all and different types of neurite for ttype in nm.NEURITE_TYPES: - seg_len = nm.get('segment_lengths', m, neurite_type=ttype) - print('Segment lengths (', ttype, '):', sep='') + seg_len = nm.get("segment_lengths", m, neurite_type=ttype) + print("Segment lengths (", ttype, "):", sep="") pprint_stats(seg_len) # Section radial distances for all and different types of neurite # Careful! 
Here we need to pass tree type as a named argument for ttype in nm.NEURITE_TYPES: - sec_rad_dist = nm.get('section_radial_distances', m, neurite_type=ttype) - print('Section radial distance (', ttype, '):', sep='') + sec_rad_dist = nm.get("section_radial_distances", m, neurite_type=ttype) + print("Section radial distance (", ttype, "):", sep="") pprint_stats(sec_rad_dist) # Section path distances for all and different types of neurite # Careful! Here we need to pass tree type as a named argument for ttype in nm.NEURITE_TYPES: - sec_path_dist = nm.get('section_path_distances', m, neurite_type=ttype) - print('Section path distance (', ttype, '):', sep='') + sec_path_dist = nm.get("section_path_distances", m, neurite_type=ttype) + print("Section path distance (", ttype, "):", sep="") pprint_stats(sec_path_dist) # Local bifurcation angles for all and different types of neurite for ttype in nm.NEURITE_TYPES: - local_bifangles = nm.get('local_bifurcation_angles', m, neurite_type=ttype) - print('Local bifurcation angles (', ttype, '):', sep='') + local_bifangles = nm.get("local_bifurcation_angles", m, neurite_type=ttype) + print("Local bifurcation angles (", ttype, "):", sep="") pprint_stats(local_bifangles) # Remote bifurcation angles for all and different types of neurite for ttype in nm.NEURITE_TYPES: - rem_bifangles = nm.get('remote_bifurcation_angles', m, neurite_type=ttype) - print('Local bifurcation angles (', ttype, '):', sep='') + rem_bifangles = nm.get("remote_bifurcation_angles", m, neurite_type=ttype) + print("Local bifurcation angles (", ttype, "):", sep="") pprint_stats(rem_bifangles) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/examples/histogram.py b/examples/histogram.py index 0d55670bd..4c9223ed4 100644 --- a/examples/histogram.py +++ b/examples/histogram.py @@ -76,16 +76,16 @@ def histogram(neurons, feature, new_fig=True, subplot=111, normed=False, **kwarg figure file. 
""" - bins = kwargs.get('bins', 25) - cumulative = kwargs.get('cumulative', False) + bins = kwargs.get("bins", 25) + cumulative = kwargs.get("cumulative", False) fig, ax = matplotlib_utils.get_figure(new_fig=new_fig, subplot=subplot) - kwargs['xlabel'] = kwargs.get('xlabel', feature) + kwargs["xlabel"] = kwargs.get("xlabel", feature) - kwargs['ylabel'] = kwargs.get('ylabel', feature + ' fraction') + kwargs["ylabel"] = kwargs.get("ylabel", feature + " fraction") - kwargs['title'] = kwargs.get('title', feature + ' histogram') + kwargs["title"] = kwargs.get("title", feature + " histogram") feature_values = [neurom.features.get(feature, neu) for neu in neurons] @@ -93,23 +93,20 @@ def histogram(neurons, feature, new_fig=True, subplot=111, normed=False, **kwarg ax.hist(feature_values, bins=bins, cumulative=cumulative, label=neu_labels, density=normed) - kwargs['no_legend'] = len(neu_labels) == 1 + kwargs["no_legend"] = len(neu_labels) == 1 return matplotlib_utils.plot_style(fig=fig, ax=ax, **kwargs) def population_feature_values(pops, feature): - """Extracts feature values per population - """ + """Extracts feature values per population""" pops_feature_values = [] for pop in pops: - feature_values = [neurom.features.get(feature, neu) for neu in pop] # ugly hack to chain in case of list of lists if any([isinstance(p, (list, np.ndarray)) for p in feature_values]): - feature_values = list(chain(*feature_values)) pops_feature_values.append(feature_values) @@ -152,30 +149,31 @@ def population_histogram(pops, feature, new_fig=True, normed=False, subplot=111, figure file. 
""" - bins = kwargs.get('bins', 25) - cumulative = kwargs.get('cumulative', False) + bins = kwargs.get("bins", 25) + cumulative = kwargs.get("cumulative", False) fig, ax = matplotlib_utils.get_figure(new_fig=new_fig, subplot=subplot) - kwargs['xlabel'] = kwargs.get('xlabel', feature) + kwargs["xlabel"] = kwargs.get("xlabel", feature) - kwargs['ylabel'] = kwargs.get('ylabel', feature + ' fraction') + kwargs["ylabel"] = kwargs.get("ylabel", feature + " fraction") - kwargs['title'] = kwargs.get('title', feature + ' histogram') + kwargs["title"] = kwargs.get("title", feature + " histogram") pops_feature_values = population_feature_values(pops, feature) pops_labels = [pop.name for pop in pops] - ax.hist(pops_feature_values, bins=bins, cumulative=cumulative, label=pops_labels, density=normed) + ax.hist( + pops_feature_values, bins=bins, cumulative=cumulative, label=pops_labels, density=normed + ) - kwargs['no_legend'] = len(pops_labels) == 1 + kwargs["no_legend"] = len(pops_labels) == 1 return matplotlib_utils.plot_style(fig=fig, ax=ax, **kwargs) def main(): - pop1 = load_morphologies(Path(PACKAGE_DIR, "tests/data/valid_set")) pop2 = load_morphologies(Path(PACKAGE_DIR, "tests/data/valid_set")) population_histogram([pop1, pop2], "section_lengths") diff --git a/examples/iteration_analysis.py b/examples/iteration_analysis.py index ffc6a6b11..020edbb91 100755 --- a/examples/iteration_analysis.py +++ b/examples/iteration_analysis.py @@ -48,8 +48,7 @@ def main(): - - filename = Path(PACKAGE_DIR, 'tests/data/swc/Neuron.swc') + filename = Path(PACKAGE_DIR, "tests/data/swc/Neuron.swc") # load a neuron from an SWC file m = nm.load_morphology(filename) @@ -65,24 +64,22 @@ def sec_len(sec): """Return the length of a section.""" return mm.section_length(sec.points) - print('Total neurite length (sections):', - sum(sec_len(s) for s in nm.iter_sections(m))) + print("Total neurite length (sections):", sum(sec_len(s) for s in nm.iter_sections(m))) # Get length of all neurites in cell by 
iterating over segments, # and summing the segment lengths. # This should yield the same result as iterating over sections. - print('Total neurite length (segments):', - sum(mm.segment_length(s) for s in nm.iter_segments(m))) + print( + "Total neurite length (segments):", sum(mm.segment_length(s) for s in nm.iter_segments(m)) + ) # get volume of all neurites in cell by summing over segment # volumes - print('Total neurite volume:', - sum(mm.segment_volume(s) for s in nm.iter_segments(m))) + print("Total neurite volume:", sum(mm.segment_volume(s) for s in nm.iter_segments(m))) # get area of all neurites in cell by summing over segment # areas - print('Total neurite surface area:', - sum(mm.segment_area(s) for s in nm.iter_segments(m))) + print("Total neurite surface area:", sum(mm.segment_area(s) for s in nm.iter_segments(m))) # get total number of neurite points in cell. def n_points(sec): @@ -91,60 +88,74 @@ def n_points(sec): # Non-root sections have duplicate first point return n if sec.parent is None else n - 1 - print('Total number of points:', - sum(n_points(s) for s in nm.iter_sections(m))) + print("Total number of points:", sum(n_points(s) for s in nm.iter_sections(m))) # get mean radius of neurite points in cell. # p[COLS.R] yields the radius for point p. # Note: this includes duplicated points at beginning of # non-trunk sections - print('Mean radius of points:', - np.mean([s.points[:, COLS.R] for s in nm.iter_sections(m)])) + print("Mean radius of points:", np.mean([s.points[:, COLS.R] for s in nm.iter_sections(m)])) # get mean radius of neurite points in cell. # p[COLS.R] yields the radius for point p. 
# Note: this includes duplicated points at beginning of # non-trunk sections pts = [p[COLS.R] for s in m.sections[1:] for p in s.points] - print('Mean radius of points:', - np.mean(pts)) + print("Mean radius of points:", np.mean(pts)) # get mean radius of segments - print('Mean radius of segments:', - np.mean(list(mm.segment_radius(s) for s in nm.iter_segments(m)))) + print( + "Mean radius of segments:", np.mean(list(mm.segment_radius(s) for s in nm.iter_segments(m))) + ) # get stats for the segment taper rate, for different types of neurite for ttype in NEURITES: ttt = ttype - seg_taper_rate = [mm.segment_taper_rate(s) - for s in nm.iter_segments(m, neurite_filter=tree_type_checker(ttt))] - - print('Segment taper rate (', ttype, - '):\n mean=', np.mean(seg_taper_rate), - ', std=', np.std(seg_taper_rate), - ', min=', np.min(seg_taper_rate), - ', max=', np.max(seg_taper_rate), - sep='') + seg_taper_rate = [ + mm.segment_taper_rate(s) + for s in nm.iter_segments(m, neurite_filter=tree_type_checker(ttt)) + ] + + print( + "Segment taper rate (", + ttype, + "):\n mean=", + np.mean(seg_taper_rate), + ", std=", + np.std(seg_taper_rate), + ", min=", + np.min(seg_taper_rate), + ", max=", + np.max(seg_taper_rate), + sep="", + ) # Number of bifurcation points. 
- print('Number of bifurcation points:', - sum(1 for _ in nm.iter_sections(m, - iterator_type=Section.ibifurcation_point))) + print( + "Number of bifurcation points:", + sum(1 for _ in nm.iter_sections(m, iterator_type=Section.ibifurcation_point)), + ) # Number of bifurcation points for apical dendrites - print('Number of bifurcation points (apical dendrites):', - sum(1 for _ in nm.iter_sections(m, - iterator_type=Section.ibifurcation_point, - neurite_filter=tree_type_checker(nm.APICAL_DENDRITE)))) + print( + "Number of bifurcation points (apical dendrites):", + sum( + 1 + for _ in nm.iter_sections( + m, + iterator_type=Section.ibifurcation_point, + neurite_filter=tree_type_checker(nm.APICAL_DENDRITE), + ) + ), + ) # Maximum branch order - print('Maximum branch order:', - max(section.branch_order(s) for s in nm.iter_sections(m))) + print("Maximum branch order:", max(section.branch_order(s) for s in nm.iter_sections(m))) # Morphology's bounding box # Note: does not account for soma radius - print('Bounding box ((min x, y, z), (max x, y, z))', geom.bounding_box(m)) + print("Bounding box ((min x, y, z), (max x, y, z))", geom.bounding_box(m)) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/examples/nl_fst_compat.py b/examples/nl_fst_compat.py index 38671fc28..36623e646 100755 --- a/examples/nl_fst_compat.py +++ b/examples/nl_fst_compat.py @@ -40,24 +40,27 @@ def main(): + m_h5 = nm.load_morphology(Path(PACKAGE_DIR, "tests/data/h5/v1/bio_neuron-001.h5")) + m_asc = nm.load_morphology(Path(PACKAGE_DIR, "tests/data/neurolucida/bio_neuron-001.asc")) - m_h5 = nm.load_morphology(Path(PACKAGE_DIR, 'tests/data/h5/v1/bio_neuron-001.h5')) - m_asc = nm.load_morphology(Path(PACKAGE_DIR, 'tests/data/neurolucida/bio_neuron-001.asc')) + print("h5 number of sections:", nm.get("number_of_sections", m_h5)) + print("nl number of sections:", nm.get("number_of_sections", m_asc)) + print("h5 number of segments:", nm.get("number_of_segments", m_h5)) + print("nl 
number of segments:", nm.get("number_of_segments", m_asc)) + print("h5 total neurite length:", np.sum(nm.get("section_lengths", m_h5))) + print("nl total neurite length:", np.sum(nm.get("section_lengths", m_asc))) + print("h5 principal direction extents:", nm.get("principal_direction_extents", m_h5)) + print("nl principal direction extents:", nm.get("principal_direction_extents", m_asc)) - print('h5 number of sections:', nm.get('number_of_sections', m_h5)) - print('nl number of sections:', nm.get('number_of_sections', m_asc)) - print('h5 number of segments:', nm.get('number_of_segments', m_h5)) - print('nl number of segments:', nm.get('number_of_segments', m_asc)) - print('h5 total neurite length:', np.sum(nm.get('section_lengths', m_h5))) - print('nl total neurite length:', np.sum(nm.get('section_lengths', m_asc))) - print('h5 principal direction extents:', nm.get('principal_direction_extents', m_h5)) - print('nl principal direction extents:', nm.get('principal_direction_extents', m_asc)) - - print('\nNumber of neurites:') + print("\nNumber of neurites:") for nt in iter(nm.NeuriteType): - print(nt, mf.number_of_neurites(m_h5, neurite_type=nt), mf.number_of_neurites(m_asc, neurite_type=nt)) + print( + nt, + mf.number_of_neurites(m_h5, neurite_type=nt), + mf.number_of_neurites(m_asc, neurite_type=nt), + ) - print('\nNumber of segments:') + print("\nNumber of segments:") for nt in iter(nm.NeuriteType): print(nt, nf.number_of_segments(m_h5.neurites[0]), nf.number_of_segments(m_asc.neurites[0])) diff --git a/examples/plot_somas.py b/examples/plot_somas.py index 464578881..d3e9d596a 100755 --- a/examples/plot_somas.py +++ b/examples/plot_somas.py @@ -36,7 +36,7 @@ import matplotlib.pyplot as plt import numpy as np -DATA_PATH = Path(__file__).resolve().parent.parent / 'tests/data/swc' +DATA_PATH = Path(__file__).resolve().parent.parent / "tests/data/swc" def random_color(): @@ -46,8 +46,9 @@ def random_color(): def plot_somas(somas): """Plot set of somas on same figure 
as spheres, each with different color.""" - _, ax = matplotlib_utils.get_figure(new_fig=True, subplot=111, - params={'projection': '3d', 'aspect': 'auto'}) + _, ax = matplotlib_utils.get_figure( + new_fig=True, subplot=111, params={"projection": "3d", "aspect": "auto"} + ) for s in somas: matplotlib_utils.plot_sphere(ax, s.center, s.radius, color=random_color(), alpha=1) @@ -57,13 +58,15 @@ def plot_somas(somas): def main(): # define set of files containing relevant morphs - file_nms = [Path(DATA_PATH, file_nm) for file_nm in ['Soma_origin.swc', - 'Soma_translated_1.swc', - 'Soma_translated_2.swc']] + file_nms = [ + Path(DATA_PATH, file_nm) + for file_nm in ["Soma_origin.swc", "Soma_translated_1.swc", "Soma_translated_2.swc"] + ] # load from file and plot sms = [load_morphology(file_nm).soma for file_nm in file_nms] plot_somas(sms) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/examples/radius_of_gyration.py b/examples/radius_of_gyration.py index 0bcd43064..ad9459d77 100755 --- a/examples/radius_of_gyration.py +++ b/examples/radius_of_gyration.py @@ -92,24 +92,36 @@ def mean_rad_of_gyration(neurites): def main(): - # load a neuron from an SWC file - filename = Path(PACKAGE_DIR, 'tests/data/swc/Neuron.swc') + filename = Path(PACKAGE_DIR, "tests/data/swc/Neuron.swc") m = nm.load_morphology(filename) # for every neurite, print (number of segments, radius of gyration, neurite type) - print([(sum(len(s.points) - 1 for s in nrte.iter_sections()), - radius_of_gyration(nrte), nrte.type) for nrte in m.neurites]) + print( + [ + ( + sum(len(s.points) - 1 for s in nrte.sections), + radius_of_gyration(nrte), + nrte.type, + ) + for nrte in m.neurites + ] + ) # print mean radius of gyration per neurite type - print('Mean radius of gyration for axons: ', - mean_rad_of_gyration(n for n in m.neurites if n.type == nm.AXON)) - print('Mean radius of gyration for basal dendrites: ', - mean_rad_of_gyration(n for n in m.neurites if n.type == 
nm.BASAL_DENDRITE)) - print('Mean radius of gyration for apical dendrites: ', - mean_rad_of_gyration(n for n in m.neurites - if n.type == nm.APICAL_DENDRITE)) - - -if __name__ == '__main__': + print( + "Mean radius of gyration for axons: ", + mean_rad_of_gyration(n for n in m.neurites if n.type == nm.AXON), + ) + print( + "Mean radius of gyration for basal dendrites: ", + mean_rad_of_gyration(n for n in m.neurites if n.type == nm.BASAL_DENDRITE), + ) + print( + "Mean radius of gyration for apical dendrites: ", + mean_rad_of_gyration(n for n in m.neurites if n.type == nm.APICAL_DENDRITE), + ) + + +if __name__ == "__main__": main() diff --git a/examples/section_ids.py b/examples/section_ids.py index 00cac5c83..e615bb7f3 100755 --- a/examples/section_ids.py +++ b/examples/section_ids.py @@ -45,18 +45,17 @@ def get_segment(neuron, section_id, segment_id): array of two [x, y, z, r] points defining segment """ sec = neuron.sections[section_id] - return sec.points[segment_id:segment_id + 2][:, COLS.XYZR] + return sec.points[segment_id : segment_id + 2][:, COLS.XYZR] def main(): - - m = nm.load_morphology(Path(PACKAGE_DIR, 'tests/data/h5/v1/Neuron.h5')) + m = nm.load_morphology(Path(PACKAGE_DIR, "tests/data/h5/v1/Neuron.h5")) seg = get_segment(m, 3, 2) - print('Segment:\n', seg) - print('Mid-point (x, y, z):\n', mm.linear_interpolate(seg[0], seg[1], 0.5)) - print('Mid-point R:\n', mm.interpolate_radius(seg[0][COLS.R], seg[1][COLS.R], 0.5)) + print("Segment:\n", seg) + print("Mid-point (x, y, z):\n", mm.linear_interpolate(seg[0], seg[1], 0.5)) + print("Mid-point R:\n", mm.interpolate_radius(seg[0][COLS.R], seg[1][COLS.R], 0.5)) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/examples/soma_radius_fit.py b/examples/soma_radius_fit.py index 91e06db3e..7169d6c6c 100755 --- a/examples/soma_radius_fit.py +++ b/examples/soma_radius_fit.py @@ -42,28 +42,29 @@ def test_multiple_distr(filepath): """Runs the distribution fit for multiple distributions and 
returns - the optimal distribution along with the corresponding parameters. + the optimal distribution along with the corresponding parameters. """ # load a neuron from an SWC file population = nm.load_morphologies(filepath) # Create a list of basic distributions - distr_to_check = ('norm', 'expon', 'uniform') + distr_to_check = ("norm", "expon", "uniform") # Get the soma radii of a population of morphs - soma_size = nm.get('soma_radius', population) + soma_size = nm.get("soma_radius", population) # Find the best fit distribution return st.optimal_distribution(soma_size, distr_to_check) def main(): - morphology_path = Path(PACKAGE_DIR, "tests/data/swc/Neuron.swc") result = test_multiple_distr(morphology_path) - print(f"Optimal distribution fit for soma radius is: {result.type} with parameters {result.params}") + print( + f"Optimal distribution fit for soma radius is: {result.type} with parameters {result.params}" + ) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/neurom/__init__.py b/neurom/__init__.py index 2b2f24aa9..4a8c52420 100644 --- a/neurom/__init__.py +++ b/neurom/__init__.py @@ -55,14 +55,17 @@ >>> mapping = lambda n : len(n.points) >>> n_points = [n for n in nm.iter_neurites(pop, mapping, filter)] """ +from importlib.metadata import version + +__version__ = version(__package__) + from neurom.core.dataformat import COLS -from neurom.core.types import NeuriteType, NeuriteIter, NEURITES as NEURITE_TYPES from neurom.core.morphology import graft_morphology, iter_neurites, iter_sections, iter_segments - -from neurom.features import get -from neurom.io.utils import MorphLoader, load_morphology, load_morphologies -from neurom.io.utils import load_neuron, load_neurons +from neurom.core.types import NEURITES as NEURITE_TYPES +from neurom.core.types import NeuriteIter, NeuriteType from neurom.exceptions import NeuroMDeprecationWarning +from neurom.features import get +from neurom.io.utils import MorphLoader, load_morphologies, 
load_morphology APICAL_DENDRITE = NeuriteType.apical_dendrite BASAL_DENDRITE = NeuriteType.basal_dendrite diff --git a/neurom/apps/__init__.py b/neurom/apps/__init__.py index 8eb261172..5fb105003 100644 --- a/neurom/apps/__init__.py +++ b/neurom/apps/__init__.py @@ -27,9 +27,10 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Helper code for neurom applications.""" -import sys import logging +import sys from pathlib import Path + import yaml from neurom.exceptions import ConfigError diff --git a/neurom/apps/annotate.py b/neurom/apps/annotate.py index fa33d7c05..f8245acec 100644 --- a/neurom/apps/annotate.py +++ b/neurom/apps/annotate.py @@ -46,14 +46,21 @@ def generate_annotation(result, settings): if result.status: return '' - header = ('\n\n' - f'({settings["label"]} ; MUK_ANNOTATION\n' - f' (Color {settings["color"]}) ; MUK_ANNOTATION\n' - f' (Name "{settings["name"]}") ; MUK_ANNOTATION') + header = ( + '\n\n' + f'({settings["label"]} ; MUK_ANNOTATION\n' + f' (Color {settings["color"]}) ; MUK_ANNOTATION\n' + f' (Name "{settings["name"]}") ; MUK_ANNOTATION' + ) points = [p for _, _points in result.info for p in _points] - annotations = '\n'.join((f' ' - f'({p[COLS.X]:10.2f} {p[COLS.Y]:10.2f} {p[COLS.Z]:10.2f} 0.50)' - f' ; MUK_ANNOTATION' for p in points)) + annotations = '\n'.join( + ( + f' ' + f'({p[COLS.X]:10.2f} {p[COLS.Y]:10.2f} {p[COLS.Z]:10.2f} 0.50)' + f' ; MUK_ANNOTATION' + for p in points + ) + ) footer = ') ; MUK_ANNOTATION\n' return f'{header}\n{annotations}\n{footer}' @@ -61,6 +68,7 @@ def generate_annotation(result, settings): def annotate(results, settings): """Concatenate the annotations of all checkers.""" - annotations = (generate_annotation(result, setting) - for result, setting in zip(results, settings)) + annotations = ( + generate_annotation(result, setting) for result, setting in zip(results, settings) + ) return '\n'.join(annot for annot in annotations if annot) diff --git a/neurom/apps/cli.py b/neurom/apps/cli.py index 
69383e3fd..d2548e090 100644 --- a/neurom/apps/cli.py +++ b/neurom/apps/cli.py @@ -33,14 +33,15 @@ import click import matplotlib.pyplot as plt -from neurom.apps import morph_stats, morph_check, EXAMPLE_CHECK_CONFIG, EXAMPLE_STATS_CONFIG from neurom import load_morphology +from neurom.apps import morph_check, morph_stats from neurom.view import matplotlib_impl, matplotlib_utils @click.group() -@click.option('-v', '--verbose', count=True, default=0, - help='-v for WARNING, -vv for INFO, -vvv for DEBUG') +@click.option( + '-v', '--verbose', count=True, default=0, help='-v for WARNING, -vv for INFO, -vvv for DEBUG' +) def cli(verbose): """The CLI entry point.""" level = (logging.WARNING, logging.INFO, logging.DEBUG)[min(verbose, 2)] @@ -52,9 +53,13 @@ def cli(verbose): @click.option('--3d', 'is_3d', is_flag=True) @click.option('--plane', type=click.Choice(['xy', 'yx', 'yz', 'zy', 'xz', 'zx']), default='xy') @click.option('--backend', type=click.Choice(['plotly', 'matplotlib']), default='matplotlib') -@click.option('-r', '--realistic-diameters/--no-realistic-diameters', default=False, - help='Scale diameters according to the plot axis\n' - 'Warning: Only works with the matplotlib backend') +@click.option( + '-r', + '--realistic-diameters/--no-realistic-diameters', + default=False, + help='Scale diameters according to the plot axis\n' + 'Warning: Only works with the matplotlib backend', +) def view(input_file, is_3d, plane, backend, realistic_diameters): """CLI interface to draw morphologies.""" # pylint: disable=import-outside-toplevel @@ -65,10 +70,15 @@ def view(input_file, is_3d, plane, backend, realistic_diameters): plot = partial(matplotlib_impl.plot_morph3d, ax=ax) else: _, ax = matplotlib_utils.get_figure() - plot = partial(matplotlib_impl.plot_morph, ax=ax, - plane=plane, realistic_diameters=realistic_diameters) + plot = partial( + matplotlib_impl.plot_morph, + ax=ax, + plane=plane, + realistic_diameters=realistic_diameters, + ) else: from neurom.view import 
plotly_impl + if is_3d: plot = plotly_impl.plot_morph3d else: @@ -81,35 +91,80 @@ def view(input_file, is_3d, plane, backend, realistic_diameters): plt.show() -@cli.command(short_help='Morphology statistics extractor, more details at' - 'https://neurom.readthedocs.io/en/latest/morph_stats.html') +@cli.command( + short_help='Morphology statistics extractor, more details at' + 'https://neurom.readthedocs.io/en/latest/morph_stats.html' +) @click.argument('datapath', required=False) -@click.option('-C', '--config', type=click.Path(exists=True, dir_okay=False), - default=EXAMPLE_STATS_CONFIG, show_default=True, - help='Configuration File') -@click.option('-o', '--output', type=click.Path(exists=False, dir_okay=False), - help='Path to output file, if it ends in .json, a json file is created,' - 'otherwise a csv file is created') -@click.option('-f', '--full-config', is_flag=True, default=False, - help='If passed then --config is ignored. Compute statistics for all neurite' - 'types, all modes and all features') -@click.option('--as-population', is_flag=True, default=False, - help='If enabled the directory is treated as a population') -@click.option('-I', '--ignored-exceptions', help='Exception to ignore', - type=click.Choice(morph_stats.IGNORABLE_EXCEPTIONS.keys())) -def stats(datapath, config, output, full_config, as_population, ignored_exceptions): +@click.option( + '-C', + '--config', + type=click.Path(exists=True, dir_okay=False), + default=morph_stats.EXAMPLE_STATS_CONFIG, + show_default=True, + help='Configuration File', +) +@click.option( + '-o', + '--output', + type=click.Path(exists=False, dir_okay=False), + help='Path to output file, if it ends in .json, a json file is created,' + 'otherwise a csv file is created', +) +@click.option( + '-f', + '--full-config', + is_flag=True, + default=False, + help='If passed then --config is ignored. 
Compute statistics for all neurite' + 'types, all modes and all features', +) +@click.option( + '--as-population', + is_flag=True, + default=False, + help='If enabled the directory is treated as a population', +) +@click.option( + '-I', + '--ignored-exceptions', + help='Exception to ignore', + type=click.Choice(morph_stats.IGNORABLE_EXCEPTIONS.keys()), +) +@click.option( + '--use-subtrees', + is_flag=True, + show_default=True, + default=False, + help="Enable mixed subtree processing.", +) +def stats(datapath, config, output, full_config, as_population, ignored_exceptions, use_subtrees): """Cli for apps/morph_stats.""" - morph_stats.main(datapath, config, output, full_config, as_population, ignored_exceptions) + morph_stats.main( + datapath, config, output, full_config, as_population, ignored_exceptions, use_subtrees + ) -@cli.command(short_help='Perform checks on morphologies, more details at' - 'https://neurom.readthedocs.io/en/latest/morph_check.html') +@cli.command( + short_help='Perform checks on morphologies, more details at' + 'https://neurom.readthedocs.io/en/latest/morph_check.html' +) @click.argument('datapath') -@click.option('-C', '--config', type=click.Path(exists=True, dir_okay=False), - default=EXAMPLE_CHECK_CONFIG, show_default=True, - help='Configuration File') -@click.option('-o', '--output', type=click.Path(exists=False, dir_okay=False), - help='Path to output json summary file', required=True) +@click.option( + '-C', + '--config', + type=click.Path(exists=True, dir_okay=False), + default=morph_check.EXAMPLE_CHECK_CONFIG, + show_default=True, + help='Configuration File', +) +@click.option( + '-o', + '--output', + type=click.Path(exists=False, dir_okay=False), + help='Path to output json summary file', + required=True, +) def check(datapath, config, output): """Cli for apps/morph_check.""" morph_check.main(datapath, config, output) diff --git a/neurom/apps/morph_check.py b/neurom/apps/morph_check.py index 9dd3e90cf..cb3b7ab2d 100644 --- 
a/neurom/apps/morph_check.py +++ b/neurom/apps/morph_check.py @@ -29,7 +29,8 @@ """Check on morphologies.""" import json -from neurom.apps import get_config, EXAMPLE_CHECK_CONFIG + +from neurom.apps import EXAMPLE_CHECK_CONFIG, get_config from neurom.check.runner import CheckRunner diff --git a/neurom/apps/morph_stats.py b/neurom/apps/morph_stats.py index b0db089d8..93a119810 100644 --- a/neurom/apps/morph_stats.py +++ b/neurom/apps/morph_stats.py @@ -44,28 +44,32 @@ from morphio import SomaError import neurom as nm -from neurom.apps import get_config, EXAMPLE_STATS_CONFIG +from neurom.apps import EXAMPLE_STATS_CONFIG, get_config from neurom.core.morphology import Morphology, Neurite from neurom.core.population import Population from neurom.exceptions import ConfigError -from neurom.features import _NEURITE_FEATURES, _MORPHOLOGY_FEATURES, _POPULATION_FEATURES, \ - _get_feature_value_and_func +from neurom.features import ( + _MORPHOLOGY_FEATURES, + _NEURITE_FEATURES, + _POPULATION_FEATURES, + _get_feature_value_and_func, +) from neurom.io.utils import get_files_by_path -from neurom.utils import flatten, NeuromJSON +from neurom.utils import NeuromJSON, flatten L = logging.getLogger(__name__) IGNORABLE_EXCEPTIONS = {'SomaError': SomaError} -def _run_extract_stats(morph, config): +def _run_extract_stats(morph, config, process_subtrees): """The function to be called by multiprocessing.Pool.imap_unordered.""" if not isinstance(morph, (Morphology, Population)): - morph = nm.load_morphologies(morph) + morph = nm.load_morphologies(morph, process_subtrees=process_subtrees) return morph.name, extract_stats(morph, config) -def extract_dataframe(morphs, config, n_workers=1): +def extract_dataframe(morphs, config, n_workers=1, process_subtrees=False): """Extract stats grouped by neurite type from morphs. 
Arguments: @@ -96,7 +100,7 @@ def extract_dataframe(morphs, config, n_workers=1): elif isinstance(morphs, Population): morphs = morphs._files # pylint: disable=protected-access - func = partial(_run_extract_stats, config=config) + func = partial(_run_extract_stats, config=config, process_subtrees=process_subtrees) if n_workers == 1: stats = list(map(func, morphs)) else: @@ -110,20 +114,23 @@ def extract_dataframe(morphs, config, n_workers=1): columns = [('property', 'name')] + [ (key1, key2) for key1, data in stats[0][1].items() for key2 in data ] - rows = [[name] + list(flatten(features.values() for features in data.values())) - for name, data in stats] + rows = [ + [name] + list(flatten(features.values() for features in data.values())) + for name, data in stats + ] return pd.DataFrame(columns=pd.MultiIndex.from_tuples(columns), data=rows) extract_dataframe.__doc__ = extract_dataframe.__doc__.strip() + "\n\t" + str(EXAMPLE_STATS_CONFIG) -def _get_feature_stats(feature_name, morphs, modes, kwargs): +def _get_feature_stats(feature_name, morphs, modes, **kwargs): """Insert the stat data in the dict. If the feature is 2-dimensional, the feature is flattened on its last axis """ - def stat_name_format(mode, feature_name, kwargs): + + def stat_name_format(mode, feature_name, **kwargs): """Returns the key name for the data dictionary. 
The key is a combination of the mode, feature_name and an optional suffix of all the extra @@ -145,8 +152,7 @@ def stat_name_format(mode, feature_name, kwargs): raise ValueError(f'Len of "{feature_name}" feature shape must be <= 2') # pragma: no cover for mode in modes: - - stat_name = stat_name_format(mode, feature_name, kwargs) + stat_name = stat_name_format(mode, feature_name, **kwargs) stat = value if isinstance(value, Sized): @@ -200,14 +206,11 @@ def extract_stats(morphs, config): stats = defaultdict(dict) for category in ("neurite", "morphology", "population"): for feature_name, opts in config[category].items(): - list_of_kwargs = opts["kwargs"] modes = opts["modes"] for feature_kwargs in list_of_kwargs: - if category == 'neurite': - # mutated below, need a copy feature_kwargs = deepcopy(feature_kwargs) @@ -221,12 +224,17 @@ def extract_stats(morphs, config): if not isinstance(morphs, Neurite): feature_kwargs["neurite_type"] = neurite_type stats[neurite_type.name].update( - _get_feature_stats(feature_name, morphs, modes, feature_kwargs) + _get_feature_stats( + feature_name, + morphs, + modes, + **feature_kwargs, + ) ) else: stats[category].update( - _get_feature_stats(feature_name, morphs, modes, feature_kwargs) + _get_feature_stats(feature_name, morphs, modes, **feature_kwargs) ) return dict(stats) @@ -239,11 +247,7 @@ def _get_header(results): """Extracts the headers, using the first value in the dict as the template.""" values = next(iter(results.values())) - return ['name'] + [ - f'{k}:{metric}' - for k, v in values.items() - for metric in v.keys() - ] + return ['name'] + [f'{k}:{metric}' for k, v in values.items() for metric in v.keys()] def _generate_flattened_dict(headers, results): @@ -265,7 +269,7 @@ def _generate_flattened_dict(headers, results): 'AXON': nm.AXON, 'BASAL_DENDRITE': nm.BASAL_DENDRITE, 'APICAL_DENDRITE': nm.APICAL_DENDRITE, - 'ALL': nm.ANY_NEURITE + 'ALL': nm.ANY_NEURITE, } @@ -276,7 +280,7 @@ def full_config(): categories = { 
"neurite": _NEURITE_FEATURES, "morphology": _MORPHOLOGY_FEATURES, - "population": _POPULATION_FEATURES + "population": _POPULATION_FEATURES, } config = { @@ -318,6 +322,7 @@ def _standardize_layout(category_features): - mode1 - mode2 """ + def standardize_options(options): """Returns options as a dict with two keys: 'kwargs' and 'modes'.""" # convert short format @@ -353,7 +358,15 @@ def _sanitize_config(config): return config -def main(datapath, config, output_file, is_full_config, as_population, ignored_exceptions): +def main( + datapath, + config, + output_file, + is_full_config, + as_population, + ignored_exceptions, + use_subtrees=False, +): """Main function that get statistics for morphologies. Args: @@ -363,6 +376,7 @@ def main(datapath, config, output_file, is_full_config, as_population, ignored_e is_full_config (bool): should be statistics made over all possible features, modes, neurites as_population (bool): treat ``datapath`` as directory of morphologies population ignored_exceptions (list|tuple|None): exceptions to ignore when loading a morphology + use_subtrees (bool): Enable of heterogeneous subtree processing """ config = full_config() if is_full_config else get_config(config, EXAMPLE_STATS_CONFIG) @@ -376,7 +390,8 @@ def main(datapath, config, output_file, is_full_config, as_population, ignored_e morphs = nm.load_morphologies( get_files_by_path(datapath), - ignored_exceptions=tuple(IGNORABLE_EXCEPTIONS[k] for k in ignored_exceptions) + ignored_exceptions=tuple(IGNORABLE_EXCEPTIONS[k] for k in ignored_exceptions), + process_subtrees=use_subtrees, ) if as_population: diff --git a/neurom/check/__init__.py b/neurom/check/__init__.py index 0776510a2..622c2394c 100644 --- a/neurom/check/__init__.py +++ b/neurom/check/__init__.py @@ -33,6 +33,7 @@ def check_wrapper(fun): """Decorate a checking function.""" + @wraps(fun) def _wrapper(*args, **kwargs): """Sets the title property of the result of running a checker.""" @@ -46,6 +47,7 @@ def _wrapper(*args, 
**kwargs): class CheckResult: """Class representing a check result.""" + def __init__(self, status, info=None, title=None): """Initialize a CheckResult object.""" self.status = bool(status) diff --git a/neurom/check/morphology_checks.py b/neurom/check/morphology_checks.py index ef2fc5687..0cdc9ef0f 100644 --- a/neurom/check/morphology_checks.py +++ b/neurom/check/morphology_checks.py @@ -33,11 +33,12 @@ from itertools import islice import numpy as np + from neurom import NeuriteType from neurom.check import CheckResult -from neurom.check.morphtree import get_flat_neurites, back_tracking_segments, overlapping_points -from neurom.core.morphology import Section, iter_neurites, iter_sections, iter_segments +from neurom.check.morphtree import back_tracking_segments, get_flat_neurites, overlapping_points from neurom.core.dataformat import COLS +from neurom.core.morphology import Section, iter_neurites, iter_sections, iter_segments from neurom.exceptions import NeuroMError from neurom.morphmath import section_length, segment_length from neurom.utils import flatten @@ -137,8 +138,7 @@ def has_all_nonzero_section_lengths(morph, threshold=0.0): Returns: CheckResult with result including list of ids of bad sections """ - bad_ids = [s.id for s in iter_sections(morph.neurites) - if section_length(s.points) <= threshold] + bad_ids = [s.id for s in iter_sections(morph.neurites) if section_length(s.points) <= threshold] return CheckResult(len(bad_ids) == 0, bad_ids) @@ -192,10 +192,15 @@ def has_no_jumps(morph, max_distance=30.0, axis='z'): CheckResult with result list of ids of bad sections """ bad_ids = [] - axis = {'x': COLS.X, 'y': COLS.Y, 'z': COLS.Z, }[axis.lower()] + axis = { + 'x': COLS.X, + 'y': COLS.Y, + 'z': COLS.Z, + }[axis.lower()] for neurite in iter_neurites(morph): - section_segment = ((sec, seg) for sec in iter_sections(neurite) - for seg in iter_segments(sec)) + section_segment = ( + (sec, seg) for sec in iter_sections(neurite) for seg in iter_segments(sec) + ) 
for sec, (p0, p1) in islice(section_segment, 1, None): # Skip neurite root segment if max_distance < abs(p0[axis] - p1[axis]): bad_ids.append((sec.id, [p0, p1])) @@ -254,9 +259,11 @@ def has_no_narrow_start(morph, frac=0.9): Returns: CheckResult with a list of all first segments of neurites with a narrow start """ - bad_ids = [(neurite.root_node.id, neurite.root_node.points[np.newaxis, 1]) - for neurite in morph.neurites - if neurite.root_node.points[0][COLS.R] < frac * neurite.root_node.points[1][COLS.R]] + bad_ids = [ + (neurite.root_node.id, neurite.root_node.points[np.newaxis, 1]) + for neurite in morph.neurites + if neurite.root_node.points[0][COLS.R] < frac * neurite.root_node.points[1][COLS.R] + ] return CheckResult(len(bad_ids) == 0, bad_ids) @@ -282,33 +289,34 @@ def has_no_dangling_branch(morph): radius = np.linalg.norm(recentered_soma, axis=1) soma_max_radius = radius.max() - dendritic_points = np.array(list(flatten(n.points - for n in iter_neurites(morph) - if n.type != NeuriteType.axon))) + dendritic_points = np.array( + list(flatten(n.points for n in iter_neurites(morph) if n.type != NeuriteType.axon)) + ) def is_dangling(neurite): """Is the neurite dangling?""" starting_point = neurite.points[0][COLS.XYZ] - if np.linalg.norm(starting_point - soma_center) - soma_max_radius <= 12.: + if np.linalg.norm(starting_point - soma_center) - soma_max_radius <= 12.0: return False if neurite.type != NeuriteType.axon: return True - distance_to_dendrites = np.linalg.norm(dendritic_points[:, COLS.XYZ] - starting_point, - axis=1) + distance_to_dendrites = np.linalg.norm( + dendritic_points[:, COLS.XYZ] - starting_point, axis=1 + ) return np.all(distance_to_dendrites >= 2 * dendritic_points[:, COLS.R] + 2) - bad_ids = [(n.root_node.id, [n.root_node.points[0]]) - for n in iter_neurites(morph) if is_dangling(n)] + bad_ids = [ + (n.root_node.id, [n.root_node.points[0]]) for n in iter_neurites(morph) if is_dangling(n) + ] return CheckResult(len(bad_ids) == 0, bad_ids) 
-def has_no_narrow_neurite_section(morph, - neurite_filter, - radius_threshold=0.05, - considered_section_min_length=50): +def has_no_narrow_neurite_section( + morph, neurite_filter, radius_threshold=0.05, considered_section_min_length=50 +): """Check if the morphology has dendrites with narrow sections. Arguments: @@ -322,22 +330,31 @@ def has_no_narrow_neurite_section(morph, CheckResult with result. `result.info` contains the narrow section ids and their first point """ - considered_sections = (sec for sec in iter_sections(morph, neurite_filter=neurite_filter) - if sec.length > considered_section_min_length) + considered_sections = ( + sec + for sec in iter_sections(morph, neurite_filter=neurite_filter) + if sec.length > considered_section_min_length + ) def narrow_section(section): """Select narrow sections.""" return section.points[:, COLS.R].mean() < radius_threshold - bad_ids = [(section.id, section.points[np.newaxis, 1]) - for section in considered_sections if narrow_section(section)] + bad_ids = [ + (section.id, section.points[np.newaxis, 1]) + for section in considered_sections + if narrow_section(section) + ] return CheckResult(len(bad_ids) == 0, bad_ids) def has_multifurcation(morph): """Check if a section has more than 3 children.""" - bad_ids = [(section.id, section.points[np.newaxis, -1]) for section in iter_sections(morph) - if len(section.children) > 3] + bad_ids = [ + (section.id, section.points[np.newaxis, -1]) + for section in iter_sections(morph) + if len(section.children) > 3 + ] return CheckResult(len(bad_ids) == 0, bad_ids) @@ -360,7 +377,7 @@ def has_no_single_children(morph): def has_no_back_tracking(morph): """Check if the morphology has sections with back-tracks.""" bad_ids = [ - (i, morph.section(i[0]).points[np.newaxis, i[1]]) + (i, morph.section(i[0]).points[:, COLS.XYZ][np.newaxis, i[1]]) for neurite in iter_neurites(morph) for i in back_tracking_segments(neurite) ] diff --git a/neurom/check/morphtree.py b/neurom/check/morphtree.py 
index 1f0abf174..fbb591417 100644 --- a/neurom/check/morphtree.py +++ b/neurom/check/morphtree.py @@ -31,8 +31,9 @@ import numpy as np from scipy.spatial import KDTree -from neurom.core.dataformat import COLS from neurom import morphmath as mm +from neurom.core.dataformat import COLS +from neurom.core.morphology import iter_sections from neurom.morphmath import principal_direction_extent @@ -48,15 +49,14 @@ def is_monotonic(neurite, tol): Returns: True if neurite monotonic """ - for node in neurite.iter_sections(): + for node in iter_sections(neurite): # check that points in section satisfy monotonicity sec = node.points for point_id in range(len(sec) - 1): if sec[point_id + 1][COLS.R] > sec[point_id][COLS.R] + tol: return False # Check that section boundary points satisfy monotonicity - if (node.parent is not None and - sec[0][COLS.R] > node.parent.points[-1][COLS.R] + tol): + if node.parent is not None and sec[0][COLS.R] > node.parent.points[-1][COLS.R] + tol: return False return True @@ -86,7 +86,7 @@ def is_flat(neurite, tol, method='tolerance'): return any(ext < float(tol)) -def back_tracking_segments(neurite): +def back_tracking_segments(neurite): # pylint: disable=too-many-locals """Check if a neurite process backtracks to a previous node. Back-tracking takes place @@ -101,7 +101,7 @@ def back_tracking_segments(neurite): for which a back tracking is detected (so the first point of these segments can be retrieved with ``morph.section(section_id).points[segment_id]``. 
""" - # pylint: disable=too-many-locals + def pair(segs): """Pairs the input list into triplets.""" return zip(segs, segs[1:]) @@ -173,7 +173,7 @@ def is_inside_cylinder(seg1, seg2): return not is_in_the_same_verse(seg1, seg2) and is_seg1_overlapping_with_seg2(seg1, seg2) # filter out single segment sections - section_itr = (sec for sec in neurite.iter_sections() if sec.points.shape[0] > 2) + section_itr = (sec for sec in iter_sections(neurite) if sec.points.shape[0] > 2) for sec in section_itr: # group each section's points intro triplets segment_pairs = list(filter(is_not_zero_seg, pair(sec.points))) @@ -182,7 +182,7 @@ def is_inside_cylinder(seg1, seg2): for i, seg1 in enumerate(segment_pairs[1:]): # check if the end point of the segment lies within the previous # ones in the current section - for j, seg2 in enumerate(segment_pairs[0: i + 1]): + for j, seg2 in enumerate(segment_pairs[0 : i + 1]): if is_inside_cylinder(seg1, seg2): yield (sec.id, i, j) @@ -230,7 +230,7 @@ def overlapping_points(neurite, tolerance=None): [np.ones((len(sec.points) - 1, 1)) * sec.id, sec.points[1:, :3]], axis=1, ) - for sec in neurite.iter_sections() + for sec in iter_sections(neurite) ], ), ], diff --git a/neurom/check/neuron_checks.py b/neurom/check/neuron_checks.py deleted file mode 100644 index 6146637a7..000000000 --- a/neurom/check/neuron_checks.py +++ /dev/null @@ -1,8 +0,0 @@ -"""For backward compatibility only.""" -# pylint: skip-file - -from neurom.check.morphology_checks import * # pragma: no cover -from neurom.utils import deprecated_module # pragma: no cover - -deprecated_module('Module `neurom.check.neuron_checks` is deprecated. 
Use' - '`neurom.check.morphology_checks` instead.') # pragma: no cover diff --git a/neurom/check/runner.py b/neurom/check/runner.py index 807a46cbe..4d034f87e 100644 --- a/neurom/check/runner.py +++ b/neurom/check/runner.py @@ -34,9 +34,8 @@ from neurom import load_morphology from neurom.check import check_wrapper -from neurom.exceptions import ConfigError +from neurom.exceptions import ConfigError, NeuroMError from neurom.io import utils -from neurom.utils import warn_deprecated L = logging.getLogger(__name__) @@ -47,8 +46,9 @@ class CheckRunner: def __init__(self, config): """Initialize a CheckRunner object.""" self._config = CheckRunner._sanitize_config(config) - self._check_modules = dict((k, import_module('neurom.check.%s' % k)) - for k in config['checks']) + self._check_modules = dict( + (k, import_module('neurom.check.%s' % k)) for k in config['checks'] + ) def run(self, path): """Test a bunch of files and return a summary JSON report.""" @@ -83,8 +83,7 @@ def _do_check(self, obj, check_module, check_str): try: if out.info: - L.debug('%s: %d failing ids detected: %s', - out.title, len(out.info), out.info) + L.debug('%s: %d failing ids detected: %s', out.title, len(out.info), out.info) except TypeError: # pragma: no cover pass @@ -136,8 +135,7 @@ def _log_msg(self, msg, ok): LOG_LEVELS = {False: logging.ERROR, True: logging.INFO} # pylint: disable=logging-not-lazy - L.log(LOG_LEVELS[ok], - '%35s %s' + CEND, msg, CGREEN + 'PASS' if ok else CRED + 'FAIL') + L.log(LOG_LEVELS[ok], '%35s %s' + CEND, msg, CGREEN + 'PASS' if ok else CRED + 'FAIL') @staticmethod def _sanitize_config(config): @@ -146,10 +144,10 @@ def _sanitize_config(config): checks = config['checks'] if 'morphology_checks' not in checks: checks['morphology_checks'] = [] - if 'neuron_checks' in checks: - warn_deprecated('"neuron_checks" is deprecated, use "morphology_checks" instead ' - 'for the config of `neurom.check`') # pragma: no cover - checks['morphology_checks'] = config['neuron_checks'] # 
pragma: no cover + if 'neuron_checks' in checks: # pragma: no cover + raise NeuroMError( + "'neuron_checks' is not supported. Please rename it into 'morphology_checks'" + ) else: raise ConfigError('Need to have "checks" in the config') diff --git a/neurom/core/__init__.py b/neurom/core/__init__.py index 2fddabf9d..eac93e4e5 100644 --- a/neurom/core/__init__.py +++ b/neurom/core/__init__.py @@ -28,7 +28,8 @@ """Core functionality and data types of NeuroM.""" +from neurom.core.morphology import Morphology, Neurite, Section +from neurom.core.population import Population + # those imports here for backward compatibility from neurom.core.soma import Soma -from neurom.core.morphology import Section, Neurite, Morphology, Neuron -from neurom.core.population import Population diff --git a/neurom/core/dataformat.py b/neurom/core/dataformat.py index d529c4926..113c69a85 100644 --- a/neurom/core/dataformat.py +++ b/neurom/core/dataformat.py @@ -35,6 +35,7 @@ class COLS: """Column labels for internal data representation.""" + COL_COUNT = _COL_COUNT (X, Y, Z, R) = range(_COL_COUNT) XY = slice(0, 2) diff --git a/neurom/core/morphology.py b/neurom/core/morphology.py index 1a8404a8a..1431eda6b 100644 --- a/neurom/core/morphology.py +++ b/neurom/core/morphology.py @@ -28,17 +28,20 @@ """Morphology classes and functions.""" -from collections import deque import warnings +from collections import deque import morphio import numpy as np +from cached_property import cached_property + from neurom import morphmath -from neurom.core.soma import make_soma from neurom.core.dataformat import COLS -from neurom.core.types import NeuriteIter, NeuriteType from neurom.core.population import Population -from neurom.utils import flatten, warn_deprecated +from neurom.core.soma import make_soma +from neurom.core.types import NeuriteIter, NeuriteType +from neurom.exceptions import NeuroMError +from neurom.utils import flatten class Section: @@ -46,34 +49,30 @@ class Section: def __init__(self, 
morphio_section): """The section constructor.""" - self.morphio_section = morphio_section + self._morphio_section = morphio_section + + def to_morphio(self): + """Returns the morphio section.""" + return self._morphio_section @property def id(self): """Returns the section ID.""" - return self.morphio_section.id + return self._morphio_section.id @property def parent(self): """Returns the parent section if non root section else None.""" - if self.morphio_section.is_root: - return None - return Section(self.morphio_section.parent) + return None if self.is_root() else Section(self._morphio_section.parent) @property def children(self): """Returns a list of child section.""" - return [Section(child) for child in self.morphio_section.children] - - def append_section(self, section): - """Appends a section to the current section object. + return [Section(child) for child in self._morphio_section.children] - Args: - section (morphio.Section|morphio.mut.Section|Section|morphio.PointLevel): a section - """ - if isinstance(section, Section): - return self.morphio_section.append_section(section.morphio_section) - return self.morphio_section.append_section(section) + def is_homogeneous_point(self): + """A section is homogeneous if it has the same type with its children.""" + return all(c.type == self.type for c in self.children) def is_forking_point(self): """Is this section a forking point?""" @@ -89,11 +88,11 @@ def is_leaf(self): def is_root(self): """Is tree the root node?""" - return self.parent is None + return self._morphio_section.is_root def ipreorder(self): """Depth-first pre-order iteration of tree nodes.""" - children = deque((self, )) + children = deque((self,)) while children: cur_node = children.pop() children.extend(reversed(cur_node.children)) @@ -101,7 +100,9 @@ def ipreorder(self): def ipostorder(self): """Depth-first post-order iteration of tree nodes.""" - children = [self, ] + children = [ + self, + ] seen = set() while children: cur_node = children[-1] @@ 
-112,12 +113,27 @@ def ipostorder(self): children.pop() yield cur_node - def iupstream(self): - """Iterate from a tree node to the root nodes.""" - t = self - while t is not None: - yield t - t = t.parent + def iupstream(self, stop_node=None): + """Iterate from a tree node to the root nodes. + + Args: + stop_node: Node to stop the upstream traversal. If None, it stops when parent is None. + """ + if stop_node is None: + + def stop_condition(section): + return section.is_root() + + else: + + def stop_condition(section): + return section.is_root() or section == stop_node + + current_section = self + while not stop_condition(current_section): + yield current_section + current_section = current_section.parent + yield current_section def ileaf(self): """Iterator to all leaves of a tree.""" @@ -141,35 +157,29 @@ def ibifurcation_point(self, iter_mode=ipreorder): def __eq__(self, other): """Equal when its morphio section is equal.""" - return self.morphio_section == other.morphio_section + return self.to_morphio().has_same_shape(other.to_morphio()) def __hash__(self): """Hash of its id.""" return self.id - def __nonzero__(self): - """If has children.""" - return self.morphio_section is not None - - __bool__ = __nonzero__ + @property + def segments(self): + """The array of all segments of the neurite.""" + return list(iter_segments(self)) @property def points(self): """Returns the section list of points the NeuroM way (points + radius).""" - return np.concatenate((self.morphio_section.points, - self.morphio_section.diameters[:, np.newaxis] / 2.), - axis=1) - - @points.setter - def points(self, value): - """Set the points.""" - self.morphio_section.points = np.copy(value[:, COLS.XYZ]) - self.morphio_section.diameters = np.copy(value[:, COLS.R]) * 2 + return np.concatenate( + (self._morphio_section.points, self._morphio_section.diameters[:, np.newaxis] / 2.0), + axis=1, + ) @property def type(self): """Returns the section type.""" - return 
NeuriteType(int(self.morphio_section.type)) + return NeuriteType(int(self._morphio_section.type)) @property def length(self): @@ -197,18 +207,22 @@ def volume(self): def __repr__(self): """Text representation.""" parent_id = None if self.parent is None else self.parent.id - return (f'Section(id={self.id}, type={self.type}, n_points={len(self.points)})' - f'') + return ( + f'Section(id={self.id}, type={self.type}, n_points={len(self.points)})' + f'' + ) # NRN simulator iteration order # See: # https://github.com/neuronsimulator/nrn/blob/2dbf2ebf95f1f8e5a9f0565272c18b1c87b2e54c/share/lib/hoc/import3d/import3d_gui.hoc#L874 -NRN_ORDER = {NeuriteType.soma: 0, - NeuriteType.axon: 1, - NeuriteType.basal_dendrite: 2, - NeuriteType.apical_dendrite: 3, - NeuriteType.undefined: 4} +NRN_ORDER = { + NeuriteType.soma: 0, + NeuriteType.axon: 1, + NeuriteType.basal_dendrite: 2, + NeuriteType.apical_dendrite: 3, + NeuriteType.undefined: 4, +} def iter_neurites(obj, mapfun=None, filt=None, neurite_order=NeuriteIter.FileOrder): @@ -229,34 +243,60 @@ def iter_neurites(obj, mapfun=None, filt=None, neurite_order=NeuriteIter.FileOrd >>> from neurom.core.morphology import iter_neurites >>> from neurom import load_morphologies - >>> pop = load_morphologies('path/to/morphologies') - >>> n_points = [n for n in iter_neurites(pop, lambda x : len(x.points))] + >>> pop = load_morphologies("tests/data/valid_set") + >>> n_points = [n for n in iter_neurites(pop, lambda x, section_type: len(x.points))] Get the number of points in each axon in a morphology population >>> import neurom as nm >>> from neurom.core.morphology import iter_neurites + >>> from neurom import load_morphologies + >>> pop = load_morphologies("tests/data/valid_set") >>> filter = lambda n : n.type == nm.AXON - >>> mapping = lambda n : len(n.points) + >>> mapping = lambda n, section_type: len(n.points) >>> n_points = [n for n in iter_neurites(pop, mapping, filter)] """ - neurites = ((obj,) if isinstance(obj, Neurite) else - 
obj.neurites if hasattr(obj, 'neurites') else obj) + if isinstance(obj, Neurite): + neurites = (obj,) + elif hasattr(obj, "neurites"): + neurites = obj.neurites + else: + neurites = obj + if neurite_order == NeuriteIter.NRN: if isinstance(obj, Population): - warnings.warn('`iter_neurites` with `neurite_order` over Population orders neurites' - 'within the whole population, not within each morphology separately.') + warnings.warn( + '`iter_neurites` with `neurite_order` over Population orders neurites' + 'within the whole population, not within each morphology separately.' + ) last_position = max(NRN_ORDER.values()) + 1 neurites = sorted(neurites, key=lambda neurite: NRN_ORDER.get(neurite.type, last_position)) neurite_iter = iter(neurites) if filt is None else filter(filt, neurites) - return neurite_iter if mapfun is None else map(mapfun, neurite_iter) + + if mapfun is None: + return neurite_iter + + return ( + ( + mapfun( + neurite, + section_type=filt.type if filt is not None else None, + ) + if neurite.process_subtrees + else mapfun(neurite, section_type=NeuriteType.all) + ) + for neurite in neurite_iter + ) -def iter_sections(neurites, - iterator_type=Section.ipreorder, - neurite_filter=None, - neurite_order=NeuriteIter.FileOrder): +def iter_sections( + neurites, + iterator_type=Section.ipreorder, + neurite_filter=None, + neurite_order=NeuriteIter.FileOrder, + section_filter=None, +): """Iterator to the sections in a neurite, morphology or morphology population. Arguments: @@ -272,6 +312,8 @@ def iter_sections(neurites, neurite_order (NeuriteIter): order upon which neurites should be iterated - NeuriteIter.FileOrder: order of appearance in the file - NeuriteIter.NRN: NRN simulator order: soma -> axon -> basal -> apical + section_filter: optional section level filter. Please note that neurite_filter takes + precedence over the section_filter. 
Examples: @@ -282,13 +324,18 @@ def iter_sections(neurites, >>> filter = lambda n : n.type == nm.AXON >>> n_points = [len(s.points) for s in iter_sections(pop, neurite_filter=filter)] """ - return flatten( - iterator_type(neurite.root_node) - for neurite in iter_neurites(neurites, filt=neurite_filter, neurite_order=neurite_order) - ) - - -def iter_segments(obj, neurite_filter=None, neurite_order=NeuriteIter.FileOrder): + neurites = iter_neurites(neurites, filt=neurite_filter, neurite_order=neurite_order) + sections = flatten(iterator_type(neurite.root_node) for neurite in neurites) + return sections if section_filter is None else filter(section_filter, sections) + + +def iter_segments( + obj, + neurite_filter=None, + neurite_order=NeuriteIter.FileOrder, + section_filter=None, + section_iterator=Section.ipreorder, +): """Return an iterator to the segments in a collection of neurites. Arguments: @@ -297,58 +344,130 @@ def iter_segments(obj, neurite_filter=None, neurite_order=NeuriteIter.FileOrder) neurite_order: order upon which neurite should be iterated. Values: - NeuriteIter.FileOrder: order of appearance in the file - NeuriteIter.NRN: NRN simulator order: soma -> axon -> basal -> apical + section_filter: optional section level filter + section_iterator: section iteration order within a given neurite. Must be one of: + Section.ipreorder: Depth-first pre-order iteration of tree nodes + Section.ipostorder: Depth-first post-order iteration of tree nodes + Section.iupstream: Iterate from a tree node to the root nodes + Section.ibifurcation_point: Iterator to bifurcation points + Section.ileaf: Iterator to all leaves of a tree Note: This is a convenience function provided for generic access to morphology segments. It may have a performance overhead WRT custom-made segment analysis functions that leverage numpy and section-wise iteration. 
""" - sections = iter((obj,) if isinstance(obj, Section) else - iter_sections(obj, - neurite_filter=neurite_filter, - neurite_order=neurite_order)) - - return flatten( - zip(section.points[:-1], section.points[1:]) - for section in sections + sections = iter( + (obj,) + if isinstance(obj, Section) + else iter_sections( + obj, + iterator_type=section_iterator, + neurite_filter=neurite_filter, + neurite_order=neurite_order, + section_filter=section_filter, + ) ) + return flatten(zip(section.points[:-1], section.points[1:]) for section in sections) + + +def iter_points(obj, neurite_filter=None, neurite_order=NeuriteIter.FileOrder, section_filter=None): + """Return an iterator to the points in a population, morphology, neurites, or section. + + Args: + obj: population, morphology, neurite, section or iterable containing + neurite_filter: optional top level filter on properties of neurite neurite objects + neurite_order: order upon which neurite should be iterated. Values: + - NeuriteIter.FileOrder: order of appearance in the file + - NeuriteIter.NRN: NRN simulator order: soma -> axon -> basal -> apical + section_filter: optional section level filter + """ + sections = ( + iter((obj,)) + if isinstance(obj, Section) + else iter_sections( + obj, + neurite_filter=neurite_filter, + neurite_order=neurite_order, + section_filter=section_filter, + ) + ) + + return flatten(s.points[:, COLS.XYZ] for s in sections) + def graft_morphology(section): """Returns a morphology starting at section.""" assert isinstance(section, Section) m = morphio.mut.Morphology() - m.append_root_section(section.morphio_section) + m.append_root_section(section.to_morphio()) return Morphology(m) -def graft_neuron(section): - """Deprecated in favor of ``graft_morphology``.""" - warn_deprecated('`neurom.core.neuron.graft_neuron` is deprecated in favor of ' - '`neurom.core.morphology.graft_morphology`') # pragma: no cover - return graft_morphology(section) # pragma: no cover - - class Neurite: """Class 
representing a neurite tree.""" - def __init__(self, root_node): + def __init__(self, root_node, *, process_subtrees=False): """Constructor. Args: root_node (morphio.Section): root section + process_subtrees (bool): enable mixed tree processing if set to True """ - self.morphio_root_node = root_node + self._root_node = root_node + self._process_subtrees = process_subtrees + + @property + def process_subtrees(self): + """Enable mixed tree processing if set to True.""" + return self._process_subtrees + + @process_subtrees.setter + def process_subtrees(self, value): + self._process_subtrees = value + if "type" in vars(self): + del vars(self)["type"] + + @property + def morphio_root_node(self): + """Returns the morphio root section.""" + return self._root_node @property def root_node(self): """The first section of the neurite.""" return Section(self.morphio_root_node) - @property + @cached_property def type(self): - """The type of the root node.""" - return self.root_node.type + """The type of the Neurite (which can be composite).""" + return NeuriteType(self.subtree_types) + + @cached_property + def subtree_types(self): + """The types of the subtrees.""" + if not self._process_subtrees: + return NeuriteType(self.morphio_root_node.type) + + it = self.root_node.ipreorder() + subtree_types = [next(it).to_morphio().type] + + for section in it: + if section.type != section.parent.type: + subtree_types.append(NeuriteType(section.to_morphio().type)) + + return subtree_types + + @property + def sections(self): + """The array of all sections.""" + return list(iter_sections(self)) + + @property + def segments(self): + """The array of all segments of the neurite.""" + return list(iter_segments(self)) @property def points(self): @@ -368,7 +487,10 @@ def length(self): The length is defined as the sum of lengths of the sections. 
""" - return sum(s.length for s in self.iter_sections()) + # pylint: disable=import-outside-toplevel + from neurom.features.neurite import total_length + + return total_length(self) @property def area(self): @@ -376,7 +498,10 @@ def area(self): The area is defined as the sum of area of the sections. """ - return sum(s.area for s in self.iter_sections()) + # pylint: disable=import-outside-toplevel + from neurom.features.neurite import total_area + + return total_area(self) @property def volume(self): @@ -384,108 +509,121 @@ def volume(self): The volume is defined as the sum of volumes of the sections. """ - return sum(s.volume for s in self.iter_sections()) - - def iter_sections(self, order=Section.ipreorder, neurite_order=NeuriteIter.FileOrder): - """Iteration over section nodes. - - Arguments: - order: section iteration order within a given neurite. Must be one of: - Section.ipreorder: Depth-first pre-order iteration of tree nodes - Section.ipostorder: Depth-first post-order iteration of tree nodes - Section.iupstream: Iterate from a tree node to the root nodes - Section.ibifurcation_point: Iterator to bifurcation points - Section.ileaf: Iterator to all leaves of a tree - - neurite_order: order upon which neurites should be iterated. 
Values: - - NeuriteIter.FileOrder: order of appearance in the file - - NeuriteIter.NRN: NRN simulator order: soma -> axon -> basal -> apical - """ - return iter_sections(self, iterator_type=order, neurite_order=neurite_order) + # pylint: disable=import-outside-toplevel + from neurom.features.neurite import total_volume - def __nonzero__(self): - """If has root node.""" - return bool(self.morphio_root_node) + return total_volume(self) + + def is_heterogeneous(self) -> bool: + """Returns true if the neurite consists of more that one section types.""" + return self.morphio_root_node.is_heterogeneous() def __eq__(self, other): """If root node ids and types are equal.""" - return self.type == other.type and self.morphio_root_node.id == other.morphio_root_node.id + return ( + self.type == other.type + and self.morphio_root_node.id == other.morphio_root_node.id + and self.process_subtrees == other.process_subtrees + ) def __hash__(self): """Hash is made of tuple of type and root_node.""" - return hash((self.type, self.root_node)) - - __bool__ = __nonzero__ + return hash((self.type, self.root_node, self.process_subtrees)) def __repr__(self): """Return a string representation.""" return 'Neurite ' % self.type -class Morphology(morphio.mut.Morphology): +class Morphology: """Class representing a simple morphology.""" - def __init__(self, filename, name=None): + def __init__(self, morphio_morph, name=None, *, process_subtrees=False): """Morphology constructor. Args: - filename (str|Path): a filename - name (str): a option morphology name + morphio_morph (morphio.Morphology|morphio.mut.Morphology): a morphio object + name (str): an optional morphology name + process_subtrees (bool): enable mixed tree processing if set to True """ - super().__init__(filename) + if not isinstance(morphio_morph, (morphio.Morphology, morphio.mut.Morphology)): + raise NeuroMError( + f"Expected morphio Morphology object but got: {morphio_morph}.\n" + f"Use neurom.load_morphology() to load from file." 
+ ) + + self._morphio_morph = morphio_morph + self.name = name if name else 'Morphology' - self.morphio_soma = super().soma - self.neurom_soma = make_soma(self.morphio_soma) + self.soma = make_soma(self._morphio_morph.soma) - @property - def soma(self): - """Corresponding soma.""" - return self.neurom_soma + self.process_subtrees = process_subtrees + + def to_morphio(self): + """Returns the morphio morphology object.""" + return self._morphio_morph + + def copy(self): + """Returns a shallow copy of the morphio morphology object.""" + return Morphology(self.to_morphio(), name=self.name, process_subtrees=self.process_subtrees) @property def neurites(self): """The list of neurites.""" - return [Neurite(root_section) for root_section in self.root_sections] + return [ + Neurite(root_section, process_subtrees=self.process_subtrees) + for root_section in self._morphio_morph.root_sections + ] + + def section(self, section_id): + """Returns the section with the given id.""" + return Section(self._morphio_morph.section(section_id)) @property def sections(self): """The array of all sections, excluding the soma.""" return list(iter_sections(self)) + @property + def segments(self): + """The array of all segments of the sections.""" + return list(iter_segments(self)) + @property def points(self): """Returns the list of points.""" - return np.concatenate( - [section.points for section in iter_sections(self)]) + return np.concatenate([section.points for section in iter_sections(self)]) def transform(self, trans): """Return a copy of this morphology with a 3D transformation applied.""" - obj = Morphology(self) - obj.morphio_soma.points = trans(obj.morphio_soma.points) + morph = self._morphio_morph + + is_immutable = hasattr(morph, 'as_mutable') - for section in obj.sections: - section.morphio_section.points = trans(section.morphio_section.points) - return obj + # make copy or convert to mutable if immutable + if is_immutable: + morph = morph.as_mutable() + else: + morph = 
morphio.mut.Morphology(morph) + + morph.soma.points = trans(morph.soma.points) + + for section in morph.iter(): + section.points = trans(section.points) + + if is_immutable: + return Morphology(morph.as_immutable()) + return Morphology(morph) def __copy__(self): """Creates a deep copy of Morphology instance.""" - return Morphology(self, self.name) + return Morphology(self.to_morphio(), self.name) def __deepcopy__(self, memodict={}): """Creates a deep copy of Morphology instance.""" # pylint: disable=dangerous-default-value - return Morphology(self, self.name) + return Morphology(self.to_morphio(), self.name) def __repr__(self): """Return a string representation.""" - return 'Morphology ' % \ - (self.soma, len(self.neurites)) - - -class Neuron(Morphology): - """Deprecated ``Neuron`` class. Use ``Morphology`` instead.""" - def __init__(self, filename, name=None): - """Dont use me.""" - super().__init__(filename, name) # pragma: no cover - warn_deprecated('`neurom.core.neuron.Neuron` is deprecated in favor of ' - '`neurom.core.morphology.Morphology`') # pragma: no cover + return 'Morphology ' % (self.soma, len(self.neurites)) diff --git a/neurom/core/neuron.py b/neurom/core/neuron.py deleted file mode 100644 index 514e0eaf3..000000000 --- a/neurom/core/neuron.py +++ /dev/null @@ -1,8 +0,0 @@ -"""For backward compatibility only.""" -# pylint: skip-file - -from neurom.core.morphology import * # pragma: no cover -from neurom.utils import deprecated_module # pragma: no cover - -deprecated_module('Module `neurom.core.neuron` is deprecated. 
Use `neurom.core.morphology`' - ' instead.') # pragma: no cover diff --git a/neurom/core/population.py b/neurom/core/population.py index fc29b53e4..f85d4a0cc 100644 --- a/neurom/core/population.py +++ b/neurom/core/population.py @@ -28,15 +28,22 @@ """Morphology Population Classes and Functions.""" import logging +import os +from pathlib import Path from morphio import MorphioError + import neurom from neurom.exceptions import NeuroMError - L = logging.getLogger(__name__) +def _resolve_if_morphology_paths(files_or_objects): + """Resolve the files in the list.""" + return [Path(os.path.abspath(f)) if isinstance(f, (Path, str)) else f for f in files_or_objects] + + class Population: """Morphology Population Class. @@ -44,7 +51,16 @@ class Population: It does not store the loaded morphology in memory unless the morphology has been already passed as loaded (instance of ``Morphology``). """ - def __init__(self, files, name='Population', ignored_exceptions=(), cache=False): + + def __init__( + self, + files, + name='Population', + ignored_exceptions=(), + *, + cache=False, + process_subtrees=False, + ): """Construct a morphology population. Arguments: @@ -57,13 +73,34 @@ def __init__(self, files, name='Population', ignored_exceptions=(), cache=False) will be loaded everytime it is accessed within the population. Which is good when population is big. If true then all morphs will be loaded upon the construction and kept in memory. + process_subtrees (bool): enable mixed tree processing if set to True + + Notes: + symlinks in paths are not resolved. 
""" self._ignored_exceptions = ignored_exceptions self.name = name + + self._files = _resolve_if_morphology_paths(files) + + self._process_subtrees = process_subtrees + if cache: - self._files = [self._load_file(f) for f in files if f is not None] - else: - self._files = files + self._reset_cache() + + def _reset_cache(self): + """Reset the internal cache.""" + self._files = [self._load_file(f) for f in self._files if f is not None] + + @property + def process_subtrees(self): + """Enable mixed tree processing if set to True.""" + return self._process_subtrees + + @process_subtrees.setter + def process_subtrees(self, value): + self._process_subtrees = value + self._reset_cache() @property def morphologies(self): @@ -82,9 +119,11 @@ def neurites(self): def _load_file(self, f): if isinstance(f, neurom.core.morphology.Morphology): - return f + new_morph = f.copy() + new_morph.process_subtrees = self.process_subtrees + return new_morph try: - return neurom.load_morphology(f) + return neurom.load_morphology(f, process_subtrees=self.process_subtrees) except (NeuroMError, MorphioError) as e: if isinstance(e, self._ignored_exceptions): L.info('Ignoring exception "%s" for file %s', e, f.name) @@ -108,7 +147,8 @@ def __getitem__(self, idx): """Get morphology at index idx.""" if idx > len(self): raise ValueError( - f'no {idx} index in "{self.name}" population, max possible index is {len(self)}') + f'no {idx} index in "{self.name}" population, max possible index is {len(self)}' + ) return self._load_file(self._files[idx]) def __str__(self): diff --git a/neurom/core/soma.py b/neurom/core/soma.py index 0aa030e6f..412af3960 100755 --- a/neurom/core/soma.py +++ b/neurom/core/soma.py @@ -30,8 +30,10 @@ import math import warnings +import morphio import numpy as np from morphio import SomaError, SomaType + from neurom import morphmath from neurom.core.dataformat import COLS @@ -50,15 +52,21 @@ def __init__(self, morphio_soma): morphio_soma (morphio.Soma): instance of soma of MorphIO 
class """ self._morphio_soma = morphio_soma - # this radius is used only for `volume` method, please avoid using it for anything else. - self.radius = 0 + + def to_morphio(self): + """Return morphio instance.""" + return self._morphio_soma @property def center(self): """Obtain the center from the first stored point.""" - if len(self._morphio_soma.points) > 0: - return self._morphio_soma.points[0] - return None + return get_center(self) + + @property + def radius(self): + """Return radius of soma.""" + # this radius is used only for `volume` method, please avoid using it for anything else. + return get_radius(self) def iter(self): """Iterator to soma contents.""" @@ -67,31 +75,23 @@ def iter(self): @property def points(self): """Get the set of (x, y, z, r) points this soma.""" - return np.concatenate((self._morphio_soma.points, - self._morphio_soma.diameters[:, np.newaxis] / 2.), - axis=1) + return np.concatenate( + (self._morphio_soma.points, self._morphio_soma.diameters[:, np.newaxis] / 2.0), axis=1 + ) - @points.setter - def points(self, values): - """Set the points.""" - values = np.asarray(values) - self._morphio_soma.points = np.copy(values[:, COLS.XYZ]) - self._morphio_soma.diameters = np.copy(values[:, COLS.R]) * 2 + @property + def area(self): + """Calculate soma area.""" + return get_area(self) @property def volume(self): - """Gets soma volume assuming it is a sphere.""" - warnings.warn('Approximating soma volume by a sphere. {}'.format(self)) - return 4. 
/ 3 * math.pi * self.radius ** 3 + """Calculate soma volume.""" + return get_volume(self) def overlaps(self, points, exclude_boundary=False): """Check that the given points are located inside the soma.""" - points = np.atleast_2d(np.asarray(points, dtype=np.float64)) - if exclude_boundary: - mask = np.linalg.norm(points - self.center, axis=1) < self.radius - else: - mask = np.linalg.norm(points - self.center, axis=1) <= self.radius - return mask + return check_overlaps(self, points, exclude_boundary=exclude_boundary) class SomaSinglePoint(Soma): @@ -100,15 +100,13 @@ class SomaSinglePoint(Soma): Represented by a single point. """ - def __init__(self, morphio_soma): - """Initialize a SomaSinglePoint object.""" - super().__init__(morphio_soma) - self.radius = self.points[0][COLS.R] - def __str__(self): """Return a string representation.""" - return ('SomaSinglePoint(%s) ' % - (repr(self.points), self.center, self.radius)) + return 'SomaSinglePoint(%s) ' % ( + repr(self.points), + self.center, + self.radius, + ) class SomaCylinders(Soma): @@ -135,51 +133,13 @@ class SomaCylinders(Soma): the area calculation """ - def __init__(self, morphio_soma): - """Initialize a SomaCyliners object.""" - super().__init__(morphio_soma) - self.area = sum(morphmath.segment_area((p0, p1)) - for p0, p1 in zip(self.points, self.points[1:])) - self.radius = math.sqrt(self.area / (4. 
* math.pi)) - - @property - def center(self): - """Obtain the center from the first stored point.""" - return self.points[0][COLS.XYZ] - - @property - def volume(self): - """Return the volume of soma.""" - return sum(morphmath.segment_volume((p0, p1)) - for p0, p1 in zip(self.points, self.points[1:])) - def __str__(self): """Return a string representation.""" - return ('SomaCylinders(%s) ' % - (repr(self.points), self.center, self.radius)) - - def overlaps(self, points, exclude_boundary=False): - """Check that the given points are located inside the soma.""" - points = np.atleast_2d(np.asarray(points, dtype=np.float64)) - mask = np.ones(len(points)).astype(bool) - for p1, p2 in zip(self.points[:-1], self.points[1:]): - vec = p2[COLS.XYZ] - p1[COLS.XYZ] - vec_norm = np.linalg.norm(vec) - dot = (points[mask] - p1[COLS.XYZ]).dot(vec) / vec_norm - - cross = np.linalg.norm(np.cross(vec, points[mask]), axis=1) / vec_norm - dot_clipped = np.clip(dot / vec_norm, a_min=0, a_max=1) - radii = p1[COLS.R] * (1 - dot_clipped) + p2[COLS.R] * dot_clipped - - if exclude_boundary: - in_cylinder = (dot > 0) & (dot < vec_norm) & (cross < radii) - else: - in_cylinder = (dot >= 0) & (dot <= vec_norm) & (cross <= radii) - mask[np.where(mask)] = ~in_cylinder - if not mask.any(): - break - - return ~mask + return 'SomaCylinders(%s) ' % ( + repr(self.points), + self.center, + self.radius, + ) class SomaNeuromorphoThreePointCylinders(SomaCylinders): @@ -205,31 +165,26 @@ class SomaNeuromorphoThreePointCylinders(SomaCylinders): def __init__(self, morphio_soma): """Initialize a SomaNeuromorphoThreePointCylinders object.""" super().__init__(morphio_soma) - # X Y Z R P # xs ys zs rs -1 # xs (ys-rs) zs rs 1 # xs (ys+rs) zs rs 1 - r = self.points[0, COLS.R] + r1, r2, r3 = 0.5 * morphio_soma.diameters + # make sure the above invariant holds - assert (np.isclose(r, self.points[1, COLS.R]) and np.isclose(r, self.points[2, COLS.R])), \ - 'All radii must be the same' - if r < 1e-5: - 
warnings.warn('Zero radius for {}'.format(self)) - h = morphmath.point_dist(self.points[1, COLS.XYZ], self.points[2, COLS.XYZ]) - self.area = 2.0 * math.pi * r * h # ignores the 'end-caps' of the cylinder - self.radius = math.sqrt(self.area / (4. * math.pi)) + assert np.isclose(r1, r2) and np.isclose(r1, r3), 'All radii must be the same' - @property - def volume(self): - """Return the volume of the soma.""" - return 2 * math.pi * self.radius ** 3 + if r1 < 1e-5: + warnings.warn('Zero radius for {}'.format(self)) def __str__(self): """Return a string representation.""" - return ('SomaNeuromorphoThreePointCylinders(%s) ' % - (repr(self.points), self.center, self.radius)) + return 'SomaNeuromorphoThreePointCylinders(%s) ' % ( + repr(self.points), + self.center, + self.radius, + ) class SomaSimpleContour(Soma): @@ -243,71 +198,297 @@ class SomaSimpleContour(Soma): the radii of the points are not taken into account. """ - def __init__(self, morphio_soma): - """Initialize a SomaSimpleContour object.""" - super().__init__(morphio_soma) - self.radius = morphmath.average_points_dist( - self.center, self.points[:, COLS.XYZ]) - - @property - def center(self): - """Obtain the center from the average of all points.""" - return np.mean(self.points[:, COLS.XYZ], axis=0) - def __str__(self): """Return a string representation.""" - return ('SomaSimpleContour(%s) ' % - (repr(self.points), self.center, self.radius)) + return 'SomaSimpleContour(%s) ' % ( + repr(self.points), + self.center, + self.radius, + ) + + +def _morphio_soma(soma): + """Return morphio soma.""" + if isinstance(soma, Soma): + return soma.to_morphio() + if isinstance(soma, (morphio.Soma, morphio.mut.Soma)): + return soma + raise TypeError(f"Unknown soma type {type(soma)}") + + +def _dispatch_soma_functions(soma, dispatch_mapping, **kwargs): + morphio_soma = _morphio_soma(soma) + soma_algo = dispatch_mapping[morphio_soma.type] + return soma_algo(morphio_soma, **kwargs) + + +def get_center(soma): + """Calculate soma 
center.""" + dispatch_mapping = { + SomaType.SOMA_UNDEFINED: _first_point_or_none, + SomaType.SOMA_SINGLE_POINT: _first_point, + SomaType.SOMA_CYLINDERS: _first_point, + SomaType.SOMA_NEUROMORPHO_THREE_POINT_CYLINDERS: _first_point, + SomaType.SOMA_SIMPLE_CONTOUR: _centroid, + } + return _dispatch_soma_functions(soma, dispatch_mapping) - def overlaps(self, points, exclude_boundary=False): - """Check that the given points are located inside the soma. - The contour is supposed to be in the plane XY, the Z component is ignored. - """ - # pylint: disable=too-many-locals - points = np.atleast_2d(np.asarray(points, dtype=np.float64)) - - # Convert points to angles from the center - relative_pts = points - self.center - pt_angles = np.arctan2(relative_pts[:, COLS.Y], relative_pts[:, COLS.X]) - - # Convert soma points to angles from the center - relative_soma_pts = self.points[:, COLS.XYZ] - self.center - soma_angles = np.arctan2(relative_soma_pts[:, COLS.Y], relative_soma_pts[:, COLS.X]) - - # Order the soma points by ascending angles - soma_angle_order = np.argsort(soma_angles) - ordered_soma_angles = soma_angles[soma_angle_order] - ordered_relative_soma_pts = relative_soma_pts[soma_angle_order] - - # Find the two soma points which form the segment crossed by the one from the center - # to the point - angles = np.atleast_2d(pt_angles).T - ordered_soma_angles - closest_indices = np.argmin(np.abs(angles), axis=1) - neighbors = np.ones_like(closest_indices) - neighbors[angles[np.arange(len(closest_indices)), closest_indices] < 0] = -1 - signs = (neighbors == 1) * 2. - 1. 
- neighbors[ - (closest_indices >= len(relative_soma_pts) - 1) - & (neighbors == 1) - ] = -len(relative_soma_pts) + 1 - - # Compute the cross product and multiply by neighbors to get the same result as if all - # vectors were clockwise - cross_z = np.cross( +def _first_point(morphio_soma): + """Return the first point.""" + return morphio_soma.points[0] + + +def _first_point_or_none(morphio_soma): + """Return first point if there is at least one point or None otherwise.""" + points = morphio_soma.points + return points[0] if len(points) > 0 else None + + +def _centroid(morphio_soma): + """Return the centroid of the soma points.""" + return np.mean(morphio_soma.points, axis=0) + + +def get_radius(soma): + """Calculate soma radius.""" + dispatch_mapping = { + SomaType.SOMA_UNDEFINED: lambda _: 0, + SomaType.SOMA_SINGLE_POINT: _soma_single_point_radius, + SomaType.SOMA_CYLINDERS: _soma_cylinders_radius, + SomaType.SOMA_NEUROMORPHO_THREE_POINT_CYLINDERS: _soma_three_point_cylinders_radius, + SomaType.SOMA_SIMPLE_CONTOUR: _soma_simple_contour_radius, + } + return _dispatch_soma_functions(soma, dispatch_mapping) + + +def _soma_single_point_radius(morphio_soma): + """Return first radius.""" + return 0.5 * morphio_soma.diameters[0] + + +def _soma_cylinders_radius(morphio_soma): + """Calculate radius calculated from the cylinder area.""" + points = np.concatenate( + (morphio_soma.points, 0.5 * morphio_soma.diameters[:, np.newaxis]), + axis=1, + ) + area = sum(morphmath.segment_area((p0, p1)) for p0, p1 in zip(points, points[1:])) + return math.sqrt(area / (4.0 * math.pi)) + + +def _soma_three_point_cylinders_radius(morphio_soma): + """Calculate three-point-cylinder radius.""" + return math.sqrt(get_area(morphio_soma) / (4.0 * math.pi)) + + +def _soma_simple_contour_radius(morphio_soma): + """Calculate average contour distance from center of soma.""" + return morphmath.average_points_dist(get_center(morphio_soma), morphio_soma.points) + + +def get_area(soma): + """Calculate 
soma area.""" + dispatch_mapping = { + SomaType.SOMA_UNDEFINED: _soma_undefined_area, + SomaType.SOMA_SINGLE_POINT: _soma_single_point_area, + SomaType.SOMA_CYLINDERS: _soma_cylinders_area, + SomaType.SOMA_NEUROMORPHO_THREE_POINT_CYLINDERS: _soma_three_point_cylinders_area, + SomaType.SOMA_SIMPLE_CONTOUR: _soma_single_point_area, + } + return _dispatch_soma_functions(soma, dispatch_mapping) + + +def _soma_single_point_area(morphio_soma): + """Calculate soma area as a sphere.""" + return 4.0 * math.pi * get_radius(morphio_soma) ** 2 + + +def _soma_undefined_area(morphio_soma): + """Calculate soma as a sphere.""" + warnings.warn('Approximating soma area by a sphere. {}'.format(morphio_soma)) + return _soma_single_point_area(morphio_soma) + + +def _soma_cylinders_area(morphio_soma): + """Calculate soma area as a sum of cylinder surfaces withouts caps.""" + points = np.concatenate( + ( + morphio_soma.points, + 0.5 * morphio_soma.diameters[:, np.newaxis], + ), + axis=1, + ) + return sum(morphmath.segment_area((p0, p1)) for p0, p1 in zip(points, points[1:])) + + +def _soma_three_point_cylinders_area(morphio_soma): + """Calculate soma area as a cylinder comprised of three points with the same radius.""" + r = 0.5 * morphio_soma.diameters[0] + h = morphmath.point_dist(morphio_soma.points[1], morphio_soma.points[2]) + return 2.0 * math.pi * r * h # ignores the 'end-caps' of the cylinder + + +def get_volume(soma): + """Calculate soma volume.""" + dispatch_mapping = { + SomaType.SOMA_UNDEFINED: _soma_undefined_volume, + SomaType.SOMA_SINGLE_POINT: _soma_single_point_volume, + SomaType.SOMA_CYLINDERS: _soma_cylinders_volume, + SomaType.SOMA_NEUROMORPHO_THREE_POINT_CYLINDERS: _soma_three_point_cylinders_volume, + SomaType.SOMA_SIMPLE_CONTOUR: _soma_undefined_volume, + } + return _dispatch_soma_functions(soma, dispatch_mapping) + + +def _soma_single_point_volume(morphio_soma): + """Calculate soma volume as a sphere.""" + return 4.0 / 3 * math.pi * get_radius(morphio_soma) ** 3 + 
+ +def _soma_undefined_volume(morphio_soma): + """Calculate soma volume as a sphere.""" + warnings.warn('Approximating soma volume by a sphere. {}'.format(morphio_soma)) + return _soma_single_point_volume(morphio_soma) + + +def _soma_cylinders_volume(morphio_soma): + """Calculate soma volume as a sum of cylinder volumes.""" + points = np.concatenate( + ( + morphio_soma.points, + 0.5 * morphio_soma.diameters[:, np.newaxis], + ), + axis=1, + ) + return sum(morphmath.segment_volume((p0, p1)) for p0, p1 in zip(points, points[1:])) + + +def _soma_three_point_cylinders_volume(morphio_soma): + """Calculate soma volume as a cylinder of three points and same radius.""" + return 2.0 * math.pi * get_radius(morphio_soma) ** 3 + + +def check_overlaps(soma, points, exclude_boundary=False): + """Check if soma overlaps with points.""" + dispatch_mapping = { + SomaType.SOMA_UNDEFINED: _soma_undefined_overlaps, + SomaType.SOMA_SINGLE_POINT: _soma_undefined_overlaps, + SomaType.SOMA_CYLINDERS: _soma_cylinders_overlaps, + SomaType.SOMA_NEUROMORPHO_THREE_POINT_CYLINDERS: _soma_cylinders_overlaps, + SomaType.SOMA_SIMPLE_CONTOUR: _soma_simple_contour_overlaps, + } + return _dispatch_soma_functions( + soma, + dispatch_mapping, + points=points, + exclude_boundary=exclude_boundary, + ) + + +def _soma_undefined_overlaps(morphio_soma, points, exclude_boundary): + """Check if points overlap with soma approximated as a sphere.""" + points = np.atleast_2d(np.asarray(points, dtype=np.float64)) + + center = get_center(morphio_soma) + radius = get_radius(morphio_soma) + + if exclude_boundary: + return np.linalg.norm(points - center, axis=1) < radius + + return np.linalg.norm(points - center, axis=1) <= radius + + +def _soma_cylinders_overlaps(morphio_soma, points, exclude_boundary): + """Check if points overlap with soma approximated as a collection of cylinders.""" + points = np.atleast_2d(np.asarray(points, dtype=np.float64)) + + soma_points = np.concatenate( + ( + morphio_soma.points, + 0.5 * 
morphio_soma.diameters[:, np.newaxis], + ), + axis=1, + ) + + mask = np.ones(len(points)).astype(bool) + for p1, p2 in zip(soma_points[:-1], soma_points[1:]): + vec = p2[COLS.XYZ] - p1[COLS.XYZ] + vec_norm = np.linalg.norm(vec) + dot = (points[mask] - p1[COLS.XYZ]).dot(vec) / vec_norm + + cross = np.linalg.norm(np.cross(vec, points[mask]), axis=1) / vec_norm + dot_clipped = np.clip(dot / vec_norm, a_min=0, a_max=1) + radii = p1[COLS.R] * (1 - dot_clipped) + p2[COLS.R] * dot_clipped + + if exclude_boundary: + in_cylinder = (dot > 0) & (dot < vec_norm) & (cross < radii) + else: + in_cylinder = (dot >= 0) & (dot <= vec_norm) & (cross <= radii) + mask[np.where(mask)] = ~in_cylinder + if not mask.any(): + break + + return ~mask + + +def _soma_simple_contour_overlaps(morphio_soma, points, exclude_boundary): + """Check if points overlap with soma approximated as a contour.""" + # pylint: disable=too-many-locals + soma_points = np.concatenate( + ( + morphio_soma.points, + 0.5 * morphio_soma.diameters[:, np.newaxis], + ), + axis=1, + ) + center = get_center(morphio_soma) + + points = np.atleast_2d(np.asarray(points, dtype=np.float64)) + + # Convert points to angles from the center + relative_pts = points - center + pt_angles = np.arctan2(relative_pts[:, COLS.Y], relative_pts[:, COLS.X]) + + # Convert soma points to angles from the center + relative_soma_pts = soma_points[:, COLS.XYZ] - center + soma_angles = np.arctan2(relative_soma_pts[:, COLS.Y], relative_soma_pts[:, COLS.X]) + + # Order the soma points by ascending angles + soma_angle_order = np.argsort(soma_angles) + ordered_soma_angles = soma_angles[soma_angle_order] + ordered_relative_soma_pts = relative_soma_pts[soma_angle_order] + + # Find the two soma points which form the segment crossed by the one from the center + # to the point + angles = np.atleast_2d(pt_angles).T - ordered_soma_angles + closest_indices = np.argmin(np.abs(angles), axis=1) + neighbors = np.ones_like(closest_indices) + 
neighbors[angles[np.arange(len(closest_indices)), closest_indices] < 0] = -1 + signs = (neighbors == 1) * 2.0 - 1.0 + neighbors[(closest_indices >= len(relative_soma_pts) - 1) & (neighbors == 1)] = ( + -len(relative_soma_pts) + 1 + ) + + # Compute the cross product and multiply by neighbors to get the same result as if all + # vectors were clockwise + cross_z = ( + np.cross( ( ordered_relative_soma_pts[closest_indices + neighbors] - ordered_relative_soma_pts[closest_indices] ), relative_pts - ordered_relative_soma_pts[closest_indices], - )[:, COLS.Z] * signs + )[:, COLS.Z] + * signs + ) - if exclude_boundary: - interior_side = cross_z > 0 - else: - interior_side = cross_z >= 0 + if exclude_boundary: + interior_side = cross_z > 0 + else: + interior_side = cross_z >= 0 - return interior_side + return interior_side def make_soma(morphio_soma): diff --git a/neurom/core/types.py b/neurom/core/types.py index b269209b3..d5387460f 100644 --- a/neurom/core/types.py +++ b/neurom/core/types.py @@ -27,15 +27,21 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
"""Type enumerations.""" +import collections.abc +from enum import Enum, EnumMeta, unique -from enum import IntEnum, unique from morphio import SectionType + from neurom.utils import OrderedEnum +_SOMA_SUBTYPE = 31 +_ALL_SUBTYPE = 32 + @unique class NeuriteIter(OrderedEnum): """Neurite iteration orders.""" + FileOrder = 1 # Order in which neurites appear in the file # NRN simulator order: soma -> axon -> basal -> apical @@ -44,15 +50,79 @@ class NeuriteIter(OrderedEnum): NRN = 2 +def is_composite_type(subtype): + """Check that the given type is composite.""" + return NeuriteType(subtype).is_composite() + + +def _is_sequence(obj): + return isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str) + + +def _int_or_tuple(values): + if isinstance(values, Enum): + return _int_or_tuple(values.value) + + if isinstance(values, (int, SectionType)): + return int(values) + + if _is_sequence(values): + if len(values) == 1: + return _int_or_tuple(values[0]) + return tuple(_int_or_tuple(v) for v in values) + + raise ValueError(f"Could not cast {values} to int or tuple of ints.") + + +# pylint: disable=redefined-builtin +class _ArgsIntsOrTuples(EnumMeta): + def __call__(cls, value, names=None, *, module=None, qualname=None, type=None, start=1): + try: + value = _int_or_tuple(value) + except ValueError: + pass + kwargs = {} + if names is not None: + # Keep default value of EnumMeta for Python>=3.12.3 + kwargs["names"] = names # pragma: no cover + return super().__call__( + value, module=module, qualname=qualname, type=type, start=start, **kwargs + ) + + +def _create_neurite_type(cls, value, name=None): + """Construct and return a cls type.""" + obj = object.__new__(cls) + + # this is an optimization to avoid checks during runtime + if _is_sequence(value): + subtypes = value + root_type = value[0] + else: + subtypes = (value,) + root_type = value + + setattr(obj, "_value_", value) + + if name: + setattr(obj, "_name_", name) + + obj.subtypes = subtypes + obj.root_type = 
root_type + + return obj + + # for backward compatibility with 'v1' version -class NeuriteType(IntEnum): +class NeuriteType(Enum, metaclass=_ArgsIntsOrTuples): """Type of neurite.""" + axon = SectionType.axon apical_dendrite = SectionType.apical_dendrite basal_dendrite = SectionType.basal_dendrite undefined = SectionType.undefined - soma = 31 - all = 32 + soma = SectionType.soma + all = SectionType.all custom5 = SectionType.custom5 custom6 = SectionType.custom6 custom7 = SectionType.custom7 @@ -69,11 +139,43 @@ class NeuriteType(IntEnum): custom18 = SectionType.custom18 custom19 = SectionType.custom19 + axon_carrying_dendrite = SectionType.basal_dendrite, SectionType.axon + + def __new__(cls, *values): + """Construct a NeuriteType from class definitions.""" + return _create_neurite_type(cls, value=_int_or_tuple(values)) + + def __hash__(self): + """Return the has of the type.""" + return hash(self._value_) + + def is_composite(self): + """Return True if the type consists of more than 1 subtypes.""" + return len(self.subtypes) > 1 + + def __eq__(self, other): + """Equal operator.""" + if not isinstance(other, NeuriteType): + try: + other = NeuriteType(other) + except ValueError: + return False + + if self.is_composite(): + if other.is_composite(): + is_eq = self.subtypes == other.subtypes + else: + is_eq = other.root_type in self.subtypes + else: + if other.is_composite(): + is_eq = self.root_type in other.subtypes + else: + is_eq = self.root_type == other.root_type + return is_eq + #: Collection of all neurite types -NEURITES = (NeuriteType.axon, - NeuriteType.apical_dendrite, - NeuriteType.basal_dendrite) +NEURITES = (NeuriteType.axon, NeuriteType.apical_dendrite, NeuriteType.basal_dendrite) ROOT_ID = -1 @@ -92,27 +194,29 @@ def tree_type_checker(*ref): Ex: >>> import neurom >>> from neurom.core.types import NeuriteType, tree_type_checker - >>> from neurom.core.morphology import Section - >>> m = neurom.load_morphology('path') + >>> from neurom.core.morphology 
import Section, iter_neurites + >>> m = neurom.load_morphology("tests/data/swc/Neuron.swc") >>> >>> tree_filter = tree_type_checker(NeuriteType.axon, NeuriteType.basal_dendrite) - >>> m.i_neurites(Section.ipreorder, tree_filter=tree_filter) + >>> it = iter_neurites(m, filt=tree_filter) >>> >>> tree_filter = tree_type_checker((NeuriteType.axon, NeuriteType.basal_dendrite)) - >>> m.i_neurites(Section.ipreorder, tree_filter=tree_filter) + >>> it = iter_neurites(m, filt=tree_filter) """ ref = tuple(ref) - if len(ref) == 1 and isinstance(ref[0], tuple): + if len(ref) == 1 and isinstance(ref[0], (list, tuple)): # if `ref` is passed as a tuple of types ref = ref[0] # validate that all values are of NeuriteType - for t in ref: - NeuriteType(t) + ref = [NeuriteType(t) for t in ref] if NeuriteType.all in ref: + def check_tree_type(_): """Always returns true.""" return True + else: + def check_tree_type(tree): """Check whether tree has the same type as ref. @@ -121,6 +225,8 @@ def check_tree_type(tree): """ return tree.type in ref + check_tree_type.type = ref + return check_tree_type diff --git a/neurom/features/__init__.py b/neurom/features/__init__.py index 924de8d1a..87a679152 100644 --- a/neurom/features/__init__.py +++ b/neurom/features/__init__.py @@ -32,17 +32,22 @@ Obtain some morphometrics >>> import neurom >>> from neurom import features - >>> m = neurom.load_morphology('path/to/morphology') + >>> m = neurom.load_morphology("tests/data/swc/Neuron.swc") >>> ap_seg_len = features.get('segment_lengths', m, neurite_type=neurom.APICAL_DENDRITE) >>> ax_sec_len = features.get('section_lengths', m, neurite_type=neurom.AXON) """ + +import inspect import operator from enum import Enum -from functools import reduce +from functools import partial, reduce, wraps + +import numpy as np -from neurom.core import Population, Morphology, Neurite +from neurom.core import Morphology, Neurite, Population from neurom.core.morphology import iter_neurites -from neurom.core.types import 
NeuriteType, tree_type_checker as is_type +from neurom.core.types import NeuriteType +from neurom.core.types import tree_type_checker as is_type from neurom.exceptions import NeuroMError _NEURITE_FEATURES = {} @@ -52,6 +57,7 @@ class NameSpace(Enum): """The level of morphology abstraction that feature applies to.""" + NEURITE = 'neurite' NEURON = 'morphology' POPULATION = 'population' @@ -64,12 +70,21 @@ def _flatten_feature(feature_shape, feature_value): return reduce(operator.concat, feature_value, []) -def _get_neurites_feature_value(feature_, obj, neurite_filter, kwargs): +def _get_neurites_feature_value(feature_, obj, neurite_filter, **kwargs): """Collects neurite feature values appropriately to feature's shape.""" kwargs.pop('neurite_type', None) # there is no 'neurite_type' arg in _NEURITE_FEATURES - return reduce(operator.add, - (feature_(n, **kwargs) for n in iter_neurites(obj, filt=neurite_filter)), - 0 if feature_.shape == () else []) + + return reduce( + operator.add, + ( + iter_neurites( + obj, + mapfun=partial(feature_, **kwargs), + filt=neurite_filter, + ) + ), + 0 if feature_.shape == () else [], + ) def _get_feature_value_and_func(feature_name, obj, **kwargs): @@ -87,8 +102,10 @@ # pylint: disable=too-many-branches is_obj_list = isinstance(obj, (list, tuple)) if not isinstance(obj, (Neurite, Morphology, Population)) and not is_obj_list: - raise NeuroMError('Only Neurite, Morphology, Population or list, tuple of Neurite,' - ' Morphology can be used for feature calculation') + raise NeuroMError( + "Only Neurite, Morphology, Population or list, tuple of Neurite, Morphology " + f"can be used for feature calculation.
Got: {obj}" + ) neurite_filter = is_type(kwargs.get('neurite_type', NeuriteType.all)) res, feature_ = None, None @@ -96,38 +113,55 @@ def _get_feature_value_and_func(feature_name, obj, **kwargs): if isinstance(obj, Neurite) or (is_obj_list and isinstance(obj[0], Neurite)): # input is a neurite or a list of neurites if feature_name in _NEURITE_FEATURES: - assert 'neurite_type' not in kwargs, 'Cant apply "neurite_type" arg to a neurite with' \ - ' a neurite feature' + if 'neurite_type' in kwargs: + raise NeuroMError( + 'Can not apply "neurite_type" arg to a Neurite with a neurite feature' + ) + feature_ = _NEURITE_FEATURES[feature_name] + if isinstance(obj, Neurite): res = feature_(obj, **kwargs) else: res = [feature_(s, **kwargs) for s in obj] + elif isinstance(obj, Morphology): # input is a morphology + if 'section_type' in kwargs: + raise NeuroMError('Can not apply "section_type" arg to a Morphology') if feature_name in _MORPHOLOGY_FEATURES: feature_ = _MORPHOLOGY_FEATURES[feature_name] + res = feature_(obj, **kwargs) + elif feature_name in _NEURITE_FEATURES: feature_ = _NEURITE_FEATURES[feature_name] - res = _get_neurites_feature_value(feature_, obj, neurite_filter, kwargs) + res = _get_neurites_feature_value(feature_, obj, neurite_filter, **kwargs) + elif isinstance(obj, Population) or (is_obj_list and isinstance(obj[0], Morphology)): # input is a morphology population or a list of morphs + if 'section_type' in kwargs: + raise NeuroMError('Can not apply "section_type" arg to a Population') if feature_name in _POPULATION_FEATURES: feature_ = _POPULATION_FEATURES[feature_name] + res = feature_(obj, **kwargs) elif feature_name in _MORPHOLOGY_FEATURES: feature_ = _MORPHOLOGY_FEATURES[feature_name] + res = _flatten_feature(feature_.shape, [feature_(n, **kwargs) for n in obj]) elif feature_name in _NEURITE_FEATURES: feature_ = _NEURITE_FEATURES[feature_name] res = _flatten_feature( feature_.shape, - [_get_neurites_feature_value(feature_, n, neurite_filter, kwargs) for 
n in obj]) + [_get_neurites_feature_value(feature_, n, neurite_filter, **kwargs) for n in obj], + ) if res is None or feature_ is None: - raise NeuroMError(f'Cant apply "{feature_name}" feature. Please check that it exists, ' - 'and can be applied to your input. See the features documentation page.') + raise NeuroMError( + f'Cant apply "{feature_name}" feature. Please check that it exists, ' + 'and can be applied to your input. See the features documentation page.' + ) return res, feature_ @@ -163,9 +197,11 @@ def _register_feature(namespace: NameSpace, name, func, shape): shape(tuple): the expected shape of the feature values """ setattr(func, 'shape', shape) - _map = {NameSpace.NEURITE: _NEURITE_FEATURES, - NameSpace.NEURON: _MORPHOLOGY_FEATURES, - NameSpace.POPULATION: _POPULATION_FEATURES} + _map = { + NameSpace.NEURITE: _NEURITE_FEATURES, + NameSpace.NEURON: _MORPHOLOGY_FEATURES, + NameSpace.POPULATION: _POPULATION_FEATURES, + } if name in _map[namespace]: raise NeuroMError(f'A feature is already registered under "{name}"') _map[namespace][name] = func @@ -174,6 +210,8 @@ def _register_feature(namespace: NameSpace, name, func, shape): def feature(shape, namespace: NameSpace, name=None): """Feature decorator to automatically register the feature in the appropriate namespace. + This decorator also ensures that the results of the features are casted to built-in types. 
+ Arguments: shape(tuple): the expected shape of the feature values namespace(string): a namespace, see :class:`NameSpace` @@ -181,15 +219,36 @@ """ def inner(func): - _register_feature(namespace, name or func.__name__, func, shape) - return func + @wraps(func) + def scalar_wrapper(*args, **kwargs): + res = func(*args, **kwargs) + try: + return res.tolist() + except AttributeError: + return res + + @wraps(func) + def matrix_wrapper(*args, **kwargs): + res = func(*args, **kwargs) + return np.array(res).tolist() + + if shape == (): + decorated_func = scalar_wrapper + else: + decorated_func = matrix_wrapper + + _register_feature(namespace, name or func.__name__, decorated_func, shape) + return decorated_func return inner # These imports are necessary in order to register the features -from neurom.features import neurite, morphology, \ - population # noqa, pylint: disable=wrong-import-position +# pylint: disable=wrong-import-position +from neurom.features import neurite # noqa, isort: skip + +from neurom.features import morphology # noqa, isort: skip +from neurom.features import population # noqa, isort: skip def _features_catalogue(): diff --git a/neurom/features/bifurcation.py b/neurom/features/bifurcation.py index 423a3ef2d..3fadaf6b3 100644 --- a/neurom/features/bifurcation.py +++ b/neurom/features/bifurcation.py @@ -29,18 +29,20 @@ """Bifurcation point functions.""" import numpy as np + +import neurom.features.section from neurom import morphmath -from neurom.exceptions import NeuroMError from neurom.core.dataformat import COLS from neurom.core.morphology import Section -from neurom.features.section import section_mean_radius +from neurom.exceptions import NeuroMError def _raise_if_not_bifurcation(section): n_children = len(section.children) if n_children != 2: - raise NeuroMError('A bifurcation point must have exactly 2 children, found {}'.format( - n_children)) + raise NeuroMError( + 'A bifurcation point must
have exactly 2 children, found {}'.format(n_children) + ) def local_bifurcation_angle(bif_point): @@ -51,12 +53,13 @@ def local_bifurcation_angle(bif_point): The bifurcation angle is defined as the angle between the first non-zero length segments of a bifurcation point. """ + def skip_0_length(sec): """Return the first point with non-zero distance to first point.""" p0 = sec[0] cur = sec[1] for i, p in enumerate(sec[1:]): - if not np.all(p[:COLS.R] == p0[:COLS.R]): + if not np.all(p[: COLS.R] == p0[: COLS.R]): cur = sec[i + 1] break @@ -64,8 +67,10 @@ def skip_0_length(sec): _raise_if_not_bifurcation(bif_point) - ch0, ch1 = (skip_0_length(bif_point.children[0].points), - skip_0_length(bif_point.children[1].points)) + ch0, ch1 = ( + skip_0_length(bif_point.children[0].points), + skip_0_length(bif_point.children[1].points), + ) return morphmath.angle_3points(bif_point.points[-1], ch0, ch1) @@ -80,9 +85,9 @@ def remote_bifurcation_angle(bif_point): """ _raise_if_not_bifurcation(bif_point) - return morphmath.angle_3points(bif_point.points[-1], - bif_point.children[0].points[-1], - bif_point.children[1].points[-1]) + return morphmath.angle_3points( + bif_point.points[-1], bif_point.children[0].points[-1], bif_point.children[1].points[-1] + ) def bifurcation_partition(bif_point, iterator_type=Section.ipreorder): @@ -156,8 +161,8 @@ def sibling_ratio(bif_point, method='first'): n = bif_point.children[0].points[1, COLS.R] m = bif_point.children[1].points[1, COLS.R] if method == 'mean': - n = section_mean_radius(bif_point.children[0]) - m = section_mean_radius(bif_point.children[1]) + n = neurom.features.section.section_mean_radius(bif_point.children[0]) + m = neurom.features.section.section_mean_radius(bif_point.children[1]) return min(n, m) / max(n, m) @@ -182,7 +187,35 @@ def diameter_power_relation(bif_point, method='first'): d_child1 = bif_point.children[0].points[1, COLS.R] d_child2 = bif_point.children[1].points[1, COLS.R] if method == 'mean': - d_child = 
section_mean_radius(bif_point) - d_child1 = section_mean_radius(bif_point.children[0]) - d_child2 = section_mean_radius(bif_point.children[1]) - return (d_child / d_child1)**(1.5) + (d_child / d_child2)**(1.5) + d_child = neurom.features.section.section_mean_radius(bif_point) + d_child1 = neurom.features.section.section_mean_radius(bif_point.children[0]) + d_child2 = neurom.features.section.section_mean_radius(bif_point.children[1]) + return (d_child / d_child1) ** (1.5) + (d_child / d_child2) ** (1.5) + + +def downstream_pathlength_asymmetry( + bif_point, normalization_length=1.0, iterator_type=Section.ipreorder +): + """Calculates the downstream pathlength asymmetry at a bifurcation point. + + Args: + bif_point: Bifurcation section. + normalization_length: Constant to divide the result with. + iterator_type: Iterator type that specifies how the two subtrees are traversed. + + Returns: + The absolute difference between the downstream path distances of the two children, divided + by the normalization length. + """ + _raise_if_not_bifurcation(bif_point) + return ( + abs( + neurom.features.section.downstream_pathlength( + bif_point.children[0], iterator_type=iterator_type + ) + - neurom.features.section.downstream_pathlength( + bif_point.children[1], iterator_type=iterator_type + ), + ) + / normalization_length + ) diff --git a/neurom/features/bifurcationfunc.py b/neurom/features/bifurcationfunc.py deleted file mode 100644 index 52490a2ca..000000000 --- a/neurom/features/bifurcationfunc.py +++ /dev/null @@ -1,8 +0,0 @@ -"""For backward compatibility only.""" -# pylint: skip-file - -from neurom.features.bifurcation import * # pragma: no cover -from neurom.utils import deprecated_module # pragma: no cover - -deprecated_module('Module `neurom.features.bifurcationfunc` is deprecated. 
Use' - '`neurom.features.bifurcation` instead.') # pragma: no cover diff --git a/neurom/features/morphology.py b/neurom/features/morphology.py index 4b2c4de21..f7af93121 100644 --- a/neurom/features/morphology.py +++ b/neurom/features/morphology.py @@ -35,30 +35,38 @@ >>> import neurom >>> from neurom import features ->>> m = neurom.load_morphology('path/to/morphology') ->>> features.get('soma_surface_area', m) ->>> population = neurom.load_morphologies('path/to/morphs') ->>> features.get('sholl_crossings', population) +>>> m = neurom.load_morphology("tests/data/swc/Neuron.swc") +>>> result = features.get('soma_surface_area', m) +>>> population = neurom.load_morphologies("tests/data/valid_set") +>>> result = features.get('sholl_crossings', population) For more details see :ref:`features`. """ import warnings - +from collections.abc import Iterable from functools import partial -import math + import numpy as np +import neurom.core.soma from neurom import morphmath -from neurom.core.morphology import iter_neurites, iter_segments, Morphology -from neurom.core.types import tree_type_checker as is_type from neurom.core.dataformat import COLS +from neurom.core.morphology import ( + Morphology, + iter_neurites, + iter_points, + iter_sections, + iter_segments, +) from neurom.core.types import NeuriteType +from neurom.core.types import tree_type_checker as is_type from neurom.exceptions import NeuroMError -from neurom.features import feature, NameSpace, neurite as nf, section as sf -from neurom.utils import str_to_plane +from neurom.features import NameSpace, feature +from neurom.features import neurite as nf +from neurom.features import section as sf from neurom.morphmath import convex_hull - +from neurom.utils import flatten, str_to_plane feature = partial(feature, namespace=NameSpace.NEURON) @@ -68,18 +76,52 @@ def _assert_soma_center(morph): raise NeuroMError( f"The morphology named '{morph.name}' has no soma so the feature can not be computed." 
) + return morph def _map_neurites(function, morph, neurite_type): return list( - iter_neurites(morph, mapfun=function, filt=is_type(neurite_type)) + iter_neurites( + obj=morph, + mapfun=function, + filt=is_type(neurite_type), + ) ) +def _map_neurite_root_nodes(function, morph, neurite_type): + if neurite_type == NeuriteType.all: + filt = None + else: + + def filt(neurite): + return neurite_type == neurite.type.root_type + + return [function(trunk.root_node) for trunk in iter_neurites(obj=morph, filt=filt)] + + +def _filter_mode(obj, neurite_type): + if obj.process_subtrees: + return {"section_filter": is_type(neurite_type)} + return {"neurite_filter": is_type(neurite_type)} + + +def _get_sections(morph, neurite_type): + return list(iter_sections(morph, **_filter_mode(morph, neurite_type))) + + +def _get_segments(morph, neurite_type): + return list(iter_segments(morph, **_filter_mode(morph, neurite_type))) + + +def _get_points(morph, neurite_type): + return list(iter_points(morph, **_filter_mode(morph, neurite_type))) + + @feature(shape=()) def soma_volume(morph): """Get the volume of a morphology's soma.""" - return morph.soma.volume + return neurom.core.soma.get_volume(morph.soma) @feature(shape=()) @@ -89,23 +131,90 @@ def soma_surface_area(morph): Note: The surface area is calculated by assuming the soma is spherical. 
""" - return 4.0 * math.pi * morph.soma.radius ** 2 + return neurom.core.soma.get_area(morph.soma) @feature(shape=()) def soma_radius(morph): """Get the radius of a morphology's soma.""" - return morph.soma.radius + return neurom.core.soma.get_radius(morph.soma) @feature(shape=()) -def max_radial_distance(morph, neurite_type=NeuriteType.all): +def max_radial_distance(morph, origin=None, neurite_type=NeuriteType.all): """Get the maximum radial distances of the termination sections.""" - term_radial_distances = _map_neurites(nf.max_radial_distance, morph, neurite_type) + origin = morph.soma.center if origin is None else origin + term_radial_distances = _map_neurites( + partial(nf.max_radial_distance, origin=origin), morph, neurite_type + ) return max(term_radial_distances) if term_radial_distances else 0.0 +@feature(shape=(...,)) +def section_radial_distances(morph, origin=None, neurite_type=NeuriteType.all): + """Section radial distances.""" + origin = morph.soma.center if origin is None else origin + + return list( + flatten( + _map_neurites( + partial(nf.section_radial_distances, origin=origin), + morph=morph, + neurite_type=neurite_type, + ) + ) + ) + + +@feature(shape=(...,)) +def section_term_radial_distances(morph, origin=None, neurite_type=NeuriteType.all): + """Get the radial distances of the termination sections.""" + origin = morph.soma.center if origin is None else origin + + return list( + flatten( + _map_neurites( + partial(nf.section_term_radial_distances, origin=origin), + morph=morph, + neurite_type=neurite_type, + ) + ) + ) + + +@feature(shape=(...,)) +def section_bif_radial_distances(morph, origin=None, neurite_type=NeuriteType.all): + """Get the radial distances of the bifurcation sections.""" + origin = morph.soma.center if origin is None else origin + + return list( + flatten( + _map_neurites( + partial(nf.section_bif_radial_distances, origin=origin), + morph=morph, + neurite_type=neurite_type, + ) + ) + ) + + +@feature(shape=(...,)) +def 
segment_radial_distances(morph, origin=None, neurite_type=NeuriteType.all): + """Get the radial distances of the segments.""" + origin = morph.soma.center if origin is None else origin + + return list( + flatten( + _map_neurites( + partial(nf.segment_radial_distances, origin=origin), + morph=morph, + neurite_type=neurite_type, + ) + ) + ) + + @feature(shape=(...,)) def number_of_sections_per_neurite(morph, neurite_type=NeuriteType.all): """List of numbers of sections per neurite.""" @@ -141,13 +250,13 @@ def trunk_origin_azimuths(morph, neurite_type=NeuriteType.all): """ _assert_soma_center(morph) - def azimuth(neurite): + def azimuth(root_node): """Azimuth of a neurite trunk.""" return morphmath.azimuth_from_vector( - morphmath.vector(neurite.root_node.points[0], morph.soma.center) + morphmath.vector(root_node.points[0], morph.soma.center) ) - return _map_neurites(azimuth, morph, neurite_type) + return _map_neurite_root_nodes(azimuth, morph, neurite_type) @feature(shape=(...,)) @@ -162,13 +271,13 @@ def trunk_origin_elevations(morph, neurite_type=NeuriteType.all): """ _assert_soma_center(morph) - def elevation(neurite): + def elevation(root_node): """Elevation of a section.""" return morphmath.elevation_from_vector( - morphmath.vector(neurite.root_node.points[0], morph.soma.center) + morphmath.vector(root_node.points[0], morph.soma.center) ) - return _map_neurites(elevation, morph, neurite_type) + return _map_neurite_root_nodes(elevation, morph, neurite_type) @feature(shape=(...,)) @@ -176,10 +285,10 @@ def trunk_vectors(morph, neurite_type=NeuriteType.all): """Calculate the vectors between all the trunks of the morphology and the soma center.""" _assert_soma_center(morph) - def vector_to_root_node(neurite): - return morphmath.vector(neurite.root_node.points[0], morph.soma.center) + def vector_from_soma_to_root(root_node): + return morphmath.vector(root_node.points[0], morph.soma.center) - return _map_neurites(vector_to_root_node, morph, neurite_type) + return
_map_neurite_root_nodes(vector_from_soma_to_root, morph, neurite_type) @feature(shape=(...,)) @@ -226,7 +335,8 @@ def trunk_angles( morphmath.angle_between_projections(i / np.linalg.norm(i), [0, 1]) for i in vectors[:, sort_coords] ), - dtype=float) + dtype=float, + ) ) vectors = vectors[order] @@ -239,10 +349,13 @@ def trunk_angles( n_vectors = len(vectors) cycling_vectors = np.vstack([vectors, vectors]) angles = [ - (num_i, [ - morphmath.angle_between_vectors(i, j) - for j in cycling_vectors[num_i: num_i + n_vectors] - ]) + ( + num_i, + [ + morphmath.angle_between_vectors(i, j) + for j in cycling_vectors[num_i : num_i + n_vectors] + ], + ) for num_i, i in enumerate(vectors) ] @@ -299,9 +412,9 @@ def trunk_angles_inter_types( for i, source in enumerate(source_vectors): for j, target in enumerate(target_vectors): angles[i, j, 0] = morphmath.angle_between_vectors(source, target) - angles[i, j, [1, 2]] = ( - morphmath.spherical_from_vector(target) - morphmath.spherical_from_vector(source) - ) + angles[i, j, [1, 2]] = morphmath.spherical_from_vector( + target + ) - morphmath.spherical_from_vector(source) # Ensure elevation differences are in [-pi, pi] angles[:, :, 1] = morphmath.angles_to_pi_interval(angles[:, :, 1]) @@ -311,8 +424,7 @@ def trunk_angles_inter_types( if closest_component is not None: angles = angles[ - np.arange(len(angles)), - np.argmin(np.abs(angles[:, :, closest_component]), axis=1) + np.arange(len(angles)), np.argmin(np.abs(angles[:, :, closest_component]), axis=1) ][:, np.newaxis, :] return angles.tolist() @@ -351,9 +463,9 @@ def trunk_angles_from_vector( angles = np.empty((len(vectors), 3), dtype=float) for i, i_vec in enumerate(vectors): angles[i, 0] = morphmath.angle_between_vectors(vector, i_vec) - angles[i, (1, 2)] = ( - morphmath.spherical_from_vector(i_vec) - morphmath.spherical_from_vector(vector) - ) + angles[i, (1, 2)] = morphmath.spherical_from_vector( + i_vec + ) - morphmath.spherical_from_vector(vector) # Ensure elevation difference 
are in [-pi, pi] angles[:, 1] = morphmath.angles_to_pi_interval(angles[:, 1]) @@ -392,10 +504,6 @@ def trunk_origin_radii( * else the mean radius of the points between the given ``min_length_filter`` and ``max_length_filter`` are returned. """ - if max_length_filter is None and min_length_filter is None: - return [n.root_node.points[0][COLS.R] - for n in iter_neurites(morph, filt=is_type(neurite_type))] - if min_length_filter is not None and min_length_filter <= 0: raise NeuroMError( "In 'trunk_origin_radii': the 'min_length_filter' value must be strictly greater " @@ -418,11 +526,16 @@ def trunk_origin_radii( "'max_length_filter' value." ) - def _mean_radius(neurite): - points = neurite.root_node.points + def trunk_first_radius(root_node): + return root_node.points[0][COLS.R] + + def trunk_mean_radius(root_node): + points = root_node.points + interval_lengths = morphmath.interval_lengths(points) path_lengths = np.insert(np.cumsum(interval_lengths), 0, 0) valid_pts = np.ones(len(path_lengths), dtype=bool) + if min_length_filter is not None: valid_pts = valid_pts & (path_lengths >= min_length_filter) if not valid_pts.any(): @@ -432,6 +545,7 @@ def _mean_radius(neurite): "point is returned." 
) return points[-1, COLS.R] + if max_length_filter is not None: valid_max = path_lengths <= max_length_filter valid_pts = valid_pts & valid_max @@ -443,24 +557,28 @@ def _mean_radius(neurite): ) # pylint: disable=invalid-unary-operand-type return points[~valid_max, COLS.R][0] + return points[valid_pts, COLS.R].mean() - return _map_neurites(_mean_radius, morph, neurite_type) + function = ( + trunk_first_radius + if max_length_filter is None and min_length_filter is None + else trunk_mean_radius + ) + + return _map_neurite_root_nodes(function, morph, neurite_type) @feature(shape=(...,)) def trunk_section_lengths(morph, neurite_type=NeuriteType.all): """List of lengths of trunk sections of neurites in a morph.""" - def trunk_section_length(neurite): - return morphmath.section_length(neurite.root_node.points) - - return _map_neurites(trunk_section_length, morph, neurite_type) + return _map_neurite_root_nodes(sf.section_length, morph, neurite_type) @feature(shape=()) def number_of_neurites(morph, neurite_type=NeuriteType.all): """Number of neurites in a morph.""" - return len(_map_neurites(lambda n: n, morph, neurite_type)) + return len(_map_neurites(lambda x, section_type: 1, morph, neurite_type)) @feature(shape=(...,)) @@ -491,31 +609,45 @@ def sholl_crossings(morph, neurite_type=NeuriteType.all, center=None, radii=None center=morph.soma.center, radii=np.arange(0, 1000, 100)) """ - def _count_crossings(neurite, radius): - """Used to count_crossings of segments in neurite with radius.""" - r2 = radius ** 2 + + def count_crossings(section, radius): + """Used to count crossings of segments in neurite with radius.""" + r2 = radius**2 count = 0 - for start, end in iter_segments(neurite): - start_dist2, end_dist2 = (morphmath.point_dist2(center, start), - morphmath.point_dist2(center, end)) + for start, end in iter_segments(section): + start_dist2, end_dist2 = ( + morphmath.point_dist2(center, start), + morphmath.point_dist2(center, end), + ) - count += int(start_dist2 <= 
r2 <= end_dist2 or - end_dist2 <= r2 <= start_dist2) + if start_dist2 <= r2 <= end_dist2 or end_dist2 <= r2 <= start_dist2: + count += 1 return count if center is None or radii is None: - assert isinstance(morph, Morphology) and morph.soma, \ - '`sholl_crossings` input error. If `center` or `radii` is not set then `morph` is ' \ + assert isinstance(morph, Morphology) and morph.soma, ( + '`sholl_crossings` input error. If `center` or `radii` is not set then `morph` is ' 'expected to be an instance of Morphology and have a soma.' + ) if center is None: _assert_soma_center(morph) center = morph.soma.center if radii is None: radii = [morph.soma.radius] - return [sum(_count_crossings(neurite, r) - for neurite in iter_neurites(morph, filt=is_type(neurite_type))) - for r in radii] + + if isinstance(morph, Iterable): + sections = filter(is_type(neurite_type), morph) + else: + sections = _get_sections(morph, neurite_type) + + counts_per_radius = [0 for _ in range(len(radii))] + + for section in sections: + for i, radius in enumerate(radii): + counts_per_radius[i] += count_crossings(section, radius) + + return counts_per_radius @feature(shape=(...,)) @@ -540,20 +672,21 @@ def sholl_frequency(morph, neurite_type=NeuriteType.all, step_size=10, bins=None list will be returned. 
""" _assert_soma_center(morph) - neurite_filter = is_type(neurite_type) if bins is None: min_soma_edge = morph.soma.radius - max_radius_per_neurite = [ - np.max(np.linalg.norm(n.points[:, COLS.XYZ] - morph.soma.center, axis=1)) - for n in morph.neurites if neurite_filter(n) + sections = _get_sections(morph, neurite_type) + + max_radius_per_section = [ + np.max(np.linalg.norm(section.points[:, COLS.XYZ] - morph.soma.center, axis=1)) + for section in sections ] - if not max_radius_per_neurite: + if not max_radius_per_section: return [] - bins = np.arange(min_soma_edge, min_soma_edge + max(max_radius_per_neurite), step_size) + bins = np.arange(min_soma_edge, min_soma_edge + max(max_radius_per_section), step_size) return sholl_crossings(morph, neurite_type, morph.soma.center, bins) @@ -564,34 +697,30 @@ def _extent_along_axis(morph, axis, neurite_type): The morphology is filtered by neurite type and the extent is calculated along the coordinate axis direction (e.g. COLS.X). """ - it_points = ( - p - for n in iter_neurites(morph, filt=is_type(neurite_type)) - for p in n.points[:, axis] - ) - try: - return abs(np.ptp(np.fromiter(it_points, dtype=np.float32))) - except ValueError: - # a ValueError is thrown when there are no points passed to ptp + points = _get_points(morph, neurite_type) + + if not points: return 0.0 + return abs(np.ptp(np.asarray(points)[:, axis])) + @feature(shape=()) def total_width(morph, neurite_type=NeuriteType.all): """Extent of morphology along axis x.""" - return _extent_along_axis(morph, axis=COLS.X, neurite_type=neurite_type) + return _extent_along_axis(morph, COLS.X, neurite_type) @feature(shape=()) def total_height(morph, neurite_type=NeuriteType.all): """Extent of morphology along axis y.""" - return _extent_along_axis(morph, axis=COLS.Y, neurite_type=neurite_type) + return _extent_along_axis(morph, COLS.Y, neurite_type) @feature(shape=()) def total_depth(morph, neurite_type=NeuriteType.all): """Extent of morphology along axis z.""" - 
return _extent_along_axis(morph, axis=COLS.Z, neurite_type=neurite_type) + return _extent_along_axis(morph, COLS.Z, neurite_type) @feature(shape=()) @@ -604,12 +733,7 @@ def volume_density(morph, neurite_type=NeuriteType.all): .. note:: Returns `np.nan` if the convex hull computation fails or there are not points available due to neurite type filtering. """ - # note: duplicate points are present but do not affect convex hull calculation - points = [ - point - for point_list in iter_neurites(morph, mapfun=sf.section_points, filt=is_type(neurite_type)) - for point in point_list - ] + points = _get_points(morph, neurite_type) if not points: return np.nan @@ -619,28 +743,24 @@ def volume_density(morph, neurite_type=NeuriteType.all): if morph_hull is None: return np.nan - total_volume = sum(iter_neurites(morph, mapfun=nf.total_volume, filt=is_type(neurite_type))) + total_volume = sum(total_volume_per_neurite(morph, neurite_type=neurite_type)) return total_volume / morph_hull.volume -def _unique_projected_points(morph, projection_plane, neurite_type): - +def _unique_projected_points(morph, projection_plane, neurite_type): key = "".join(sorted(projection_plane.lower())) try: axes = {"xy": COLS.XY, "xz": COLS.XZ, "yz": COLS.YZ}[key] except KeyError as e: - raise NeuroMError( f"Invalid 'projection_plane' argument {projection_plane}. " f"Please select 'xy', 'xz', or 'yz'." ) from e - points = list( - iter_neurites(morph, mapfun=sf.section_points, filt=is_type(neurite_type)) - ) + points = _get_points(morph, neurite_type) if len(points) == 0: return np.empty(shape=(0, 3), dtype=np.float32) @@ -723,7 +843,8 @@ def length_fraction_above_soma(morph, neurite_type=NeuriteType.all, up="Y"): raise NeuroMError(f"Unknown axis {axis}. 
Please choose 'X', 'Y', or 'Z'.") col = getattr(COLS, axis) - segments = list(iter_segments(morph, neurite_filter=is_type(neurite_type))) + + segments = _get_segments(morph, neurite_type) if not segments: return np.nan diff --git a/neurom/features/neurite.py b/neurom/features/neurite.py index 6aac5e9f6..1875105a9 100644 --- a/neurom/features/neurite.py +++ b/neurom/features/neurite.py @@ -35,10 +35,11 @@ >>> import neurom >>> from neurom import features ->>> m = neurom.load_morphology('path/to/morphology') ->>> features.get('max_radial_distance', m.neurites[0]) ->>> features.get('max_radial_distance', m) ->>> features.get('number_of_segments', m.neurites, neurite_type=neurom.AXON) +>>> m = neurom.load_morphology("tests/data/swc/Neuron.swc") +>>> max_radial_distances1 = features.get('max_radial_distance', m.neurites) +>>> max_radial_distances2 = features.get('max_radial_distance', m.neurites[0]) +>>> max_radial_distances3 = features.get('max_radial_distance', m) +>>> n_segments = features.get('number_of_segments', m, neurite_type=neurom.AXON) For more details see :ref:`features`. 
""" @@ -47,10 +48,16 @@ from functools import partial import numpy as np -from neurom import morphmath -from neurom.core.morphology import Section + +from neurom import morphmath, utils from neurom.core.dataformat import COLS -from neurom.features import NameSpace, feature, bifurcation as bf, section as sf +from neurom.core.morphology import Section, iter_points +from neurom.core.types import NeuriteType, is_composite_type +from neurom.core.types import tree_type_checker as is_type +from neurom.features import NameSpace +from neurom.features import bifurcation as bf +from neurom.features import feature +from neurom.features import section as sf from neurom.morphmath import convex_hull feature = partial(feature, namespace=NameSpace.NEURITE) @@ -58,114 +65,129 @@ L = logging.getLogger(__name__) -def _map_sections(fun, neurite, iterator_type=Section.ipreorder): +def _map_sections(fun, neurite, iterator_type=Section.ipreorder, section_type=NeuriteType.all): """Map `fun` to all the sections.""" - return list(map(fun, iterator_type(neurite.root_node))) + check_type = is_type(section_type) + if ( + section_type != NeuriteType.all + and not any(is_composite_type(i) for i in check_type.type) + and iterator_type in {Section.ibifurcation_point, Section.iforking_point} + ): -@feature(shape=()) -def max_radial_distance(neurite): - """Get the maximum radial distances of the termination sections.""" - term_radial_distances = section_term_radial_distances(neurite) - return max(term_radial_distances) if term_radial_distances else 0. 
+ def filt(section): + return check_type(section) and Section.is_homogeneous_point(section) + + else: + filt = check_type + + return list(map(fun, filter(filt, iterator_type(neurite.root_node)))) @feature(shape=()) -def number_of_segments(neurite): +def number_of_segments(neurite, section_type=NeuriteType.all): """Number of segments.""" - return sum(_map_sections(sf.number_of_segments, neurite)) + return sum(_map_sections(sf.number_of_segments, neurite, section_type=section_type)) @feature(shape=()) -def number_of_sections(neurite, iterator_type=Section.ipreorder): +def number_of_sections(neurite, iterator_type=Section.ipreorder, section_type=NeuriteType.all): """Number of sections. For a morphology it will be a sum of all neurites sections numbers.""" - return len(_map_sections(lambda s: s, neurite, iterator_type=iterator_type)) + return len( + _map_sections(lambda x: 1, neurite, iterator_type=iterator_type, section_type=section_type) + ) @feature(shape=()) -def number_of_bifurcations(neurite): +def number_of_bifurcations(neurite, section_type=NeuriteType.all): """Number of bf points.""" - return number_of_sections(neurite, iterator_type=Section.ibifurcation_point) + return number_of_sections( + neurite, iterator_type=Section.ibifurcation_point, section_type=section_type + ) @feature(shape=()) -def number_of_forking_points(neurite): +def number_of_forking_points(neurite, section_type=NeuriteType.all): """Number of forking points.""" - return number_of_sections(neurite, iterator_type=Section.iforking_point) + return number_of_sections( + neurite, iterator_type=Section.iforking_point, section_type=section_type + ) @feature(shape=()) -def number_of_leaves(neurite): +def number_of_leaves(neurite, section_type=NeuriteType.all): """Number of leaves points.""" - return number_of_sections(neurite, iterator_type=Section.ileaf) + return number_of_sections(neurite, iterator_type=Section.ileaf, section_type=section_type) @feature(shape=()) -def total_length(neurite): +def 
total_length(neurite, section_type=NeuriteType.all): """Neurite length. For a morphology it will be a sum of all neurite lengths.""" - return sum(_map_sections(sf.section_length, neurite)) + return sum(_map_sections(sf.section_length, neurite, section_type=section_type)) @feature(shape=()) -def total_area(neurite): +def total_area(neurite, section_type=NeuriteType.all): """Neurite surface area. For a morphology it will be a sum of all neurite areas. The area is defined as the sum of the area of the sections. """ - return sum(_map_sections(sf.section_area, neurite)) + return sum(_map_sections(sf.section_area, neurite, section_type=section_type)) @feature(shape=()) -def total_volume(neurite): +def total_volume(neurite, section_type=NeuriteType.all): """Neurite volume. For a morphology it will be a sum of neurites volumes.""" - return sum(_map_sections(sf.section_volume, neurite)) + return sum(_map_sections(sf.section_volume, neurite, section_type=section_type)) @feature(shape=(...,)) -def section_lengths(neurite): +def section_lengths(neurite, section_type=NeuriteType.all): """Section lengths.""" - return _map_sections(sf.section_length, neurite) + return _map_sections(sf.section_length, neurite, section_type=section_type) @feature(shape=(...,)) -def section_term_lengths(neurite): +def section_term_lengths(neurite, section_type=NeuriteType.all): """Termination section lengths.""" - return _map_sections(sf.section_length, neurite, Section.ileaf) + return _map_sections(sf.section_length, neurite, Section.ileaf, section_type) @feature(shape=(...,)) -def section_bif_lengths(neurite): +def section_bif_lengths(neurite, section_type=NeuriteType.all): """Bifurcation section lengths.""" - return _map_sections(sf.section_length, neurite, Section.ibifurcation_point) + return _map_sections(sf.section_length, neurite, Section.ibifurcation_point, section_type) @feature(shape=(...,)) -def section_branch_orders(neurite): +def section_branch_orders(neurite, 
section_type=NeuriteType.all): """Section branch orders.""" - return _map_sections(sf.branch_order, neurite) + return _map_sections(sf.branch_order, neurite, section_type=section_type) @feature(shape=(...,)) -def section_bif_branch_orders(neurite): +def section_bif_branch_orders(neurite, section_type=NeuriteType.all): """Bifurcation section branch orders.""" - return _map_sections(sf.branch_order, neurite, Section.ibifurcation_point) + return _map_sections( + sf.branch_order, neurite, Section.ibifurcation_point, section_type=section_type + ) @feature(shape=(...,)) -def section_term_branch_orders(neurite): +def section_term_branch_orders(neurite, section_type=NeuriteType.all): """Termination section branch orders.""" - return _map_sections(sf.branch_order, neurite, Section.ileaf) + return _map_sections(sf.branch_order, neurite, Section.ileaf, section_type=section_type) @feature(shape=(...,)) -def section_path_distances(neurite): +def section_path_distances(neurite, iterator_type=Section.ipreorder, section_type=NeuriteType.all): """Path lengths.""" - - def pl2(node): - """Calculate the path length using cached section lengths.""" - return sum(n.length for n in node.iupstream()) - - return _map_sections(pl2, neurite) + return _map_sections( + partial(sf.section_path_length, stop_node=neurite.root_node), + neurite, + iterator_type=iterator_type, + section_type=section_type, + ) ################################################################################ @@ -173,120 +195,124 @@ def pl2(node): ################################################################################ -def _map_segments(func, neurite): +def _map_segments(func, neurite, section_type=NeuriteType.all): """Map `func` to all the segments. `func` accepts a section and returns list of values corresponding to each segment. 
""" - return [ - segment_value - for section in Section.ipreorder(neurite.root_node) - for segment_value in func(section) - ] + return list(utils.flatten(_map_sections(func, neurite, section_type=section_type))) @feature(shape=(...,)) -def segment_lengths(neurite): +def segment_lengths(neurite, section_type=NeuriteType.all): """Lengths of the segments.""" - return _map_segments(sf.segment_lengths, neurite) + return _map_segments(sf.segment_lengths, neurite, section_type=section_type) @feature(shape=(...,)) -def segment_areas(neurite): +def segment_areas(neurite, section_type=NeuriteType.all): """Areas of the segments.""" - return _map_segments(sf.segment_areas, neurite) + return _map_segments(sf.segment_areas, neurite, section_type=section_type) @feature(shape=(...,)) -def segment_volumes(neurite): +def segment_volumes(neurite, section_type=NeuriteType.all): """Volumes of the segments.""" - return _map_segments(sf.segment_volumes, neurite) + return _map_segments(sf.segment_volumes, neurite, section_type=section_type) @feature(shape=(...,)) -def segment_radii(neurite): +def segment_radii(neurite, section_type=NeuriteType.all): """Arithmetic mean of the radii of the points in segments.""" - return _map_segments(sf.segment_mean_radii, neurite) + return _map_segments(sf.segment_mean_radii, neurite, section_type=section_type) @feature(shape=(...,)) -def segment_taper_rates(neurite): +def segment_taper_rates(neurite, section_type=NeuriteType.all): """Diameters taper rates of the segments. The taper rate is defined as the absolute radii differences divided by length of the section """ - return _map_segments(sf.segment_taper_rates, neurite) + return _map_segments(sf.segment_taper_rates, neurite, section_type=section_type) @feature(shape=(...,)) -def section_taper_rates(neurite): +def section_taper_rates(neurite, section_type=NeuriteType.all): """Diameter taper rates of the sections from root to tip. Taper rate is defined here as the linear fit along a section. 
It is expected to be negative for morphologies. """ - return _map_sections(sf.taper_rate, neurite) + return _map_sections(sf.taper_rate, neurite, section_type=section_type) @feature(shape=(...,)) -def segment_meander_angles(neurite): +def segment_meander_angles(neurite, section_type=NeuriteType.all): """Inter-segment opening angles in a section.""" - return _map_segments(sf.section_meander_angles, neurite) + return _map_segments(sf.section_meander_angles, neurite, section_type=section_type) @feature(shape=(..., 3)) -def segment_midpoints(neurite): +def segment_midpoints(neurite, section_type=NeuriteType.all): """Return a list of segment mid-points.""" - return _map_segments(sf.segment_midpoints, neurite) + return _map_segments(sf.segment_midpoints, neurite, section_type=section_type) @feature(shape=(...,)) -def segment_path_lengths(neurite): +def segment_path_lengths(neurite, section_type=NeuriteType.all): """Returns pathlengths between all non-root points and their root point.""" pathlength = {} - def segments_pathlength(section): + def segments_path_length(section): if section.id not in pathlength: - if section.parent: - pathlength[section.id] = section.parent.length + pathlength[section.parent.id] - else: - pathlength[section.id] = 0 + pathlength[section.id] = ( + 0.0 + if section.id == neurite.root_node.id + else section.parent.length + pathlength[section.parent.id] + ) + return pathlength[section.id] + np.cumsum(sf.segment_lengths(section)) - return _map_segments(segments_pathlength, neurite) + return _map_segments(segments_path_length, neurite, section_type=section_type) @feature(shape=(...,)) -def segment_radial_distances(neurite, origin=None): +def segment_radial_distances(neurite, origin=None, section_type=NeuriteType.all): """Returns the list of distances between all segment mid points and origin.""" - pos = neurite.root_node.points[0] if origin is None else origin - - def radial_distances(section): - """List of distances between the mid point of each 
segment and pos.""" - mid_pts = 0.5 * (section.points[:-1, COLS.XYZ] + section.points[1:, COLS.XYZ]) - return np.linalg.norm(mid_pts - pos[COLS.XYZ], axis=1) - - return _map_segments(radial_distances, neurite) + origin = neurite.root_node.points[0, COLS.XYZ] if origin is None else origin + return _map_segments( + func=partial(sf.segment_midpoint_radial_distances, origin=origin), + neurite=neurite, + section_type=section_type, + ) @feature(shape=(...,)) -def local_bifurcation_angles(neurite): +def local_bifurcation_angles(neurite, section_type=NeuriteType.all): """Get a list of local bf angles.""" - return _map_sections(bf.local_bifurcation_angle, - neurite, - iterator_type=Section.ibifurcation_point) + return _map_sections( + bf.local_bifurcation_angle, + neurite, + iterator_type=Section.ibifurcation_point, + section_type=section_type, + ) @feature(shape=(...,)) -def remote_bifurcation_angles(neurite): +def remote_bifurcation_angles(neurite, section_type=NeuriteType.all): """Get a list of remote bf angles.""" - return _map_sections(bf.remote_bifurcation_angle, - neurite, - iterator_type=Section.ibifurcation_point) + return _map_sections( + bf.remote_bifurcation_angle, + neurite, + iterator_type=Section.ibifurcation_point, + section_type=section_type, + ) @feature(shape=(...,)) -def partition_asymmetry(neurite, variant='branch-order', method='petilla'): +def partition_asymmetry( + neurite, variant='branch-order', method='petilla', section_type=NeuriteType.all +): """Partition asymmetry at bf points. Variant: length is a different definition, as the absolute difference in @@ -295,46 +321,58 @@ def partition_asymmetry(neurite, variant='branch-order', method='petilla'): :func:`neurom.features.bifurcationfunc.partition_asymmetry` """ if variant not in {'branch-order', 'length'}: - raise ValueError('Please provide a valid variant for partition asymmetry,' - f'found {variant}') + raise ValueError( + "Please provide a valid variant for partition asymmetry. 
" + f"Expected 'branch-order' or 'length', got {variant}." + ) if method not in {'petilla', 'uylings'}: - raise ValueError('Please provide a valid method for partition asymmetry,' - 'either "petilla" or "uylings"') + raise ValueError( + "Please provide a valid method for partition asymmetry. " + f"Expected 'petilla' or 'uylings', got {method}." + ) + + # create a downstream iterator that is filtered by the section type + it_type = utils.filtered_iterator(is_type(section_type), Section.ipreorder) if variant == 'branch-order': return _map_sections( - partial(bf.partition_asymmetry, uylings=method == 'uylings'), + partial(bf.partition_asymmetry, uylings=method == 'uylings', iterator_type=it_type), neurite, - Section.ibifurcation_point) + iterator_type=Section.ibifurcation_point, + section_type=section_type, + ) - asymmetries = [] - neurite_length = total_length(neurite) - for section in Section.ibifurcation_point(neurite.root_node): - pathlength_diff = abs(sf.downstream_pathlength(section.children[0]) - - sf.downstream_pathlength(section.children[1])) - asymmetries.append(pathlength_diff / neurite_length) - return asymmetries + return _map_sections( + partial( + bf.downstream_pathlength_asymmetry, + normalization_length=total_length(neurite, section_type=section_type), + iterator_type=it_type, + ), + neurite, + iterator_type=Section.ibifurcation_point, + section_type=section_type, + ) @feature(shape=(...,)) -def partition_asymmetry_length(neurite, method='petilla'): +def partition_asymmetry_length(neurite, method='petilla', section_type=NeuriteType.all): """'partition_asymmetry' feature with `variant='length'`. Because it is often used, it has a dedicated feature. 
""" - return partition_asymmetry(neurite, 'length', method) + return partition_asymmetry(neurite, 'length', method, section_type=section_type) @feature(shape=(...,)) -def bifurcation_partitions(neurite): +def bifurcation_partitions(neurite, section_type=NeuriteType.all): """Partition at bf points.""" - return _map_sections(bf.bifurcation_partition, - neurite, - Section.ibifurcation_point) + return _map_sections( + bf.bifurcation_partition, neurite, Section.ibifurcation_point, section_type=section_type + ) @feature(shape=(...,)) -def sibling_ratios(neurite, method='first'): +def sibling_ratios(neurite, method='first', section_type=NeuriteType.all): """Sibling ratios at bf points. The sibling ratio is the ratio between the diameters of the @@ -342,25 +380,28 @@ def sibling_ratios(neurite, method='first'): 0 and 1. Method argument allows one to consider mean diameters along the child section instead of diameter of the first point. """ - return _map_sections(partial(bf.sibling_ratio, method=method), - neurite, - Section.ibifurcation_point) + return _map_sections( + partial(bf.sibling_ratio, method=method), + neurite, + Section.ibifurcation_point, + section_type=section_type, + ) @feature(shape=(..., 2)) -def partition_pairs(neurite): +def partition_pairs(neurite, section_type=NeuriteType.all): """Partition pairs at bf points. Partition pair is defined as the number of bifurcations at the two daughters of the bifurcating section """ - return _map_sections(bf.partition_pair, - neurite, - Section.ibifurcation_point) + return _map_sections( + bf.partition_pair, neurite, Section.ibifurcation_point, section_type=section_type + ) @feature(shape=(...,)) -def diameter_power_relations(neurite, method='first'): +def diameter_power_relations(neurite, method='first', section_type=NeuriteType.all): """Calculate the diameter power relation at a bf point. 
Diameter power relation is defined in https://www.ncbi.nlm.nih.gov/pubmed/18568015 @@ -368,44 +409,65 @@ def diameter_power_relations(neurite, method='first'): This quantity gives an indication of how far the branching is from the Rall ratio (when =1). """ - return _map_sections(partial(bf.diameter_power_relation, method=method), - neurite, - Section.ibifurcation_point) + return _map_sections( + partial(bf.diameter_power_relation, method=method), + neurite, + Section.ibifurcation_point, + section_type=section_type, + ) + + +def _radial_distances(neurite, origin, iterator_type, section_type): + if origin is None: + origin = neurite.root_node.points[0] + + return _map_sections( + partial(sf.section_radial_distance, origin=origin), + neurite=neurite, + iterator_type=iterator_type, + section_type=section_type, + ) @feature(shape=(...,)) -def section_radial_distances(neurite, origin=None, iterator_type=Section.ipreorder): +def section_radial_distances(neurite, origin=None, section_type=NeuriteType.all): """Section radial distances. The iterator_type can be used to select only terminal sections (ileaf) or only bifurcations (ibifurcation_point). 
""" - pos = neurite.root_node.points[0] if origin is None else origin - return _map_sections(partial(sf.section_radial_distance, origin=pos), - neurite, - iterator_type) + return _radial_distances(neurite, origin, Section.ipreorder, section_type) @feature(shape=(...,)) -def section_term_radial_distances(neurite, origin=None): +def section_term_radial_distances(neurite, origin=None, section_type=NeuriteType.all): """Get the radial distances of the termination sections.""" - return section_radial_distances(neurite, origin, Section.ileaf) + return _radial_distances(neurite, origin, Section.ileaf, section_type) + + +@feature(shape=()) +def max_radial_distance(neurite, origin=None, section_type=NeuriteType.all): + """Get the maximum radial distances of the termination sections.""" + term_radial_distances = section_term_radial_distances( + neurite, origin=origin, section_type=section_type + ) + return max(term_radial_distances) if term_radial_distances else 0.0 @feature(shape=(...,)) -def section_bif_radial_distances(neurite, origin=None): +def section_bif_radial_distances(neurite, origin=None, section_type=NeuriteType.all): """Get the radial distances of the bf sections.""" - return section_radial_distances(neurite, origin, Section.ibifurcation_point) + return _radial_distances(neurite, origin, Section.ibifurcation_point, section_type) @feature(shape=(...,)) -def terminal_path_lengths(neurite): +def terminal_path_lengths(neurite, section_type=NeuriteType.all): """Get the path lengths to each terminal point.""" - return _map_sections(sf.section_path_length, neurite, Section.ileaf) + return section_path_distances(neurite, iterator_type=Section.ileaf, section_type=section_type) @feature(shape=()) -def volume_density(neurite): +def volume_density(neurite, section_type=NeuriteType.all): """Get the volume density. The volume density is defined as the ratio of the neurite volume and @@ -416,46 +478,57 @@ def volume_density(neurite): .. 
note:: Returns `np.nan` if the convex hull computation fails. """ - neurite_hull = convex_hull(neurite.points[:, COLS.XYZ]) - return neurite.volume / neurite_hull.volume if neurite_hull is not None else np.nan + neurite_volume = total_volume(neurite, section_type=section_type) + + def get_points(section): + return section.points[:, COLS.XYZ].tolist() + + # note: duplicate points included but not affect the convex hull calculation + points = list(utils.flatten(_map_sections(get_points, neurite, section_type=section_type))) + + hull = convex_hull(points) + + return neurite_volume / hull.volume if hull is not None else np.nan @feature(shape=(...,)) -def section_volumes(neurite): +def section_volumes(neurite, section_type=NeuriteType.all): """Section volumes.""" - return _map_sections(sf.section_volume, neurite) + return _map_sections(sf.section_volume, neurite, section_type=section_type) @feature(shape=(...,)) -def section_areas(neurite): +def section_areas(neurite, section_type=NeuriteType.all): """Section areas.""" - return _map_sections(sf.section_area, neurite) + return _map_sections(sf.section_area, neurite, section_type=section_type) @feature(shape=(...,)) -def section_tortuosity(neurite): +def section_tortuosity(neurite, section_type=NeuriteType.all): """Section tortuosities.""" - return _map_sections(sf.section_tortuosity, neurite) + return _map_sections(sf.section_tortuosity, neurite, section_type=section_type) @feature(shape=(...,)) -def section_end_distances(neurite): +def section_end_distances(neurite, section_type=NeuriteType.all): """Section end to end distances.""" - return _map_sections(sf.section_end_distance, neurite) + return _map_sections(sf.section_end_distance, neurite, section_type=section_type) @feature(shape=(...,)) -def principal_direction_extents(neurite, direction=0): +def principal_direction_extents(neurite, direction=0, section_type=NeuriteType.all): """Principal direction extent of neurites in morphologies. 
Note: Principal direction extents are always sorted in descending order. Therefore, by default the maximal principal direction extent is returned. """ - return [morphmath.principal_direction_extent(neurite.points[:, COLS.XYZ])[direction]] + points = list(iter_points(neurite, section_filter=is_type(section_type))) + + return [morphmath.principal_direction_extent(np.unique(points, axis=0))[direction]] @feature(shape=(...,)) -def section_strahler_orders(neurite): +def section_strahler_orders(neurite, section_type=NeuriteType.all): """Inter-segment opening angles in a section.""" - return _map_sections(sf.strahler_order, neurite) + return _map_sections(sf.strahler_order, neurite, section_type=section_type) diff --git a/neurom/features/population.py b/neurom/features/population.py index dfd0a7eb3..edc188885 100644 --- a/neurom/features/population.py +++ b/neurom/features/population.py @@ -33,22 +33,24 @@ >>> import neurom >>> from neurom import features ->>> pop = neurom.load_morphologies('path/to/morphs') ->>> features.get('sholl_frequency', pop) +>>> pop = neurom.load_morphologies("tests/data/valid_set") +>>> frequencies = features.get('sholl_frequency', pop) For more details see :ref:`features`. """ from functools import partial + import numpy as np from neurom.core.dataformat import COLS +from neurom.core.morphology import iter_sections from neurom.core.types import NeuriteType from neurom.core.types import tree_type_checker as is_type -from neurom.features import feature, NameSpace +from neurom.features import NameSpace, feature +from neurom.features import morphology as mf from neurom.features.morphology import _assert_soma_center -from neurom.features.morphology import sholl_crossings feature = partial(feature, namespace=NameSpace.POPULATION) @@ -63,6 +65,7 @@ def sholl_frequency(morphs, neurite_type=NeuriteType.all, step_size=10, bins=Non step_size(float): step size between Sholl radii bins(iterable of floats): custom binning to use for the Sholl radii. 
If None, it uses intervals of step_size between min and max radii of ``morphs``. + use_subtrees (bool): Enable mixed subtree processing. Note: Given a population, the concentric circles range from the smallest soma radius to the @@ -73,14 +76,25 @@ def sholl_frequency(morphs, neurite_type=NeuriteType.all, step_size=10, bins=Non neurite_filter = is_type(neurite_type) if bins is None: + section_iterator = partial( + iter_sections, neurite_filter=neurite_filter, section_filter=neurite_filter + ) + + max_radius_per_section = [ + np.max(np.linalg.norm(section.points[:, COLS.XYZ] - morph.soma.center, axis=1)) + for morph in map(_assert_soma_center, morphs) + for section in section_iterator(morph) + ] + + if not max_radius_per_section: + return [] + min_soma_edge = min(n.soma.radius for n in morphs) - max_radii = max(np.max(np.linalg.norm(n.points[:, COLS.XYZ], axis=1)) - for m in morphs - for n in m.neurites if neurite_filter(n)) - bins = np.arange(min_soma_edge, min_soma_edge + max_radii, step_size) + + bins = np.arange(min_soma_edge, min_soma_edge + max(max_radius_per_section), step_size) def _sholl_crossings(morph): _assert_soma_center(morph) - return sholl_crossings(morph, neurite_type, morph.soma.center, bins) + return mf.sholl_crossings(morph, neurite_type, morph.soma.center, bins) - return np.array([_sholl_crossings(m) for m in morphs]).sum(axis=0) + return np.array([_sholl_crossings(m) for m in morphs]).sum(axis=0).tolist() diff --git a/neurom/features/section.py b/neurom/features/section.py index 259699f32..346899fc8 100644 --- a/neurom/features/section.py +++ b/neurom/features/section.py @@ -32,7 +32,7 @@ from neurom import morphmath as mm from neurom.core.dataformat import COLS -from neurom.core.morphology import iter_segments +from neurom.core.morphology import Section, iter_segments from neurom.morphmath import interval_lengths @@ -41,9 +41,14 @@ def section_points(section): return section.points[:, COLS.XYZ] -def section_path_length(section): - """Path 
length from section to root.""" - return sum(s.length for s in section.iupstream()) +def section_path_length(section, stop_node=None): + """Path length from section to root. + + Args: + section: Section object. + stop_node: Node to stop the upstream traversal. If None, it stops when no parent is found. + """ + return sum(map(section_length, section.iupstream(stop_node=stop_node))) def section_length(section): @@ -137,6 +142,13 @@ def segment_midpoints(section): return np.divide(np.add(pts[:-1], pts[1:]), 2.0).tolist() +def segment_midpoint_radial_distances(section, origin=None): + """Returns the list of segment midpoint radial distances to the origin.""" + origin = np.zeros(3, dtype=float) if origin is None else origin + midpoints = np.array(segment_midpoints(section)) + return np.linalg.norm(midpoints - origin, axis=1).tolist() + + def segment_taper_rates(section): """Returns the list of segment taper rates within the section.""" pts = section.points[:, COLS.XYZR] @@ -162,8 +174,7 @@ def section_radial_distance(section, origin): def section_meander_angles(section): """Inter-segment opening angles in a section.""" p = section.points - return [mm.angle_3points(p[i - 1], p[i - 2], p[i]) - for i in range(2, len(p))] + return [mm.angle_3points(p[i - 1], p[i - 2], p[i]) for i in range(2, len(p))] def strahler_order(section): @@ -213,6 +224,6 @@ def section_mean_radius(section): return np.sum(mean_radii * lengths) / np.sum(lengths) -def downstream_pathlength(section): +def downstream_pathlength(section, iterator_type=Section.ipreorder): """Compute the total downstream length starting from a section.""" - return sum(sec.length for sec in section.ipreorder()) + return sum(sec.length for sec in iterator_type(section)) diff --git a/neurom/features/sectionfunc.py b/neurom/features/sectionfunc.py deleted file mode 100644 index 82549fcaa..000000000 --- a/neurom/features/sectionfunc.py +++ /dev/null @@ -1,8 +0,0 @@ -"""For backward compatibility only.""" -# pylint: skip-file - 
-from neurom.features.section import * # pragma: no cover -from neurom.utils import deprecated_module # pragma: no cover - -deprecated_module('Module `neurom.features.sectionfunc` is deprecated. Use' - '`neurom.features.section` instead.') # pragma: no cover diff --git a/neurom/geom/__init__.py b/neurom/geom/__init__.py index 9eafc5d80..3ce49dd91 100644 --- a/neurom/geom/__init__.py +++ b/neurom/geom/__init__.py @@ -34,7 +34,7 @@ import neurom.morphmath from neurom.core.dataformat import COLS -from neurom.geom.transform import translate, rotate +from neurom.geom.transform import rotate, translate L = logging.getLogger(__name__) @@ -45,8 +45,9 @@ def bounding_box(obj): Returns: 2D numpy array of [[min_x, min_y, min_z], [max_x, max_y, max_z]] """ - return np.array([np.min(obj.points[:, COLS.XYZ], axis=0), - np.max(obj.points[:, COLS.XYZ], axis=0)]) + return np.array( + [np.min(obj.points[:, COLS.XYZ], axis=0), np.max(obj.points[:, COLS.XYZ], axis=0)] + ) def convex_hull(obj): diff --git a/neurom/geom/transform.py b/neurom/geom/transform.py index 726865eea..14b6bd251 100644 --- a/neurom/geom/transform.py +++ b/neurom/geom/transform.py @@ -46,6 +46,7 @@ class Transform3D: """Class representing a generic 3D transformation.""" + __doc__ += _TRANSFDOC def __call__(self, points): @@ -55,6 +56,7 @@ def __call__(self, points): class Translation(Transform3D): """Class representing a 3D translation.""" + __doc__ += _TRANSFDOC def __init__(self, translation): @@ -72,6 +74,7 @@ def __call__(self, points): class Rotation(Transform3D): """Class representing a 3D rotation.""" + __doc__ += _TRANSFDOC def __init__(self, dcm): @@ -89,6 +92,7 @@ def __call__(self, points): class PivotRotation(Rotation): """Class representing a 3D rotation about a pivot point.""" + __doc__ += _TRANSFDOC def __init__(self, dcm, pivot=None): @@ -148,7 +152,7 @@ def rotate(obj, axis, angle, origin=None): def _sin(x): """Sine with case for pi multiples.""" - return 0. if np.isclose(np.mod(x, np.pi), 0.) 
else np.sin(x) + return 0.0 if np.isclose(np.mod(x, np.pi), 0.0) else np.sin(x) def _rodrigues_to_dcm(axis, angle): @@ -174,8 +178,8 @@ def _rodrigues_to_dcm(axis, angle): uyz = uy * uz sn = _sin(angle) - cs = _sin(np.pi / 2. - angle) - cs1 = 1. - cs + cs = _sin(np.pi / 2.0 - angle) + cs1 = 1.0 - cs R = np.zeros([3, 3]) diff --git a/neurom/io/utils.py b/neurom/io/utils.py index 393d4f64a..0644e9527 100644 --- a/neurom/io/utils.py +++ b/neurom/io/utils.py @@ -38,10 +38,10 @@ from pathlib import Path import morphio + from neurom.core.morphology import Morphology from neurom.core.population import Population from neurom.exceptions import NeuroMError -from neurom.utils import warn_deprecated L = logging.getLogger(__name__) @@ -120,7 +120,7 @@ def _get_file(stream, extension): return temp_file -def load_morphology(morph, reader=None): +def load_morphology(morph, reader=None, *, mutable=None, process_subtrees=False): """Build section trees from a morphology or a h5, swc or asc file. Args: @@ -134,6 +134,10 @@ def load_morphology(morph, reader=None): must be passed with the corresponding file format (asc, swc and h5) reader (str): Optional, must be provided if morphology is a stream to specify the file format (asc, swc, h5) + mutable (bool|None): Whether to enforce mutability. If None and a morphio/neurom object is + passed, the initial mutability will be maintained. If None and the + morphology is loaded, then it will be immutable by default. 
+ process_subtrees (bool): enable mixed tree processing if set to True Returns: A Morphology object @@ -156,26 +160,30 @@ def load_morphology(morph, reader=None): ) )'''), reader='asc') """ - if isinstance(morph, (Morphology, morphio.Morphology, morphio.mut.Morphology)): - return Morphology(morph) - - if reader: - return Morphology(_get_file(morph, reader)) - - return Morphology(morph, Path(morph).name) + if isinstance(morph, Morphology): + name = morph.name + morphio_morph = morph.to_morphio() + elif isinstance(morph, (morphio.Morphology, morphio.mut.Morphology)): + name = "Morphology" + morphio_morph = morph + else: + filepath = _get_file(morph, reader) if reader else morph + name = os.path.basename(filepath) + morphio_morph = morphio.Morphology(filepath) + # None does not modify existing mutability + if mutable is not None: + if mutable and isinstance(morphio_morph, morphio.Morphology): + morphio_morph = morphio_morph.as_mutable() + elif not mutable and isinstance(morphio_morph, morphio.mut.Morphology): + morphio_morph = morphio_morph.as_immutable() -def load_neuron(morph, reader=None): - """Deprecated in favor of ``load_morphology``.""" - warn_deprecated('`neurom.io.utils.load_neuron` is deprecated in favor of ' - '`neurom.io.utils.load_morphology`') # pragma: no cover - return load_morphology(morph, reader) # pragma: no cover + return Morphology(morphio_morph, name=name, process_subtrees=process_subtrees) -def load_morphologies(morphs, - name=None, - ignored_exceptions=(), - cache=False): +def load_morphologies( + morphs, name=None, ignored_exceptions=(), *, cache=False, process_subtrees=False +): """Create a population object. From all morphologies in a directory of from morphologies in a list of file names. 
@@ -188,6 +196,7 @@ def load_morphologies(morphs, ignored_exceptions (tuple): NeuroM and MorphIO exceptions that you want to ignore when loading morphologies cache (bool): whether to cache the loaded morphologies in memory + process_subtrees (bool): enable mixed tree processing if set to True Returns: Population: population object @@ -198,11 +207,6 @@ def load_morphologies(morphs, else: files = morphs name = name or 'Population' - return Population(files, name, ignored_exceptions, cache) - - -def load_neurons(morphs, name=None, ignored_exceptions=(), cache=False): - """Deprecated in favor of ``load_morphologies``.""" - warn_deprecated('`neurom.io.utils.load_neurons` is deprecated in favor of ' - '`neurom.io.utils.load_morphologies`') # pragma: no cover - return load_morphologies(morphs, name, ignored_exceptions, cache) # pragma: no cover + return Population( + files, name, ignored_exceptions, cache=cache, process_subtrees=process_subtrees + ) diff --git a/neurom/morphmath.py b/neurom/morphmath.py index 6c542ba01..513913d62 100644 --- a/neurom/morphmath.py +++ b/neurom/morphmath.py @@ -27,8 +27,8 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
"""Mathematical and geometrical functions used to compute morphometrics.""" -import math import logging +import math from itertools import combinations import numpy as np @@ -43,7 +43,6 @@ from neurom.core.dataformat import COLS - L = logging.getLogger(__name__) @@ -62,9 +61,13 @@ def vector(p1, p2): def linear_interpolate(p1, p2, fraction): """Returns the point p satisfying: p1 + fraction * (p2 - p1).""" - return np.array((p1[0] + fraction * (p2[0] - p1[0]), - p1[1] + fraction * (p2[1] - p1[1]), - p1[2] + fraction * (p2[2] - p1[2]))) + return np.array( + ( + p1[0] + fraction * (p2[0] - p1[0]), + p1[1] + fraction * (p2[1] - p1[1]), + p1[2] + fraction * (p2[2] - p1[2]), + ) + ) def interpolate_radius(r1, r2, fraction): @@ -87,10 +90,12 @@ def interpolate_radius(r1, r2, fraction): Note: The fraction is assumed from point P1, not from point P2. """ + def f(a, b, c): """Returns the length of the interpolated radius calculated using similar triangles.""" return a + c * (b - a) - return f(r2, r1, 1. - fraction) if r1 > r2 else f(r1, r2, fraction) + + return f(r2, r1, 1.0 - fraction) if r1 > r2 else f(r1, r2, fraction) def interval_lengths(points, prepend_zero=False): @@ -122,7 +127,7 @@ def path_fraction_id_offset(points, fraction, relative_offset=False): Returns: (segment ID, segment offset) pair. """ - if not 0. <= fraction <= 1.0: + if not 0.0 <= fraction <= 1.0: raise ValueError("Invalid fraction: %.3f" % fraction) lengths = interval_lengths(points) cum_lengths = np.cumsum(lengths) @@ -239,8 +244,7 @@ def angle_3points(p0, p1, p2): """ vec1 = vector(p1, p0) vec2 = vector(p2, p0) - return math.atan2(np.linalg.norm(np.cross(vec1, vec2)), - np.dot(vec1, vec2)) + return math.atan2(np.linalg.norm(np.cross(vec1, vec2)), np.dot(vec1, vec2)) def angle_between_vectors(p1, p2): @@ -248,13 +252,6 @@ def angle_between_vectors(p1, p2): Normalizes the input vectors and computes the relative angle between them. 
- - >>> angle_between((1, 0), (0, 1)) - 1.5707963267948966 - >>> angle_between((1, 0), (1, 0)) - 0.0 - >>> angle_between((1, 0), (-1, 0)) - 3.141592653589793 """ if np.equal(p1, p2).all(): return 0.0 @@ -360,7 +357,7 @@ def segment_radius(seg): Returns: arithmetic mean of the radii of the points in seg """ - return (seg[0][COLS.R] + seg[1][COLS.R]) / 2. + return (seg[0][COLS.R] + seg[1][COLS.R]) / 2.0 def segment_x_coordinate(seg): @@ -368,7 +365,7 @@ def segment_x_coordinate(seg): Returns: arithmetic mean of the x coordinates of the points in seg """ - return (seg[0][COLS.X] + seg[1][COLS.X]) / 2. + return (seg[0][COLS.X] + seg[1][COLS.X]) / 2.0 def segment_y_coordinate(seg): @@ -376,7 +373,7 @@ def segment_y_coordinate(seg): Returns: arithmetic mean of the y coordinates of the points in seg """ - return (seg[0][COLS.Y] + seg[1][COLS.Y]) / 2. + return (seg[0][COLS.Y] + seg[1][COLS.Y]) / 2.0 def segment_z_coordinate(seg): @@ -384,7 +381,7 @@ def segment_z_coordinate(seg): Returns: arithmetic mean of the z coordinates of the points in seg """ - return (seg[0][COLS.Z] + seg[1][COLS.Z]) / 2. + return (seg[0][COLS.Z] + seg[1][COLS.Z]) / 2.0 def segment_radial_dist(seg, pos): @@ -464,7 +461,7 @@ def pca(points): def sphere_area(r): """Compute the area of a sphere with radius r.""" - return 4. * math.pi * r ** 2 + return 4.0 * math.pi * r**2 # Useful alias for path_distance @@ -513,17 +510,13 @@ def convex_hull(points): scipy.spatial.ConvexHull object if successful, otherwise None """ if len(points) == 0: - L.exception( - "Failure to compute convex hull because there are no points" - ) + L.exception("Failure to compute convex hull because there are no points") return None try: return ConvexHull(points) except QhullError: - L.exception( - "Failure to compute convex hull because of geometrical degeneracy." 
- ) + L.exception("Failure to compute convex hull because of geometrical degeneracy.") return None diff --git a/neurom/stats.py b/neurom/stats.py index 350172b68..3d8e203ee 100644 --- a/neurom/stats.py +++ b/neurom/stats.py @@ -31,7 +31,7 @@ Nothing fancy. Just commonly used functions using scipy functionality. """ -from collections import namedtuple, OrderedDict +from collections import OrderedDict, namedtuple from enum import Enum, unique import numpy as np @@ -43,6 +43,7 @@ @unique class StatTests(Enum): """Enum representing valid statistical tests of scipy.""" + ks = 1 wilcoxon = 2 ttest = 3 @@ -76,9 +77,11 @@ def fit_results_to_dict(fit_results, min_bound=None, max_bound=None): Supported fit types: 'norm', 'expon', 'uniform' """ type_map = {'norm': 'normal', 'expon': 'exponential', 'uniform': 'uniform'} - param_map = {'uniform': lambda p: [('min', p[0]), ('max', p[0] + p[1])], - 'norm': lambda p: [('mu', p[0]), ('sigma', p[1])], - 'expon': lambda p: [('lambda', 1.0 / p[1])]} + param_map = { + 'uniform': lambda p: [('min', p[0]), ('max', p[0] + p[1])], + 'norm': lambda p: [('mu', p[0]), ('sigma', p[1])], + 'expon': lambda p: [('lambda', 1.0 / p[1])], + } d = OrderedDict({'type': type_map[fit_results.type]}) d.update(param_map[fit_results.type](fit_results.params)) @@ -145,7 +148,6 @@ def scalar_stats(data, functions=('min', 'max', 'mean', 'std')): """ stats = {} for func in functions: - stats[func] = getattr(np, func)(data) return stats diff --git a/neurom/utils.py b/neurom/utils.py index 90ab2a4b8..87a9e0345 100644 --- a/neurom/utils.py +++ b/neurom/utils.py @@ -46,8 +46,10 @@ def warn_deprecated(msg): def deprecated(fun_name=None, msg=""): """Issue a deprecation warning for a function.""" + def _deprecated(fun): """Issue a deprecation warning for a function.""" + @wraps(fun) def _wrapper(*args, **kwargs): """Issue deprecation warning and forward arguments to fun.""" @@ -136,3 +138,13 @@ def str_to_plane(plane): def flatten(list_of_lists): """Flatten one level 
of nesting.""" return chain.from_iterable(list_of_lists) + + +def filtered_iterator(predicate, iterator_type): + """Returns an iterator function that is filtered by the predicate.""" + + @wraps(iterator_type) + def composed(*args, **kwargs): + return filter(predicate, iterator_type(*args, **kwargs)) + + return composed diff --git a/neurom/view/__init__.py b/neurom/view/__init__.py index 87a4bcabb..e69d8e929 100644 --- a/neurom/view/__init__.py +++ b/neurom/view/__init__.py @@ -27,7 +27,4 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """View tools to visualize morphologies.""" -from neurom.view.matplotlib_impl import (plot_morph, plot_morph3d, - plot_tree, plot_tree3d, - plot_soma, plot_soma3d, - plot_dendrogram) +from neurom.view.matplotlib_impl import plot_dendrogram, plot_morph, plot_morph3d diff --git a/neurom/view/dendrogram.py b/neurom/view/dendrogram.py index a416bb744..7a32d49c3 100644 --- a/neurom/view/dendrogram.py +++ b/neurom/view/dendrogram.py @@ -29,9 +29,10 @@ """Dendrogram helper functions and class.""" import numpy as np + from neurom import NeuriteType -from neurom.core.morphology import Neurite, Morphology from neurom.core.dataformat import COLS +from neurom.core.morphology import Morphology, Neurite from neurom.morphmath import interval_lengths @@ -49,7 +50,8 @@ def __init__(self, neurom_section): self.height = 1 self.width = 1 self.coords = self.get_coords( - np.array([0, self.height]), np.array([.5 * self.width, .5 * self.width])) + np.array([0, self.height]), np.array([0.5 * self.width, 0.5 * self.width]) + ) self.children = [Dendrogram(neurite.root_node) for neurite in neurom_section.neurites] else: if isinstance(neurom_section, Neurite): @@ -105,6 +107,7 @@ class _PositionedDendrogram: calculation we start to lay out. Each child gets its X coordinate as: parent's X + previous sibling children widths + half of this child's width. 
""" + HORIZONTAL_PADDING = 2 def __init__(self, dendrogram): @@ -120,10 +123,10 @@ def position_at(self, origin): # pylint: disable=missing-docstring positions = {self.dendrogram: origin} if self.children: end_point = origin + [0, self.dendrogram.height] - left_bottom_offset = [-.5 * self.total_width, 0] + left_bottom_offset = [-0.5 * self.total_width, 0] children_origin = end_point + left_bottom_offset for child in self.children: - child_origin = children_origin + [.5 * child.total_width, 0] + child_origin = children_origin + [0.5 * child.total_width, 0] positions.update(child.position_at(child_origin)) children_origin += [child.total_width + self.HORIZONTAL_PADDING, 0] return positions diff --git a/neurom/view/matplotlib_impl.py b/neurom/view/matplotlib_impl.py index b3dcbc246..200b9a8f3 100644 --- a/neurom/view/matplotlib_impl.py +++ b/neurom/view/matplotlib_impl.py @@ -29,35 +29,38 @@ """Morphology draw functions using matplotlib.""" from functools import wraps + import numpy as np from matplotlib.collections import LineCollection, PatchCollection from matplotlib.lines import Line2D from matplotlib.patches import Circle, FancyArrowPatch, Polygon, Rectangle from mpl_toolkits.mplot3d.art3d import Line3DCollection + from neurom import NeuriteType, geom +from neurom.core.dataformat import COLS from neurom.core.morphology import iter_neurites, iter_sections, iter_segments from neurom.core.soma import SomaCylinders -from neurom.core.dataformat import COLS from neurom.core.types import tree_type_checker from neurom.morphmath import segment_radius -from neurom.view.dendrogram import Dendrogram, get_size, layout_dendrogram, move_positions - from neurom.view import matplotlib_utils +from neurom.view.dendrogram import Dendrogram, get_size, layout_dendrogram, move_positions _LINEWIDTH = 1.2 _ALPHA = 0.8 _DIAMETER_SCALE = 1.0 -TREE_COLOR = {NeuriteType.basal_dendrite: 'red', - NeuriteType.apical_dendrite: 'purple', - NeuriteType.axon: 'blue', - NeuriteType.soma: 'black', - 
NeuriteType.undefined: 'green', - NeuriteType.custom5: 'orange', - NeuriteType.custom6: 'orange', - NeuriteType.custom7: 'orange', - NeuriteType.custom8: 'orange', - NeuriteType.custom9: 'orange', - NeuriteType.custom10: 'orange'} +TREE_COLOR = { + NeuriteType.basal_dendrite: 'red', + NeuriteType.apical_dendrite: 'purple', + NeuriteType.axon: 'blue', + NeuriteType.soma: 'black', + NeuriteType.undefined: 'green', + NeuriteType.custom5: 'orange', + NeuriteType.custom6: 'orange', + NeuriteType.custom7: 'orange', + NeuriteType.custom8: 'orange', + NeuriteType.custom9: 'orange', + NeuriteType.custom10: 'orange', +} def _implicit_ax(plot_func, params=None): @@ -86,8 +89,10 @@ def _plane2col(plane): """Take a string like 'xy', and return the indices from COLS.*.""" planes = ('xy', 'yx', 'xz', 'zx', 'yz', 'zy') assert plane in planes, 'No such plane found! Please select one of: ' + str(planes) - return (getattr(COLS, plane[0].capitalize()), - getattr(COLS, plane[1].capitalize()), ) + return ( + getattr(COLS, plane[0].capitalize()), + getattr(COLS, plane[1].capitalize()), + ) def _get_linewidth(tree, linewidth, diameter_scale): @@ -98,8 +103,7 @@ def _get_linewidth(tree, linewidth, diameter_scale): If diameter_scale is None, the linewidth is used. """ if diameter_scale is not None and tree: - linewidth = [2 * segment_radius(s) * diameter_scale - for s in iter_segments(tree)] + linewidth = [2 * segment_radius(s) * diameter_scale for s in iter_segments(tree)] return linewidth @@ -111,9 +115,16 @@ def _get_color(treecolor, tree_type): @_implicit_ax -def plot_tree(tree, ax=None, plane='xy', - diameter_scale=_DIAMETER_SCALE, linewidth=_LINEWIDTH, - color=None, alpha=_ALPHA, realistic_diameters=False): +def plot_tree( + tree, + ax=None, + plane='xy', + diameter_scale=_DIAMETER_SCALE, + linewidth=_LINEWIDTH, + color=None, + alpha=_ALPHA, + realistic_diameters=False, +): """Plots a 2d figure of the tree's segments. 
Args: @@ -132,33 +143,41 @@ def plot_tree(tree, ax=None, plane='xy', """ plane0, plane1 = _plane2col(plane) - section_segment_list = [(section, segment) - for section in iter_sections(tree) - for segment in iter_segments(section)] + section_segment_list = [ + (section, segment) for section in iter_sections(tree) for segment in iter_segments(section) + ] colors = [_get_color(color, section.type) for section, _ in section_segment_list] if realistic_diameters: + def _get_rectangle(x, y, linewidth): """Draw a rectangle to represent a secgment.""" x, y = np.array(x), np.array(y) diff = y - x angle = np.arctan2(diff[1], diff[0]) % (2 * np.pi) - return Rectangle(x - linewidth / 2. * np.array([-np.sin(angle), np.cos(angle)]), - np.linalg.norm(diff), - linewidth, - angle=np.rad2deg(angle)) - - segs = [_get_rectangle((seg[0][plane0], seg[0][plane1]), - (seg[1][plane0], seg[1][plane1]), - 2 * segment_radius(seg) * diameter_scale) - for _, seg in section_segment_list] + return Rectangle( + x - linewidth / 2.0 * np.array([-np.sin(angle), np.cos(angle)]), + np.linalg.norm(diff), + linewidth, + angle=np.rad2deg(angle), + ) + + segs = [ + _get_rectangle( + (seg[0][plane0], seg[0][plane1]), + (seg[1][plane0], seg[1][plane1]), + 2 * segment_radius(seg) * diameter_scale, + ) + for _, seg in section_segment_list + ] collection = PatchCollection(segs, alpha=alpha, facecolors=colors) else: - segs = [((seg[0][plane0], seg[0][plane1]), - (seg[1][plane0], seg[1][plane1])) - for _, seg in section_segment_list] + segs = [ + ((seg[0][plane0], seg[0][plane1]), (seg[1][plane0], seg[1][plane1])) + for _, seg in section_segment_list + ] linewidth = _get_linewidth( tree, @@ -171,10 +190,9 @@ def _get_rectangle(x, y, linewidth): @_implicit_ax -def plot_soma(soma, ax=None, plane='xy', - soma_outline=True, - linewidth=_LINEWIDTH, - color=None, alpha=_ALPHA): +def plot_soma( + soma, ax=None, plane='xy', soma_outline=True, linewidth=_LINEWIDTH, color=None, alpha=_ALPHA +): """Generates a 2d figure of 
the soma. Args: @@ -192,14 +210,20 @@ def plot_soma(soma, ax=None, plane='xy', if isinstance(soma, SomaCylinders): for start, end in zip(soma.points, soma.points[1:]): matplotlib_utils.project_cylinder_onto_2d( - ax, (plane0, plane1), - start=start[COLS.XYZ], end=end[COLS.XYZ], - start_radius=start[COLS.R], end_radius=end[COLS.R], - color=color, alpha=alpha) + ax, + (plane0, plane1), + start=start[COLS.XYZ], + end=end[COLS.XYZ], + start_radius=start[COLS.R], + end_radius=end[COLS.R], + color=color, + alpha=alpha, + ) else: if soma_outline: - ax.add_artist(Circle(soma.center[[plane0, plane1]], soma.radius, - color=color, alpha=alpha)) + ax.add_artist( + Circle(soma.center[[plane0, plane1]], soma.radius, color=color, alpha=alpha) + ) else: points = [[p[plane0], p[plane1]] for p in soma.iter()] if points: @@ -211,19 +235,31 @@ def plot_soma(soma, ax=None, plane='xy', ax.set_ylabel(plane[1]) bounding_box = geom.bounding_box(soma) - ax.dataLim.update_from_data_xy(np.vstack(([bounding_box[0][plane0], bounding_box[0][plane1]], - [bounding_box[1][plane0], bounding_box[1][plane1]])), - ignore=False) + ax.dataLim.update_from_data_xy( + np.vstack( + ( + [bounding_box[0][plane0], bounding_box[0][plane1]], + [bounding_box[1][plane0], bounding_box[1][plane1]], + ) + ), + ignore=False, + ) # pylint: disable=too-many-arguments @_implicit_ax -def plot_morph(morph, ax=None, - neurite_type=NeuriteType.all, - plane='xy', - soma_outline=True, - diameter_scale=_DIAMETER_SCALE, linewidth=_LINEWIDTH, - color=None, alpha=_ALPHA, realistic_diameters=False): +def plot_morph( + morph, + ax=None, + neurite_type=NeuriteType.all, + plane='xy', + soma_outline=True, + diameter_scale=_DIAMETER_SCALE, + linewidth=_LINEWIDTH, + color=None, + alpha=_ALPHA, + realistic_diameters=False, +): """Plots a 2D figure of the morphology, that contains a soma and the neurites. 
Args: @@ -238,13 +274,27 @@ def plot_morph(morph, ax=None, alpha(float): Transparency of plotted values realistic_diameters(bool): scale linewidths with axis data coordinates """ - plot_soma(morph.soma, ax, plane=plane, soma_outline=soma_outline, linewidth=linewidth, - color=color, alpha=alpha) + plot_soma( + morph.soma, + ax, + plane=plane, + soma_outline=soma_outline, + linewidth=linewidth, + color=color, + alpha=alpha, + ) for neurite in iter_neurites(morph, filt=tree_type_checker(neurite_type)): - plot_tree(neurite, ax, plane=plane, - diameter_scale=diameter_scale, linewidth=linewidth, - color=color, alpha=alpha, realistic_diameters=realistic_diameters) + plot_tree( + neurite, + ax, + plane=plane, + diameter_scale=diameter_scale, + linewidth=linewidth, + color=color, + alpha=alpha, + realistic_diameters=realistic_diameters, + ) ax.set_title(morph.name) ax.set_xlabel(plane[0]) @@ -254,19 +304,22 @@ def plot_morph(morph, ax=None, def _update_3d_datalim(ax, obj): """Unlike w/ 2d Axes, the dataLim isn't set by collections, so it has to be updated manually.""" min_bounding_box, max_bounding_box = geom.bounding_box(obj) - xy_bounds = np.vstack((min_bounding_box[:COLS.Z], - max_bounding_box[:COLS.Z])) + xy_bounds = np.vstack((min_bounding_box[: COLS.Z], max_bounding_box[: COLS.Z])) ax.xy_dataLim.update_from_data_xy(xy_bounds, ignore=False) - z_bounds = np.vstack(((min_bounding_box[COLS.Z], min_bounding_box[COLS.Z]), - (max_bounding_box[COLS.Z], max_bounding_box[COLS.Z]))) + z_bounds = np.vstack( + ( + (min_bounding_box[COLS.Z], min_bounding_box[COLS.Z]), + (max_bounding_box[COLS.Z], max_bounding_box[COLS.Z]), + ) + ) ax.zz_dataLim.update_from_data_xy(z_bounds, ignore=False) @_implicit_ax3d -def plot_tree3d(tree, ax=None, - diameter_scale=_DIAMETER_SCALE, linewidth=_LINEWIDTH, - color=None, alpha=_ALPHA): +def plot_tree3d( + tree, ax=None, diameter_scale=_DIAMETER_SCALE, linewidth=_LINEWIDTH, color=None, alpha=_ALPHA +): """Generates a figure of the tree in 3d. 
If the tree contains one single point the plot will be empty \ @@ -280,9 +333,9 @@ def plot_tree3d(tree, ax=None, color(str or None): Color of plotted values, None corresponds to default choice alpha(float): Transparency of plotted values """ - section_segment_list = [(section, segment) - for section in iter_sections(tree) - for segment in iter_segments(section)] + section_segment_list = [ + (section, segment) for section in iter_sections(tree) for segment in iter_segments(section) + ] segs = [(seg[0][COLS.XYZ], seg[1][COLS.XYZ]) for _, seg in section_segment_list] colors = [_get_color(color, section.type) for section, _ in section_segment_list] @@ -308,22 +361,34 @@ def plot_soma3d(soma, ax=None, color=None, alpha=_ALPHA): if isinstance(soma, SomaCylinders): for start, end in zip(soma.points, soma.points[1:]): - matplotlib_utils.plot_cylinder(ax, - start=start[COLS.XYZ], end=end[COLS.XYZ], - start_radius=start[COLS.R], end_radius=end[COLS.R], - color=color, alpha=alpha) + matplotlib_utils.plot_cylinder( + ax, + start=start[COLS.XYZ], + end=end[COLS.XYZ], + start_radius=start[COLS.R], + end_radius=end[COLS.R], + color=color, + alpha=alpha, + ) else: - matplotlib_utils.plot_sphere(ax, center=soma.center[COLS.XYZ], radius=soma.radius, - color=color, alpha=alpha) + matplotlib_utils.plot_sphere( + ax, center=soma.center[COLS.XYZ], radius=soma.radius, color=color, alpha=alpha + ) # unlike w/ 2d Axes, the dataLim isn't set by collections, so it has to be updated manually _update_3d_datalim(ax, soma) @_implicit_ax3d -def plot_morph3d(morph, ax=None, neurite_type=NeuriteType.all, - diameter_scale=_DIAMETER_SCALE, linewidth=_LINEWIDTH, - color=None, alpha=_ALPHA): +def plot_morph3d( + morph, + ax=None, + neurite_type=NeuriteType.all, + diameter_scale=_DIAMETER_SCALE, + linewidth=_LINEWIDTH, + color=None, + alpha=_ALPHA, +): """Generates a figure of the morphology, that contains a soma and a list of trees. 
Args: @@ -338,9 +403,14 @@ def plot_morph3d(morph, ax=None, neurite_type=NeuriteType.all, plot_soma3d(morph.soma, ax, color=color, alpha=alpha) for neurite in iter_neurites(morph, filt=tree_type_checker(neurite_type)): - plot_tree3d(neurite, ax, - diameter_scale=diameter_scale, linewidth=linewidth, - color=color, alpha=alpha) + plot_tree3d( + neurite, + ax, + diameter_scale=diameter_scale, + linewidth=linewidth, + color=color, + alpha=alpha, + ) ax.set_title(morph.name) @@ -355,12 +425,15 @@ def _get_dendrogram_legend(dendrogram): Returns: List of legend handles. """ + def neurite_legend(neurite_type): return Line2D([0], [0], color=TREE_COLOR[neurite_type], lw=2, label=neurite_type.name) if dendrogram.neurite_type == NeuriteType.soma: - handles = {d.neurite_type: neurite_legend(d.neurite_type) - for d in [dendrogram] + dendrogram.children} + handles = { + d.neurite_type: neurite_legend(d.neurite_type) + for d in [dendrogram] + dendrogram.children + } return handles.values() return [neurite_legend(dendrogram.neurite_type)] @@ -409,9 +482,9 @@ def plot_dendrogram(obj, ax=None, show_diameters=True): dendrogram = Dendrogram(obj) positions = layout_dendrogram(dendrogram, np.array([0, 0])) w, h = get_size(positions) - positions = move_positions(positions, np.array([.5 * w, 0])) - ax.set_xlim([-.05 * w, 1.05 * w]) - ax.set_ylim([-.05 * h, 1.05 * h]) + positions = move_positions(positions, np.array([0.5 * w, 0])) + ax.set_xlim([-0.05 * w, 1.05 * w]) + ax.set_ylim([-0.05 * h, 1.05 * h]) ax.set_title('Morphology Dendrogram') ax.set_xlabel('micrometers (um)') ax.set_ylabel('micrometers (um)') diff --git a/neurom/view/matplotlib_utils.py b/neurom/view/matplotlib_utils.py index 77d2eb559..437e7268e 100644 --- a/neurom/view/matplotlib_utils.py +++ b/neurom/view/matplotlib_utils.py @@ -32,12 +32,12 @@ import numpy as np from matplotlib.patches import Polygon + # needed so that projection='3d' works with fig.add_subplot from mpl_toolkits.mplot3d import Axes3D # pylint: 
disable=unused-import from scipy.linalg import norm from scipy.spatial import ConvexHull - plt = None # refer to _get_plt() @@ -45,6 +45,7 @@ def _get_plt(): """Wrapper to avoid loading matplotlib.pyplot before someone has a chance to set the backend.""" global plt # pylint: disable=global-statement import matplotlib.pyplot # pylint: disable=import-outside-toplevel + plt = matplotlib.pyplot @@ -111,8 +112,17 @@ def get_figure(new_fig=True, subplot=(1, 1, 1), params=None): return fig, ax -def save_plot(fig, prefile='', postfile='', output_path='./', output_name='Figure', - output_format='png', dpi=300, transparent=False, **_): +def save_plot( + fig, + prefile='', + postfile='', + output_path='./', + output_name='Figure', + output_format='png', + dpi=300, + transparent=False, + **_, +): """Generates a figure file in the selected directory. Args: @@ -128,43 +138,49 @@ def save_plot(fig, prefile='', postfile='', output_path='./', output_name='Figur output_path = Path(output_path) output_path.mkdir(parents=True, exist_ok=True) - fig.savefig(Path(output_path, prefile + output_name + postfile + "." + output_format), - dpi=dpi, transparent=transparent) - - -def plot_style(fig, ax, # pylint: disable=too-many-arguments, too-many-locals - # plot_title - pretitle='', - title='Figure', - posttitle='', - title_fontsize=14, - title_arg=None, - # plot_labels - label_fontsize=14, - xlabel=None, - xlabel_arg=None, - ylabel=None, - ylabel_arg=None, - zlabel=None, - zlabel_arg=None, - # plot_ticks - tick_fontsize=12, - xticks=None, - xticks_args=None, - yticks=None, - yticks_args=None, - zticks=None, - zticks_args=None, - # update_plot_limits - white_space=30, - # plot_legend - no_legend=True, - legend_arg=None, - # internal - no_axes=False, - aspect_ratio='equal', - tight=False, - **_): + fig.savefig( + Path(output_path, prefile + output_name + postfile + "." 
+ output_format), + dpi=dpi, + transparent=transparent, + ) + + +def plot_style( + fig, + ax, + # plot_title + pretitle='', + title='Figure', + posttitle='', + title_fontsize=14, + title_arg=None, + # plot_labels + label_fontsize=14, + xlabel=None, + xlabel_arg=None, + ylabel=None, + ylabel_arg=None, + zlabel=None, + zlabel_arg=None, + # plot_ticks + tick_fontsize=12, + xticks=None, + xticks_args=None, + yticks=None, + yticks_args=None, + zticks=None, + zticks_args=None, + # update_plot_limits + white_space=30, + # plot_legend + no_legend=True, + legend_arg=None, + # internal + no_axes=False, + aspect_ratio='equal', + tight=False, + **_, +): # pylint: disable=too-many-arguments, too-many-locals """Set the basic options of a matplotlib figure, to be used by viewing - plotting functions. Args: @@ -244,10 +260,16 @@ def plot_title(ax, pretitle='', title='Figure', posttitle='', title_fontsize=14, ax.set_title(current_title, fontsize=title_fontsize, **title_arg) -def plot_labels(ax, label_fontsize=14, - xlabel=None, xlabel_arg=None, - ylabel=None, ylabel_arg=None, - zlabel=None, zlabel_arg=None): +def plot_labels( + ax, + label_fontsize=14, + xlabel=None, + xlabel_arg=None, + ylabel=None, + ylabel_arg=None, + zlabel=None, + zlabel_arg=None, +): """Sets the labels options of a matplotlib plot. Args: @@ -275,10 +297,16 @@ def plot_labels(ax, label_fontsize=14, ax.set_zlabel(zlabel, fontsize=label_fontsize, **zlabel_arg) -def plot_ticks(ax, tick_fontsize=12, - xticks=None, xticks_args=None, - yticks=None, yticks_args=None, - zticks=None, zticks_args=None): +def plot_ticks( + ax, + tick_fontsize=12, + xticks=None, + xticks_args=None, + yticks=None, + yticks_args=None, + zticks=None, + zticks_args=None, +): """Function that defines the labels options of a matplotlib plot. 
Args: @@ -361,8 +389,9 @@ def _get_normals(v): return n1, n2 -def generate_cylindrical_points(start, end, start_radius, end_radius, - linspace_count=_LINSPACE_COUNT): +def generate_cylindrical_points( + start, end, start_radius, end_radius, linspace_count=_LINSPACE_COUNT +): """Generate a 3d mesh of a cylinder with start and end points, and varying radius. Based on: http://stackoverflow.com/a/32383775 @@ -373,22 +402,20 @@ def generate_cylindrical_points(start, end, start_radius, end_radius, n1, n2 = _get_normals(v) # pylint: disable=unbalanced-tuple-unpacking - l, theta = np.meshgrid(np.linspace(0, length, linspace_count), - np.linspace(0, 2 * np.pi, linspace_count)) + l, theta = np.meshgrid( + np.linspace(0, length, linspace_count), np.linspace(0, 2 * np.pi, linspace_count) + ) radii = np.linspace(start_radius, end_radius, linspace_count) rsin = np.multiply(radii, np.sin(theta)) rcos = np.multiply(radii, np.cos(theta)) - return np.array([start[i] + - v[i] * l + - n1[i] * rsin + n2[i] * rcos - for i in range(3)]) + return np.array([start[i] + v[i] * l + n1[i] * rsin + n2[i] * rcos for i in range(3)]) -def project_cylinder_onto_2d(ax, plane, - start, end, start_radius, end_radius, - color='black', alpha=1.): +def project_cylinder_onto_2d( + ax, plane, start, end, start_radius, end_radius, color='black', alpha=1.0 +): """Take cylinder defined by start/end, and project it onto the plane. 
Args: @@ -406,23 +433,31 @@ def project_cylinder_onto_2d(ax, plane, tight convex hull is found, and that is used for a filled polygon """ points = generate_cylindrical_points(start, end, start_radius, end_radius, 10) - points = np.vstack([points[plane[0]].ravel(), - points[plane[1]].ravel()]) + points = np.vstack([points[plane[0]].ravel(), points[plane[1]].ravel()]) points = points.T hull = ConvexHull(points) ax.add_patch(Polygon(points[hull.vertices], fill=True, color=color, alpha=alpha)) -def plot_cylinder(ax, start, end, start_radius, end_radius, - color='black', alpha=1., linspace_count=_LINSPACE_COUNT): +def plot_cylinder( + ax, + start, + end, + start_radius, + end_radius, + color='black', + alpha=1.0, + linspace_count=_LINSPACE_COUNT, +): """Plot a 3d cylinder.""" assert not np.all(start == end), 'Cylinder must have length' - x, y, z = generate_cylindrical_points(start, end, start_radius, end_radius, - linspace_count=linspace_count) + x, y, z = generate_cylindrical_points( + start, end, start_radius, end_radius, linspace_count=linspace_count + ) ax.plot_surface(x, y, z, color=color, alpha=alpha) -def plot_sphere(ax, center, radius, color='black', alpha=1., linspace_count=_LINSPACE_COUNT): +def plot_sphere(ax, center, radius, color='black', alpha=1.0, linspace_count=_LINSPACE_COUNT): """Plots a 3d sphere, given the center and the radius.""" u = np.linspace(0, 2 * np.pi, linspace_count) v = np.linspace(0, np.pi, linspace_count) diff --git a/neurom/view/plotly_impl.py b/neurom/view/plotly_impl.py index cdf7f7949..a922784fc 100644 --- a/neurom/view/plotly_impl.py +++ b/neurom/view/plotly_impl.py @@ -30,19 +30,18 @@ import numpy as np - try: import plotly.graph_objs as go - from plotly.offline import plot, iplot, init_notebook_mode + from plotly.offline import init_notebook_mode, iplot, plot except ImportError as e: raise ImportError( 'neurom[plotly] is not installed. 
Please install it by doing: pip install neurom[plotly]' ) from e -from neurom import COLS, iter_segments, iter_neurites +from neurom import COLS, iter_neurites, iter_segments from neurom.core.morphology import Morphology -from neurom.view.matplotlib_impl import TREE_COLOR from neurom.utils import flatten +from neurom.view.matplotlib_impl import TREE_COLOR def plot_morph(morph, plane='xy', inline=False, **kwargs): @@ -75,26 +74,19 @@ def _make_trace(morph, plane): segs = [(s[0][COLS.XYZ], s[1][COLS.XYZ]) for s in segments] - coords = {'x': list(flatten((p1[0], p2[0], None) for p1, p2 in segs)), - 'y': list(flatten((p1[1], p2[1], None) for p1, p2 in segs)), - 'z': list(flatten((p1[2], p2[2], None) for p1, p2 in segs)), - } + coords = { + "x": list(flatten((p1[0], p2[0], None) for p1, p2 in segs)), + "y": list(flatten((p1[1], p2[1], None) for p1, p2 in segs)), + "z": list(flatten((p1[2], p2[2], None) for p1, p2 in segs)), + } color = TREE_COLOR.get(neurite.root_node.type, 'black') if plane.lower() == '3d': plot_fun = go.Scatter3d else: plot_fun = go.Scatter - coords = {'x': coords[plane[0]], - 'y': coords[plane[1]], - } - yield plot_fun( - line={'color': color, - 'width': 2, - }, - mode='lines', - **coords - ) + coords = {"x": coords[plane[0]], "y": coords[plane[1]]} + yield plot_fun(line={"color": color, "width": 2}, mode='lines', **coords) def _fill_soma_data(morph, data, plane): @@ -114,7 +106,6 @@ def _fill_soma_data(morph, data, plane): 'y0': morph.soma.center[1] - morph.soma.radius, 'x1': morph.soma.center[0] + morph.soma.radius, 'y1': morph.soma.center[1] + morph.soma.radius, - 'line': { 'color': 'rgba(50, 171, 96, 1)', }, @@ -144,34 +135,33 @@ def get_figure(morph, plane, title): """Returns the plotly figure containing the morphology.""" data = list(_make_trace(morph, plane)) axis = { - 'gridcolor': 'rgb(255, 255, 255)', - 'zerolinecolor': 'rgb(255, 255, 255)', - 'showbackground': True, - 'backgroundcolor': 'rgb(230, 230,230)', + "gridcolor": "rgb(255, 255, 
255)", + "zerolinecolor": "rgb(255, 255, 255)", + "showbackground": True, + "backgroundcolor": "rgb(230, 230,230)", } soma_2d = _fill_soma_data(morph, data, plane) layout = { - 'autosize': True, - 'title': title, - 'scene': { - 'xaxis': axis, - 'yaxis': axis, - 'zaxis': axis, - 'camera': {'up': {'x': 0, - 'y': 0, - 'z': 1, - }, - 'eye': {'x': -1.7428, - 'y': 1.0707, - 'z': 0.7100, - }, - }, - 'aspectmode': 'data', + "autosize": True, + "title": title, + "scene": { # This is used for 3D plots + "xaxis": axis, + "yaxis": axis, + "zaxis": axis, + "camera": { + "up": {"x": 0, "y": 0, "z": 1}, + "eye": { + "x": -1.7428, + "y": 1.0707, + "z": 0.7100, + }, + }, + "aspectmode": "data", }, - 'yaxis': {'scaleanchor': "x"}, # This is used for 2D plots - 'shapes': soma_2d, + "yaxis": {"scaleanchor": "x"}, # This is used for 2D plots + "shapes": soma_2d, } res = {"data": data, "layout": layout} diff --git a/neurom/viewer.py b/neurom/viewer.py deleted file mode 100644 index 3faadc62b..000000000 --- a/neurom/viewer.py +++ /dev/null @@ -1,136 +0,0 @@ -# Copyright (c) 2015, Ecole Polytechnique Federale de Lausanne, Blue Brain Project -# All rights reserved. -# -# This file is part of NeuroM -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# 3. Neither the name of the copyright holder nor the names of -# its contributors may be used to endorse or promote products -# derived from this software without specific prior written permission. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY -# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - -"""Tools to visualize neuron morphological objects. - -Examples: - >>> from neurom import viewer - >>> m = ... # load a neuron - >>> viewer.draw(m) # 2d plot - >>> viewer.draw(m, mode='3d') # 3d plot - >>> viewer.draw(m.neurites[0]) # 2d plot of neurite tree - >>> viewer.draw(m, mode='dendrogram') # dendrogram plot -""" - -from neurom.view.matplotlib_impl import (plot_morph, plot_morph3d, - plot_tree, plot_tree3d, - plot_soma, plot_soma3d, - plot_dendrogram) -from neurom.view import matplotlib_utils -from neurom.core.morphology import Section, Neurite, Morphology -from neurom.core.soma import Soma -from neurom.utils import deprecated_module - -deprecated_module('Module `viewer` is deprecated. 
See the documentation\'s migration page.') - -MODES = ('2d', '3d', 'dendrogram') - -_VIEWERS = { - 'neuron_3d': plot_morph3d, - 'neuron_2d': plot_morph, - 'neuron_dendrogram': plot_dendrogram, - 'tree_3d': plot_tree3d, - 'tree_2d': plot_tree, - 'tree_dendrogram': plot_dendrogram, - 'soma_3d': plot_soma3d, - 'soma_2d': plot_soma -} - - -class ViewerError(Exception): - """Base class for viewer exceptions.""" - - -class InvalidDrawModeError(ViewerError): - """Exception class to indicate invalid draw mode.""" - - -class NotDrawableError(Exception): - """Exception class for things that aren't drawable.""" - - -def draw(obj, mode='2d', **kwargs): - """Draw a morphology object. - - Arguments: - obj: morphology object to be drawn (neuron, tree, soma). - mode (Optional[str]): drawing mode ('2d', '3d', 'dendrogram'). Defaults to '2d'. - **kwargs: keyword arguments for underlying neurom.view.view functions. - - Raises: - InvalidDrawModeError if mode is not valid - NotDrawableError if obj is not drawable - NotDrawableError if obj type and mode combination is not drawable - - Examples: - >>> from neurom import viewer, load_morphology - >>> m = load_morphology('/path/to/morphology') # load a neuron - >>> fig, _ = viewer.draw(m) # 2d plot - >>> fig.show() - >>> fig3d, _ = viewer.draw(m, mode='3d') # 3d plot - >>> fig3d.show() - >>> fig, _ = viewer.draw(m.neurites[0]) # 2d plot of neurite tree - >>> dend, _ = viewer.draw(m, mode='dendrogram') - """ - if mode not in MODES: - raise InvalidDrawModeError('Invalid drawing mode %s' % mode) - - if 'realistic_diameters' in kwargs and mode == '3d': - if kwargs['realistic_diameters']: - raise NotImplementedError('Option realistic_diameter not implemented for 3D plots') - del kwargs['realistic_diameters'] - - fig, ax = (matplotlib_utils.get_figure() if mode in ('2d', 'dendrogram') - else matplotlib_utils.get_figure(params={'projection': '3d'})) - - if isinstance(obj, Morphology): - tag = 'neuron' - elif isinstance(obj, (Section, Neurite)): - 
tag = 'tree' - elif isinstance(obj, Soma): - tag = 'soma' - else: - raise NotDrawableError('draw not implemented for %s' % obj.__class__) - - viewer = '%s_%s' % (tag, mode) - try: - plotter = _VIEWERS[viewer] - except KeyError as e: - raise NotDrawableError('No drawer for class %s, mode=%s' % (obj.__class__, mode)) from e - - output_path = kwargs.pop('output_path', None) - plotter(obj, ax, **kwargs) - - if mode != 'dendrogram': - matplotlib_utils.plot_style(fig=fig, ax=ax, **kwargs) - - if output_path: - matplotlib_utils.save_plot(fig=fig, output_path=output_path, **kwargs) - - return fig, ax diff --git a/pylintrc b/pylintrc index e0ccb56d4..6ebd4c1c5 100644 --- a/pylintrc +++ b/pylintrc @@ -1,4 +1,4 @@ -## look at http://docutils.sourceforge.net/sandbox/py-rest-doc/utils/pylintrc +# look at http://docutils.sourceforge.net/sandbox/py-rest-doc/utils/pylintrc # for some of the options that are available [MESSAGES CONTROL] @@ -56,4 +56,4 @@ ignore-docstrings=yes # (useful for modules/projects where namespaces are manipulated during runtime # and thus existing member attributes cannot be deduced by static analysis. It # supports qualified module names, as well as Unix pattern matching. 
-ignored-modules=numpy,numpy.*,scipy.stats,scipy.spatial +ignored-modules=numpy,numpy.*,scipy,scipy.spatial,scipy.stats,scipy.spatial.qhull diff --git a/pyproject.toml b/pyproject.toml index e997c78de..3f3c5b87e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,7 @@ dependencies = [ 'pyyaml>=3.10', 'scipy>=1.2.0', 'tqdm>=4.8.4', + 'cached_property>=1.5.1', 'importlib_resources>=1.3; python_version < "3.9"', ] dynamic = ["version"] @@ -67,3 +68,23 @@ namespaces = false [tool.setuptools_scm] local_scheme = "no-local-version" + +[tool.pytest.ini_options] +testpaths = [ + "tests", +] + +[tool.black] +line-length = 100 +target-version = [ + 'py38', + 'py39', + 'py310', + 'py311', +] +skip-string-normalization = true +include = 'neurom\/.*\.py$|tests\/.*\.py$|doc\/source\/conf\.py$|setup\.py$|examples\/.*\.py$' + +[tool.isort] +profile = "black" +line_length = 100 diff --git a/tests/apps/__init__.py b/tests/apps/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/apps/test_annotate.py b/tests/apps/test_annotate.py index 49a5cbfff..a99b38d11 100644 --- a/tests/apps/test_annotate.py +++ b/tests/apps/test_annotate.py @@ -9,8 +9,9 @@ def test_generate_annotation(): checker_ok = CheckResult(True) - checker_not_ok = CheckResult(False, [('section 1', [[1, 2, 3], [4, 5, 6]]), - ('section 2', [[7, 8, 9], [10, 11, 12]])]) + checker_not_ok = CheckResult( + False, [('section 1', [[1, 2, 3], [4, 5, 6]]), ('section 2', [[7, 8, 9], [10, 11, 12]])] + ) settings = {'color': 'blue', 'label': 'circle', 'name': 'dangling'} assert generate_annotation(checker_ok, settings) == "" @@ -31,7 +32,6 @@ def test_generate_annotation(): def test_annotate(): - correct_result = """ (Circle1 ; MUK_ANNOTATION @@ -41,9 +41,7 @@ def test_annotate(): ) ; MUK_ANNOTATION """ - checkers = {has_no_narrow_start: {"name": "narrow start", - "label": "Circle1", - "color": "Blue"}} + checkers = {has_no_narrow_start: {"name": "narrow start", "label": "Circle1", "color": "Blue"}} 
m = load_morphology(SWC_PATH / 'narrow_start.swc') results = [checker(m) for checker in checkers.keys()] diff --git a/tests/apps/test_cli.py b/tests/apps/test_cli.py index 2c6311065..896764c72 100644 --- a/tests/apps/test_cli.py +++ b/tests/apps/test_cli.py @@ -39,15 +39,12 @@ def test_viewer_plotly(mock): runner = CliRunner() filename = str(DATA / 'swc' / 'simple.swc') - result = runner.invoke(cli, ['view', filename, '--3d', - '--backend', 'plotly']) + result = runner.invoke(cli, ['view', filename, '--3d', '--backend', 'plotly']) assert result.exit_code == 0 mock.assert_called_once() mock.reset_mock() - result = runner.invoke(cli, ['view', filename, - '--backend', 'plotly', - '--plane', 'xy']) + result = runner.invoke(cli, ['view', filename, '--backend', 'plotly', '--plane', 'xy']) assert result.exit_code == 0 mock.assert_called_once() @@ -59,19 +56,26 @@ def test_morph_stat(): result = runner.invoke(cli, ['stats', str(filename), '--output', f.name]) assert result.exit_code == 0 df = pd.read_csv(f) - assert set(df.columns) == {'name', 'axon:max_section_lengths', 'axon:sum_section_lengths', - 'axon:sum_section_volumes', 'axon:max_section_branch_orders', - 'apical_dendrite:max_section_lengths', - 'apical_dendrite:sum_section_lengths', - 'apical_dendrite:sum_section_volumes', - 'apical_dendrite:max_section_branch_orders', - 'basal_dendrite:max_section_lengths', - 'basal_dendrite:sum_section_lengths', - 'basal_dendrite:sum_section_volumes', - 'basal_dendrite:max_section_branch_orders', - 'all:max_section_lengths', - 'all:sum_section_lengths', 'all:sum_section_volumes', - 'all:max_section_branch_orders', 'morphology:mean_soma_radius'} + assert set(df.columns) == { + 'name', + 'axon:max_section_lengths', + 'axon:sum_section_lengths', + 'axon:sum_section_volumes', + 'axon:max_section_branch_orders', + 'apical_dendrite:max_section_lengths', + 'apical_dendrite:sum_section_lengths', + 'apical_dendrite:sum_section_volumes', + 'apical_dendrite:max_section_branch_orders', + 
'basal_dendrite:max_section_lengths', + 'basal_dendrite:sum_section_lengths', + 'basal_dendrite:sum_section_volumes', + 'basal_dendrite:max_section_branch_orders', + 'all:max_section_lengths', + 'all:sum_section_lengths', + 'all:sum_section_volumes', + 'all:max_section_branch_orders', + 'morphology:mean_soma_radius', + } @pytest.mark.filterwarnings('ignore::UserWarning') @@ -85,6 +89,18 @@ def test_morph_stat_full_config(): assert not df.empty +def test_morph_stat_full_config__subtrees(): + runner = CliRunner() + filename = DATA / 'h5/v1/Neuron.h5' + with tempfile.NamedTemporaryFile() as f: + result = runner.invoke( + cli, ['stats', str(filename), '--full-config', '--use-subtrees', '--output', f.name] + ) + assert result.exit_code == 0 + df = pd.read_csv(f) + assert not df.empty + + def test_morph_stat_invalid_config(): runner = CliRunner() with tempfile.NamedTemporaryFile('w') as config_f: @@ -125,13 +141,18 @@ def test_morph_check(): result = runner.invoke(cli, ['check', str(filename), '--output', f.name]) assert result.exit_code == 0 content = json.load(f) - assert content == {'files': { - str(filename.absolute()): {'Has basal dendrite': True, - 'Has axon': True, - 'Has apical dendrite': False, - 'Has all nonzero segment lengths': True, - 'Has all nonzero section lengths': True, - 'Has all nonzero neurite radii': False, - 'Has nonzero soma radius': True, - 'ALL': False}}, - 'STATUS': 'FAIL'} + assert content == { + 'files': { + str(filename.absolute()): { + 'Has basal dendrite': True, + 'Has axon': True, + 'Has apical dendrite': False, + 'Has all nonzero segment lengths': True, + 'Has all nonzero section lengths': True, + 'Has all nonzero neurite radii': False, + 'Has nonzero soma radius': True, + 'ALL': False, + } + }, + 'STATUS': 'FAIL', + } diff --git a/tests/apps/test_config.py b/tests/apps/test_config.py index 9e44cc9d9..cae0781d9 100644 --- a/tests/apps/test_config.py +++ b/tests/apps/test_config.py @@ -39,7 +39,15 @@ def test_get_config(): test_yaml = 
Path(__file__).parent.parent.parent / 'neurom/apps/config/morph_stats.yaml' - expected = {'neurite': {'section_lengths': ['max', 'sum'], 'section_volumes': ['sum'], 'section_branch_orders': ['max']}, 'neurite_type': ['AXON', 'APICAL_DENDRITE', 'BASAL_DENDRITE', 'ALL'], 'morphology': {'soma_radius': ['mean']}} + expected = { + 'neurite': { + 'section_lengths': ['max', 'sum'], + 'section_volumes': ['sum'], + 'section_branch_orders': ['max'], + }, + 'neurite_type': ['AXON', 'APICAL_DENDRITE', 'BASAL_DENDRITE', 'ALL'], + 'morphology': {'soma_radius': ['mean']}, + } config = get_config(None, test_yaml) assert config == expected diff --git a/tests/apps/test_morph_stats.py b/tests/apps/test_morph_stats.py index acaeaf8ac..dc4f56f62 100644 --- a/tests/apps/test_morph_stats.py +++ b/tests/apps/test_morph_stats.py @@ -57,7 +57,7 @@ 'morphology': { 'soma_radius': ['mean'], 'max_radial_distance': ['mean'], - } + }, } REF_CONFIG_NEW = { @@ -72,11 +72,10 @@ 'morphology': { 'soma_radius': {'modes': ['mean']}, 'max_radial_distance': {'modes': ['mean']}, - } + }, } - REF_OUT = { 'morphology': { 'mean_soma_radius': 0.13065629648763766, @@ -86,7 +85,9 @@ 'sum_section_lengths': 207.87975220908129, 'max_section_lengths': 11.018460736176685, 'max_section_branch_orders': 10, - 'raw_section_branch_orders': [0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10], + 'raw_section_branch_orders': [ + 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, + ], # fmt: skip 'sum_section_volumes': 276.73857657289523, 'max_segment_midpoints_0': 0.0, 'max_segment_midpoints_1': 0.0, @@ -97,7 +98,11 @@ 'sum_section_lengths': 840.68521442251949, 'max_section_lengths': 11.758281556059444, 'max_section_branch_orders': 10, - 'raw_section_branch_orders': [0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 
7, 7, 8, 8, 9, 9, 10, 10], + 'raw_section_branch_orders': [ + 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 0, 1, 1, 2, 2, 3, 3, 4, + 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, + 8, 9, 9, 10, 10, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, + ], # fmt: skip 'sum_section_volumes': 1104.9077419665782, 'max_segment_midpoints_0': 64.401674984050004, 'max_segment_midpoints_1': 48.48197694465, @@ -108,7 +113,9 @@ 'sum_section_lengths': 214.37304577550353, 'max_section_lengths': 11.758281556059444, 'max_section_branch_orders': 10, - 'raw_section_branch_orders': [0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10], + 'raw_section_branch_orders': [ + 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, + ], # fmt: skip 'sum_section_volumes': 271.9412385728449, 'max_segment_midpoints_0': 64.401674984050004, 'max_segment_midpoints_1': 0.0, @@ -119,7 +126,10 @@ 'sum_section_lengths': 418.43241643793476, 'max_section_lengths': 11.652508126101711, 'max_section_branch_orders': 10, - 'raw_section_branch_orders': [0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10], + 'raw_section_branch_orders': [ + 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 0, 1, 1, 2, 2, 3, 3, 4, + 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, + ], # fmt: skip 'sum_section_volumes': 556.22792682083821, 'max_segment_midpoints_0': 64.007872333250006, 'max_segment_midpoints_1': 48.48197694465, @@ -174,7 +184,7 @@ def test_stats_new_format_set_arg(): 'neurite_type': ['AXON', 'APICAL_DENDRITE', 'BASAL_DENDRITE', 'ALL'], 'morphology': { 'soma_radius': {'modes': ['mean']}, - } + }, } initial_config = deepcopy(config) @@ -196,20 +206,19 @@ def test_extract_stats_scalar_feature(): }, 'morphology': { 'soma_volume': ['sum'], - } + }, } with warnings.catch_warnings(): # silence warning about approximating soma volume with 
a sphere warnings.simplefilter("ignore", category=UserWarning) res = ms.extract_stats(m, config) - - assert res == {'all': {'max_number_of_forking_points': 277}, - 'morphology': {'sum_soma_volume': 1424.4383771584492}} - + assert res == { + 'all': {'max_number_of_forking_points': 277}, + 'morphology': {'sum_soma_volume': 1424.4383771584492}, + } def test_extract_stats__kwarg_modes_multiple_features(): - m = nm.load_morphology(SWC_PATH / 'Neuron.swc') config = { 'neurite': { @@ -219,7 +228,7 @@ def test_extract_stats__kwarg_modes_multiple_features(): {"direction": 1}, {"direction": 0}, ], - 'modes': ['sum', "min"] + 'modes': ['sum', "min"], }, }, 'neurite_type': ['AXON', 'APICAL_DENDRITE', 'BASAL_DENDRITE', 'ALL'], @@ -232,7 +241,7 @@ def test_extract_stats__kwarg_modes_multiple_features(): ], 'modes': ['min', 'max'], }, - } + }, } res = ms.extract_stats(m, config) @@ -240,7 +249,6 @@ def test_extract_stats__kwarg_modes_multiple_features(): assert set(res.keys()) == {"axon", "basal_dendrite", "apical_dendrite", "all", "morphology"} for key in ("axon", "basal_dendrite", "apical_dendrite", "all"): - assert set(res[key].keys()) == { "sum_principal_direction_extents__direction:2", "min_principal_direction_extents__direction:2", @@ -287,38 +295,40 @@ def test_extract_dataframe__kwarg_modes_multiple_features(): res = ms.extract_dataframe(m, config) - expected_columns = pd.MultiIndex.from_tuples([ - ('property', 'name'), - ('axon', 'sum_principal_direction_extents__direction:2'), - ('axon', 'min_principal_direction_extents__direction:2'), - ('axon', 'sum_principal_direction_extents__direction:1'), - ('axon', 'min_principal_direction_extents__direction:1'), - ('axon', 'sum_principal_direction_extents__direction:0'), - ('axon', 'min_principal_direction_extents__direction:0'), - ('apical_dendrite', 'sum_principal_direction_extents__direction:2'), - ('apical_dendrite', 'min_principal_direction_extents__direction:2'), - ('apical_dendrite', 
'sum_principal_direction_extents__direction:1'), - ('apical_dendrite', 'min_principal_direction_extents__direction:1'), - ('apical_dendrite', 'sum_principal_direction_extents__direction:0'), - ('apical_dendrite', 'min_principal_direction_extents__direction:0'), - ('basal_dendrite', 'sum_principal_direction_extents__direction:2'), - ('basal_dendrite', 'min_principal_direction_extents__direction:2'), - ('basal_dendrite', 'sum_principal_direction_extents__direction:1'), - ('basal_dendrite', 'min_principal_direction_extents__direction:1'), - ('basal_dendrite', 'sum_principal_direction_extents__direction:0'), - ('basal_dendrite', 'min_principal_direction_extents__direction:0'), - ('all', 'sum_principal_direction_extents__direction:2'), - ('all', 'min_principal_direction_extents__direction:2'), - ('all', 'sum_principal_direction_extents__direction:1'), - ('all', 'min_principal_direction_extents__direction:1'), - ('all', 'sum_principal_direction_extents__direction:0'), - ('all', 'min_principal_direction_extents__direction:0'), - ('morphology', 'mean_soma_radius'), - ('morphology', 'min_partition_asymmetry__variant:branch-order__method:petilla'), - ('morphology', 'max_partition_asymmetry__variant:branch-order__method:petilla'), - ('morphology', 'min_partition_asymmetry__variant:length__method:uylings'), - ('morphology', 'max_partition_asymmetry__variant:length__method:uylings'), - ]) + expected_columns = pd.MultiIndex.from_tuples( + [ + ('property', 'name'), + ('axon', 'sum_principal_direction_extents__direction:2'), + ('axon', 'min_principal_direction_extents__direction:2'), + ('axon', 'sum_principal_direction_extents__direction:1'), + ('axon', 'min_principal_direction_extents__direction:1'), + ('axon', 'sum_principal_direction_extents__direction:0'), + ('axon', 'min_principal_direction_extents__direction:0'), + ('apical_dendrite', 'sum_principal_direction_extents__direction:2'), + ('apical_dendrite', 'min_principal_direction_extents__direction:2'), + ('apical_dendrite', 
'sum_principal_direction_extents__direction:1'), + ('apical_dendrite', 'min_principal_direction_extents__direction:1'), + ('apical_dendrite', 'sum_principal_direction_extents__direction:0'), + ('apical_dendrite', 'min_principal_direction_extents__direction:0'), + ('basal_dendrite', 'sum_principal_direction_extents__direction:2'), + ('basal_dendrite', 'min_principal_direction_extents__direction:2'), + ('basal_dendrite', 'sum_principal_direction_extents__direction:1'), + ('basal_dendrite', 'min_principal_direction_extents__direction:1'), + ('basal_dendrite', 'sum_principal_direction_extents__direction:0'), + ('basal_dendrite', 'min_principal_direction_extents__direction:0'), + ('all', 'sum_principal_direction_extents__direction:2'), + ('all', 'min_principal_direction_extents__direction:2'), + ('all', 'sum_principal_direction_extents__direction:1'), + ('all', 'min_principal_direction_extents__direction:1'), + ('all', 'sum_principal_direction_extents__direction:0'), + ('all', 'min_principal_direction_extents__direction:0'), + ('morphology', 'mean_soma_radius'), + ('morphology', 'min_partition_asymmetry__variant:branch-order__method:petilla'), + ('morphology', 'max_partition_asymmetry__variant:branch-order__method:petilla'), + ('morphology', 'min_partition_asymmetry__variant:length__method:uylings'), + ('morphology', 'max_partition_asymmetry__variant:length__method:uylings'), + ] + ) pd.testing.assert_index_equal(res.columns, expected_columns) @@ -352,10 +362,22 @@ def test_extract_dataframe(): actual = actual.drop(columns='raw_section_branch_orders', level=1) aggregated_expected = pd.concat( [ - expected[[col for col in expected.columns if col[1].startswith("mean_")]].mean().to_frame().T, - expected[[col for col in expected.columns if col[1].startswith("max_")]].max().to_frame().T, - expected[[col for col in expected.columns if col[1].startswith("min_")]].min().to_frame().T, - expected[[col for col in expected.columns if col[1].startswith("sum_")]].sum().to_frame().T, 
+ expected[[col for col in expected.columns if col[1].startswith("mean_")]] + .mean() + .to_frame() + .T, + expected[[col for col in expected.columns if col[1].startswith("max_")]] + .max() + .to_frame() + .T, + expected[[col for col in expected.columns if col[1].startswith("min_")]] + .min() + .to_frame() + .T, + expected[[col for col in expected.columns if col[1].startswith("sum_")]] + .sum() + .to_frame() + .T, ], axis=1, ) @@ -369,10 +391,11 @@ def test_extract_dataframe(): assert REF_CONFIG_NEW == initial_config # Test with a config without the 'morphology' key - morphs = nm.load_morphologies([Path(SWC_PATH, name) - for name in ['Neuron.swc', 'simple.swc']]) - config = {'neurite': {'section_lengths': ['sum']}, - 'neurite_type': ['AXON', 'APICAL_DENDRITE', 'BASAL_DENDRITE', 'ALL']} + morphs = nm.load_morphologies([Path(SWC_PATH, name) for name in ['Neuron.swc', 'simple.swc']]) + config = { + 'neurite': {'section_lengths': ['sum']}, + 'neurite_type': ['AXON', 'APICAL_DENDRITE', 'BASAL_DENDRITE', 'ALL'], + } initial_config = deepcopy(config) actual = ms.extract_dataframe(morphs, config) idx = pd.IndexSlice @@ -387,8 +410,7 @@ def test_extract_dataframe(): assert config == initial_config # Test with a List[Morphology] argument - morphs = [nm.load_morphology(Path(SWC_PATH, name)) - for name in ['Neuron.swc', 'simple.swc']] + morphs = [nm.load_morphology(Path(SWC_PATH, name)) for name in ['Neuron.swc', 'simple.swc']] actual = ms.extract_dataframe(morphs, config) assert_frame_equal(actual, expected, check_dtype=False) assert config == initial_config @@ -404,15 +426,27 @@ def test_extract_dataframe(): initial_config = deepcopy(config) actual = ms.extract_dataframe(morphs, config) expected_columns = pd.MultiIndex.from_tuples( - [('property', 'name'), - ('axon', 'sum_total_length_per_neurite'), - ('basal_dendrite', 'sum_total_length_per_neurite'), - ('apical_dendrite', 'sum_total_length_per_neurite'), - ('all', 'sum_total_length_per_neurite')]) + [ + ('property', 
'name'), + ('axon', 'sum_total_length_per_neurite'), + ('basal_dendrite', 'sum_total_length_per_neurite'), + ('apical_dendrite', 'sum_total_length_per_neurite'), + ('all', 'sum_total_length_per_neurite'), + ] + ) expected = pd.DataFrame( columns=expected_columns, - data=[['Neuron.swc', 207.87975221, 418.43241644, 214.37304578, 840.68521442], - ['simple.swc', 15., 16., 0., 31., ]]) + data=[ + ['Neuron.swc', 207.87975221, 418.43241644, 214.37304578, 840.68521442], + [ + 'simple.swc', + 15.0, + 16.0, + 0.0, + 31.0, + ], + ], + ) assert_frame_equal(actual, expected, check_dtype=False) assert config == initial_config @@ -425,7 +459,7 @@ def test_extract_dataframe_with_kwargs(): 'neurite_type': ['AXON', 'APICAL_DENDRITE', 'BASAL_DENDRITE', 'ALL'], 'morphology': { 'soma_radius': {'modes': ['mean']}, - } + }, } initial_config = deepcopy(config) @@ -446,8 +480,7 @@ def test_extract_dataframe_with_kwargs(): def test_extract_dataframe_multiproc(): - morphs = [Path(SWC_PATH, name) - for name in ['Neuron.swc', 'simple.swc']] + morphs = [Path(SWC_PATH, name) for name in ['Neuron.swc', 'simple.swc']] expected = pd.read_csv(Path(DATA_PATH, 'extracted-stats.csv'), index_col=0, header=[0, 1]) with warnings.catch_warnings(record=True) as w: @@ -485,10 +518,11 @@ def test_extract_dataframe_multiproc(): def test_get_header(): - fake_results = {'fake_name0': REF_OUT, - 'fake_name1': REF_OUT, - 'fake_name2': REF_OUT, - } + fake_results = { + 'fake_name0': REF_OUT, + 'fake_name1': REF_OUT, + 'fake_name2': REF_OUT, + } header = ms._get_header(fake_results) assert 1 + 2 + 4 * (4 + 5) == len(header) # name + everything in REF_OUT @@ -497,7 +531,6 @@ def test_get_header(): def test_get_header__with_kwargs(): - fake_results = { "fake_name0": { 'axon': { @@ -505,36 +538,37 @@ def test_get_header__with_kwargs(): 'min_principal_direction_extents__direction:2': 4.236138323156951, 'sum_principal_direction_extents__direction:1': 8.070668782620396, 'max_principal_direction_extents__direction:1': 
8.070668782620396, - 'mean_principal_direction_extents__direction:0': 82.38543140446015 + 'mean_principal_direction_extents__direction:0': 82.38543140446015, }, 'apical_dendrite': { 'sum_principal_direction_extents__direction:2': 3.6493184467335213, 'min_principal_direction_extents__direction:2': 3.6493184467335213, 'sum_principal_direction_extents__direction:1': 5.5082642304864695, 'max_principal_direction_extents__direction:1': 5.5082642304864695, - 'mean_principal_direction_extents__direction:0': 99.57940514500457 + 'mean_principal_direction_extents__direction:0': 99.57940514500457, }, 'basal_dendrite': { 'sum_principal_direction_extents__direction:2': 7.32638745131256, 'min_principal_direction_extents__direction:2': 3.10141343122575, 'sum_principal_direction_extents__direction:1': 11.685447149154676, 'max_principal_direction_extents__direction:1': 6.410958014733595, - 'mean_principal_direction_extents__direction:0': 87.2112016874677 + 'mean_principal_direction_extents__direction:0': 87.2112016874677, }, 'all': { 'sum_principal_direction_extents__direction:2': 15.211844221203034, 'min_principal_direction_extents__direction:2': 3.10141343122575, 'sum_principal_direction_extents__direction:1': 25.26438016226154, 'max_principal_direction_extents__direction:1': 8.070668782620396, - 'mean_principal_direction_extents__direction:0': 89.09680998110002 + 'mean_principal_direction_extents__direction:0': 89.09680998110002, }, 'morphology': { 'mean_soma_radius': 0.13065629977308288, 'min_partition_asymmetry__variant:branch-order__method:petilla': 0.0, 'max_partition_asymmetry__variant:branch-order__method:petilla': 0.9, 'min_partition_asymmetry__variant:length__method:uylings': 0.00030289197373727377, - 'max_partition_asymmetry__variant:length__method:uylings': 0.8795344229855895} - } + 'max_partition_asymmetry__variant:length__method:uylings': 0.8795344229855895, + }, + } } assert ms._get_header(fake_results) == [ @@ -563,15 +597,16 @@ def test_get_header__with_kwargs(): 
'morphology:min_partition_asymmetry__variant:branch-order__method:petilla', 'morphology:max_partition_asymmetry__variant:branch-order__method:petilla', 'morphology:min_partition_asymmetry__variant:length__method:uylings', - 'morphology:max_partition_asymmetry__variant:length__method:uylings' + 'morphology:max_partition_asymmetry__variant:length__method:uylings', ] def test_generate_flattened_dict(): - fake_results = {'fake_name0': REF_OUT, - 'fake_name1': REF_OUT, - 'fake_name2': REF_OUT, - } + fake_results = { + 'fake_name0': REF_OUT, + 'fake_name1': REF_OUT, + 'fake_name2': REF_OUT, + } header = ms._get_header(fake_results) rows = list(ms._generate_flattened_dict(header, fake_results)) assert 3 == len(rows) # one for fake_name[0-2] @@ -579,7 +614,6 @@ def test_generate_flattened_dict(): def test_generate_flattened_dict__with_kwargs(): - results = { 'axon': { 'sum_principal_direction_extents__direction:2': 0.0, @@ -615,7 +649,7 @@ def test_generate_flattened_dict__with_kwargs(): 'max_partition_asymmetry__variant:branch-order__method:petilla': 4.0, 'min_partition_asymmetry__variant:length__method:uylings': 5.0, 'max_partition_asymmetry__variant:length__method:uylings': 6.0, - } + }, } fake_results = { @@ -627,11 +661,15 @@ def test_generate_flattened_dict__with_kwargs(): assert list(ms._generate_flattened_dict(header, fake_results)) == [ [ - 'fake_name0', 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 1.0, 2.0, 3.0, 4.0, - 5.0, 6.0, 7.0, 8.0, 9.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0], + 'fake_name0', + 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, + 8.0, 9.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, + ], # fmt: skip [ - 'fake_name1', 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 1.0, 2.0, 3.0, 4.0, - 5.0, 6.0, 7.0, 8.0, 9.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0] + 'fake_name1', + 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, + 8.0, 9.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, + ], # fmt: skip ] @@ -686,7 +724,6 @@ 
def test_standardize_layout(): def test_sanitize_config(): - new_config = ms._sanitize_config({}) # empty assert 3 == len(new_config) # neurite & morphology & population created @@ -694,12 +731,10 @@ def test_sanitize_config(): 'neurite': { 'section_lengths': ['max', 'sum'], 'section_volumes': ['sum'], - 'section_branch_orders': ['max'] + 'section_branch_orders': ['max'], }, 'neurite_type': ['AXON', 'APICAL_DENDRITE', 'BASAL_DENDRITE', 'ALL'], - 'morphology': { - 'soma_radius': ['mean'] - } + 'morphology': {'soma_radius': ['mean']}, } new_config = ms._sanitize_config(full_config) @@ -738,16 +773,17 @@ def test_multidimensional_features(): """ m = nm.load_morphology(Path(SWC_PATH, 'no-axon.swc')) - config = {'neurite': {'segment_midpoints': ['max']}, - 'neurite_type': ['AXON']} + config = {'neurite': {'segment_midpoints': ['max']}, 'neurite_type': ['AXON']} actual = ms.extract_dataframe(m, config) - assert_array_equal(actual['axon'][['max_segment_midpoints_0', - 'max_segment_midpoints_1', - 'max_segment_midpoints_2']].values, - [[None, None, None]]) + assert_array_equal( + actual['axon'][ + ['max_segment_midpoints_0', 'max_segment_midpoints_1', 'max_segment_midpoints_2'] + ].values, + [[None, None, None]], + ) config = {'neurite': {'partition_pairs': ['max']}} actual = ms.extract_dataframe(m, config) - assert_array_equal(actual['axon'][['max_partition_pairs_0', - 'max_partition_pairs_1']].values, - [[None, None]]) + assert_array_equal( + actual['axon'][['max_partition_pairs_0', 'max_partition_pairs_1']].values, [[None, None]] + ) diff --git a/tests/check/__init__.py b/tests/check/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/check/test_morphology_checks.py b/tests/check/test_morphology_checks.py index a3d252a20..15013a393 100644 --- a/tests/check/test_morphology_checks.py +++ b/tests/check/test_morphology_checks.py @@ -53,18 +53,23 @@ def _load_morphology(name): return name, load_morphology(path) - -NEURONS = 
dict([_load_morphology(n) for n in ['Neuron.h5', - 'Neuron_2_branch.h5', - 'Neuron.swc', - 'Neuron_small_radius.swc', - 'Neuron_zero_length_sections.swc', - 'Neuron_zero_length_segments.swc', - 'Neuron_zero_radius.swc', - 'Single_apical.swc', - 'Single_axon.swc', - 'Single_basal.swc', - ]]) +NEURONS = dict( + [ + _load_morphology(n) + for n in [ + 'Neuron.h5', + 'Neuron_2_branch.h5', + 'Neuron.swc', + 'Neuron_small_radius.swc', + 'Neuron_zero_length_sections.swc', + 'Neuron_zero_length_segments.swc', + 'Neuron_zero_radius.swc', + 'Single_apical.swc', + 'Single_axon.swc', + 'Single_basal.swc', + ] + ] +) def _pick(files): @@ -72,11 +77,12 @@ def _pick(files): def test_has_axon_good_data(): - files = ['Neuron.swc', - 'Neuron_small_radius.swc', - 'Single_axon.swc', - 'Neuron.h5', - ] + files = [ + 'Neuron.swc', + 'Neuron_small_radius.swc', + 'Single_axon.swc', + 'Neuron.h5', + ] for m in _pick(files): assert morphology_checks.has_axon(m) @@ -88,10 +94,7 @@ def test_has_axon_bad_data(): def test_has_apical_dendrite_good_data(): - files = ['Neuron.swc', - 'Neuron_small_radius.swc', - 'Single_apical.swc', - 'Neuron.h5'] + files = ['Neuron.swc', 'Neuron_small_radius.swc', 'Single_apical.swc', 'Neuron.h5'] for m in _pick(files): assert morphology_checks.has_apical_dendrite(m) @@ -104,11 +107,13 @@ def test_has_apical_dendrite_bad_data(): def test_has_basal_dendrite_good_data(): - files = ['Neuron.swc', - 'Neuron_small_radius.swc', - 'Single_basal.swc', - 'Neuron_2_branch.h5', - 'Neuron.h5'] + files = [ + 'Neuron.swc', + 'Neuron_small_radius.swc', + 'Single_basal.swc', + 'Neuron_2_branch.h5', + 'Neuron.h5', + ] for m in _pick(files): assert morphology_checks.has_basal_dendrite(m) @@ -134,12 +139,13 @@ def test_has_no_flat_neurites(): def test_nonzero_neurite_radii_good_data(): - files = ['Neuron.swc', - 'Single_apical.swc', - 'Single_basal.swc', - 'Single_axon.swc', - 'Neuron_2_branch.h5', - ] + files = [ + 'Neuron.swc', + 'Single_apical.swc', + 'Single_basal.swc', + 
'Single_axon.swc', + 'Neuron_2_branch.h5', + ] for m in _pick(files): ids = morphology_checks.has_all_nonzero_neurite_radii(m) @@ -170,18 +176,18 @@ def test_nonzero_segment_lengths_good_data(): def test_nonzero_segment_lengths_bad_data(): - files = ['Neuron_zero_length_segments.swc', - 'Single_apical.swc', - 'Single_basal.swc', - 'Single_axon.swc', - ] + files = [ + 'Neuron_zero_length_segments.swc', + 'Single_apical.swc', + 'Single_basal.swc', + 'Single_axon.swc', + ] bad_ids = [[0, 21, 42, 63], [0], [0], [0], [0]] for i, m in enumerate(_pick(files)): ids = morphology_checks.has_all_nonzero_segment_lengths(m) - assert (ids.info == - [(id, 0) for id in bad_ids[i]]) + assert ids.info == [(id, 0) for id in bad_ids[i]] def test_nonzero_segment_lengths_threshold(): @@ -194,16 +200,16 @@ def test_nonzero_segment_lengths_threshold(): ids = morphology_checks.has_all_nonzero_segment_lengths(m, threshold=0.25) bad_ids = [(0, 0), (21, 0), (36, 9), (42, 0), (52, 7), (60, 2), (63, 0), (70, 4), (76, 6)] - assert (ids.info == - [(id, val) for id, val in bad_ids]) + assert ids.info == [(id, val) for id, val in bad_ids] def test_nonzero_section_lengths_good_data(): - files = ['Neuron.swc', - 'Single_apical.swc', - 'Single_basal.swc', - 'Single_axon.swc', - ] + files = [ + 'Neuron.swc', + 'Single_apical.swc', + 'Single_basal.swc', + 'Single_axon.swc', + ] for i, m in enumerate(_pick(files)): ids = morphology_checks.has_all_nonzero_section_lengths(m) @@ -226,7 +232,7 @@ def test_nonzero_section_lengths_threshold(): assert ids.status assert len(ids.info) == 0 - ids = morphology_checks.has_all_nonzero_section_lengths(m, threshold=15.) 
+ ids = morphology_checks.has_all_nonzero_section_lengths(m, threshold=15.0) assert not ids.status assert len(ids.info) == 84 @@ -281,7 +287,6 @@ def test_has_no_narrow_start(): def test_has_nonzero_soma_radius_threshold(): - class Dummy: pass @@ -308,7 +313,8 @@ def test_has_no_jumps(): def test_has_no_narrow_dendritic_section(): - swc_content = StringIO(u""" + swc_content = StringIO( + u""" # index, type, x, y, z, radius, parent 1 1 0 0 0 10. -1 2 2 0 0 0 10. 1 @@ -319,21 +325,22 @@ def test_has_no_narrow_dendritic_section(): 7 3 0 -4 0 5. 6 8 3 6 -4 0 10. 7 9 3 -5 -4 0 10. 7 -""") +""" + ) m = load_morphology(swc_content, reader='swc') - res = morphology_checks.has_no_narrow_neurite_section(m, - dendrite_filter, - radius_threshold=5, - considered_section_min_length=0) + res = morphology_checks.has_no_narrow_neurite_section( + m, dendrite_filter, radius_threshold=5, considered_section_min_length=0 + ) assert res.status - res = morphology_checks.has_no_narrow_neurite_section(m, dendrite_filter, - radius_threshold=7, - considered_section_min_length=0) + res = morphology_checks.has_no_narrow_neurite_section( + m, dendrite_filter, radius_threshold=7, considered_section_min_length=0 + ) assert not res.status - swc_content = StringIO(u""" + swc_content = StringIO( + u""" # index, type, x, y, z, radius, parent 1 1 0 0 0 10. -1 2 2 0 0 0 5 1 # narrow soma @@ -344,12 +351,15 @@ def test_has_no_narrow_dendritic_section(): 7 3 0 -4 0 10. 6 8 3 6 -4 0 10. 7 9 3 -5 -4 0 10. 
7 -""") +""" + ) m = load_morphology(swc_content, reader='swc') - res = morphology_checks.has_no_narrow_neurite_section(m, dendrite_filter, - radius_threshold=5, - considered_section_min_length=0) - assert res.status, 'Narrow soma or axons should not raise bad status when checking for narrow dendrites' + res = morphology_checks.has_no_narrow_neurite_section( + m, dendrite_filter, radius_threshold=5, considered_section_min_length=0 + ) + assert ( + res.status + ), 'Narrow soma or axons should not raise bad status when checking for narrow dendrites' def test_has_no_dangling_branch(): @@ -357,15 +367,13 @@ def test_has_no_dangling_branch(): res = morphology_checks.has_no_dangling_branch(m) assert not res.status assert len(res.info) == 1 - assert_array_equal(res.info[0][1][0][COLS.XYZ], - [0., 49., 0.]) + assert_array_equal(res.info[0][1][0][COLS.XYZ], [0.0, 49.0, 0.0]) _, m = _load_morphology('dangling_dendrite.swc') res = morphology_checks.has_no_dangling_branch(m) assert not res.status assert len(res.info) == 1 - assert_array_equal(res.info[0][1][0][COLS.XYZ], - [0., 49., 0.]) + assert_array_equal(res.info[0][1][0][COLS.XYZ], [0.0, 49.0, 0.0]) _, m = _load_morphology('axon-sprout-from-dendrite.asc') res = morphology_checks.has_no_dangling_branch(m) @@ -384,9 +392,10 @@ def test__bool__(): assert c.__bool__() == c.__nonzero__() - def test_has_multifurcation(): - m = load_morphology(StringIO(u""" + m = load_morphology( + StringIO( + u""" ((CellBody) (-1 0 0 2) (1 0 0 2)) ( (Color Blue) (Axon) @@ -407,7 +416,10 @@ def test_has_multifurcation(): (4 13 0 2) ) ) -"""), reader='asc') +""" + ), + reader='asc', + ) check_ = morphology_checks.has_multifurcation(m) assert not check_.status @@ -417,7 +429,9 @@ def test_has_multifurcation(): def test_has_unifurcation(): - m = load_morphology(StringIO(u""" + m = load_morphology( + StringIO( + u""" ((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) @@ -432,7 +446,10 @@ def test_has_unifurcation(): (6 5 0 3) ) ) -"""), reader='asc') 
+""" + ), + reader='asc', + ) check_ = morphology_checks.has_unifurcation(m) assert not check_.status @@ -442,7 +459,8 @@ def test_has_unifurcation(): def test_single_children(): - m = load_morphology(""" + m = load_morphology( + """ ( (Color Blue) (Axon) (0 5 0 2) @@ -454,14 +472,17 @@ def test_single_children(): (6 13 0 2) ) ) -""", "asc") +""", + "asc", + ) result = morphology_checks.has_no_single_children(m) assert result.status is False assert result.info == [0] def test_has_no_back_tracking(): - m = load_morphology(""" + m = load_morphology( + """ ((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) @@ -480,7 +501,9 @@ def test_has_no_back_tracking(): (3 -5 0 0.2) (4 -6 0 0.2) )) -""", "asc") +""", + "asc", + ) result = morphology_checks.has_no_back_tracking(m) assert result.status is False info = result.info @@ -491,7 +514,8 @@ def test_has_no_back_tracking(): def test_has_no_overlapping_point(): - m = load_morphology(""" + m = load_morphology( + """ ((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) @@ -510,7 +534,9 @@ def test_has_no_overlapping_point(): (0 1 0 0.2) (4 -6 0 0.2) )) -""", "asc") +""", + "asc", + ) result = morphology_checks.has_no_overlapping_point(m) assert result.status is False info = result.info diff --git a/tests/check/test_morphtree.py b/tests/check/test_morphtree.py index a3e1ce2b6..b359db283 100644 --- a/tests/check/test_morphtree.py +++ b/tests/check/test_morphtree.py @@ -40,7 +40,9 @@ def _generate_back_track_tree(n, dev): points = np.array(dev) + np.array([1, 3 if n == 0 else -3, 0]) - m = load_morphology(StringIO(u""" + m = load_morphology( + StringIO( + u""" ((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) @@ -59,14 +61,21 @@ def _generate_back_track_tree(n, dev): (3 -5 0 0.2) (4 -6 0 0.2) )) - """.format(*points.tolist())), reader='asc') + """.format( + *points.tolist() + ) + ), + reader='asc', + ) return m def test_is_monotonic(): # tree with decreasing radii - m = load_morphology(StringIO(u""" + m = load_morphology( + StringIO( + u""" 
((Dendrite) (0 0 0 1.0) (0 0 0 0.99) @@ -76,11 +85,16 @@ def test_is_monotonic(): | (0 0 0 0.5) (0 0 0 0.2) - ))"""), reader='asc') + ))""" + ), + reader='asc', + ) assert mt.is_monotonic(m.neurites[0], 1e-6) # tree with equal radii - m = load_morphology(StringIO(u""" + m = load_morphology( + StringIO( + u""" ((Dendrite) (0 0 0 1.0) (0 0 0 1.0) @@ -90,11 +104,16 @@ def test_is_monotonic(): | (0 0 0 1.0) (0 0 0 1.0) - ))"""), reader='asc') + ))""" + ), + reader='asc', + ) assert mt.is_monotonic(m.neurites[0], 1e-6) # tree with increasing radii - m = load_morphology(StringIO(u""" + m = load_morphology( + StringIO( + u""" ((Dendrite) (0 0 0 1.0) (0 0 0 1.0) @@ -104,11 +123,16 @@ def test_is_monotonic(): | (0 0 0 0.3) (0 0 0 0.1) - ))"""), reader='asc') + ))""" + ), + reader='asc', + ) assert not mt.is_monotonic(m.neurites[0], 1e-6) # Tree with larger child initial point - m = load_morphology(StringIO(u""" + m = load_morphology( + StringIO( + u""" ((Dendrite) (0 0 0 1.0) (0 0 0 0.75) @@ -118,7 +142,10 @@ def test_is_monotonic(): (0 0 0 0.375) (0 0 0 0.125) (0 0 0 0.625) - ))"""), reader='asc') + ))""" + ), + reader='asc', + ) assert not mt.is_monotonic(m.neurites[0], 1e-6) @@ -154,7 +181,7 @@ def test_back_tracking_segments(): def test_is_back_tracking(): # case 1: a back-track falls directly on a previous node - t = _generate_back_track_tree(1, (0., 0., 0.)) + t = _generate_back_track_tree(1, (0.0, 0.0, 0.0)) assert mt.is_back_tracking(t.neurites[0]) # case 2: a zigzag is close to another segment @@ -166,7 +193,7 @@ def test_is_back_tracking(): assert mt.is_back_tracking(t.neurites[0]) # case 4: a zigzag far from civilization - t = _generate_back_track_tree(1, (10., -10., 10.)) + t = _generate_back_track_tree(1, (10.0, -10.0, 10.0)) assert not mt.is_back_tracking(t.neurites[0]) # case 5: a zigzag on another section diff --git a/tests/check/test_runner.py b/tests/check/test_runner.py index 6c7e8336e..b6b9fa825 100644 --- a/tests/check/test_runner.py +++ 
b/tests/check/test_runner.py @@ -44,14 +44,14 @@ 'has_all_nonzero_segment_lengths', 'has_all_nonzero_section_lengths', 'has_all_nonzero_neurite_radii', - 'has_nonzero_soma_radius' + 'has_nonzero_soma_radius', ] }, 'options': { 'has_nonzero_soma_radius': 0.0, "has_all_nonzero_neurite_radii": 0.007, "has_all_nonzero_segment_lengths": 0.01, - "has_all_nonzero_section_lengths": [0.01] + "has_all_nonzero_section_lengths": [0.01], }, } CONFIG_COLOR = copy(CONFIG) @@ -63,102 +63,108 @@ def _run_test(path, ref, config=CONFIG, should_pass=False): and compare the results to 'ref'""" results = CheckRunner(config).run(path) assert dict(results['files'][str(path)]) == ref - assert (results['STATUS'] == - ("PASS" if should_pass else "FAIL")) - -ref = dict([ - ("Has basal dendrite", True), - ("Has axon", True), - ("Has apical dendrite", True), - ("Has all nonzero segment lengths", True), - ("Has all nonzero section lengths", True), - ("Has all nonzero neurite radii", True), - ("Has nonzero soma radius", True), - ("ALL", True) -]) + assert results['STATUS'] == ("PASS" if should_pass else "FAIL") + + +ref = dict( + [ + ("Has basal dendrite", True), + ("Has axon", True), + ("Has apical dendrite", True), + ("Has all nonzero segment lengths", True), + ("Has all nonzero section lengths", True), + ("Has all nonzero neurite radii", True), + ("Has nonzero soma radius", True), + ("ALL", True), + ] +) + def test_ok_morphology(): - _run_test(SWC_PATH / 'Neuron.swc', - ref, - should_pass=True) + _run_test(SWC_PATH / 'Neuron.swc', ref, should_pass=True) + def test_ok_morphology_color(): - _run_test(SWC_PATH / 'Neuron.swc', - ref, - CONFIG_COLOR, - should_pass=True) + _run_test(SWC_PATH / 'Neuron.swc', ref, CONFIG_COLOR, should_pass=True) def test_zero_length_sections_morphology(): - expected = dict([ - ("Has basal dendrite", True), - ("Has axon", True), - ("Has apical dendrite", True), - ("Has all nonzero segment lengths", False), - ("Has all nonzero section lengths", False), - ("Has all 
nonzero neurite radii", True), - ("Has nonzero soma radius", True), - ("ALL", False) - ]) - _run_test(SWC_PATH / 'Neuron_zero_length_sections.swc', - expected) + expected = dict( + [ + ("Has basal dendrite", True), + ("Has axon", True), + ("Has apical dendrite", True), + ("Has all nonzero segment lengths", False), + ("Has all nonzero section lengths", False), + ("Has all nonzero neurite radii", True), + ("Has nonzero soma radius", True), + ("ALL", False), + ] + ) + _run_test(SWC_PATH / 'Neuron_zero_length_sections.swc', expected) def test_single_apical_morphology(): - expected = dict([ - ("Has basal dendrite", False), - ("Has axon", False), - ("Has apical dendrite", True), - ("Has all nonzero segment lengths", False), - ("Has all nonzero section lengths", True), - ("Has all nonzero neurite radii", True), - ("Has nonzero soma radius", True), - ("ALL", False) - ]) - _run_test(SWC_PATH / 'Single_apical.swc', - expected) + expected = dict( + [ + ("Has basal dendrite", False), + ("Has axon", False), + ("Has apical dendrite", True), + ("Has all nonzero segment lengths", False), + ("Has all nonzero section lengths", True), + ("Has all nonzero neurite radii", True), + ("Has nonzero soma radius", True), + ("ALL", False), + ] + ) + _run_test(SWC_PATH / 'Single_apical.swc', expected) def test_single_basal_morphology(): expected = dict( - ([ - ("Has basal dendrite", True), - ("Has axon", False), - ("Has apical dendrite", False), - ("Has all nonzero segment lengths", False), - ("Has all nonzero section lengths", True), - ("Has all nonzero neurite radii", True), - ("Has nonzero soma radius", True), - ("ALL", False) - ])) - _run_test(SWC_PATH / 'Single_basal.swc', - expected) + ( + [ + ("Has basal dendrite", True), + ("Has axon", False), + ("Has apical dendrite", False), + ("Has all nonzero segment lengths", False), + ("Has all nonzero section lengths", True), + ("Has all nonzero neurite radii", True), + ("Has nonzero soma radius", True), + ("ALL", False), + ] + ) + ) + 
_run_test(SWC_PATH / 'Single_basal.swc', expected) def test_single_axon_morphology(): - expected = dict([ - ("Has basal dendrite", False), - ("Has axon", True), - ("Has apical dendrite", False), - ("Has all nonzero segment lengths", False), - ("Has all nonzero section lengths", True), - ("Has all nonzero neurite radii", True), - ("Has nonzero soma radius", True), - ("ALL", False) - ]) - _run_test(SWC_PATH / 'Single_axon.swc', - expected) + expected = dict( + [ + ("Has basal dendrite", False), + ("Has axon", True), + ("Has apical dendrite", False), + ("Has all nonzero segment lengths", False), + ("Has all nonzero section lengths", True), + ("Has all nonzero neurite radii", True), + ("Has nonzero soma radius", True), + ("ALL", False), + ] + ) + _run_test(SWC_PATH / 'Single_axon.swc', expected) def test_single_apical_no_soma(): - expected = {'ALL': False, - 'Has all nonzero neurite radii': True, - 'Has all nonzero section lengths': True, - 'Has all nonzero segment lengths': False, - 'Has apical dendrite': True, - 'Has axon': False, - 'Has basal dendrite': False, - 'Has nonzero soma radius': False} + expected = { + 'ALL': False, + 'Has all nonzero neurite radii': True, + 'Has all nonzero section lengths': True, + 'Has all nonzero segment lengths': False, + 'Has apical dendrite': True, + 'Has axon': False, + 'Has basal dendrite': False, + 'Has nonzero soma radius': False, + } _run_test(SWC_PATH / 'Single_apical_no_soma.swc', expected) @@ -182,13 +188,13 @@ def test__sanitize_config(): # creates minimal config new_config = CheckRunner._sanitize_config({'checks': {}}) - assert new_config == {'checks': - { - 'morphology_checks': [], - }, - 'options': {}, - 'color': False, - } + assert new_config == { + 'checks': { + 'morphology_checks': [], + }, + 'options': {}, + 'color': False, + } # makes no changes to already filled out config new_config = CheckRunner._sanitize_config(CONFIG) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 
000000000..a1da9db4b --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,92 @@ +"""Configuration for the pytest test suite.""" + +import warnings +from pathlib import Path + +import morphio +import neurom as nm +import pytest + + +def _load_morph_no_warning(filename): + all_warnings = [ + j + for j in [getattr(morphio.Warning, i) for i in dir(morphio.Warning)] + if isinstance(j, morphio._morphio.Warning) + ] + morphio.set_ignored_warning(all_warnings, True) + morph = nm.load_morphology(filename) + morphio.set_ignored_warning(all_warnings, False) + return morph + + +@pytest.fixture +def DATA_PATH(): + return Path(__file__).parent / "data" + + +@pytest.fixture +def H5_PATH(DATA_PATH): + return DATA_PATH / "h5" / "v1" + + +@pytest.fixture +def ASC_PATH(DATA_PATH): + return DATA_PATH / "neurolucida" + + +@pytest.fixture +def SWC_PATH(DATA_PATH): + return DATA_PATH / "swc" + + +@pytest.fixture +def SIMPLE_MORPHOLOGY(SWC_PATH): + return _load_morph_no_warning(SWC_PATH / "simple.swc") + + +@pytest.fixture +def SIMPLE_TRUNK_MORPHOLOGY(SWC_PATH): + return _load_morph_no_warning(SWC_PATH / "simple_trunk.swc") + + +@pytest.fixture +def SWC_MORPHOLOGY(SWC_PATH): + return _load_morph_no_warning(SWC_PATH / "Neuron.swc") + + +@pytest.fixture +def H5_MORPHOLOGY(H5_PATH): + return _load_morph_no_warning(H5_PATH / "Neuron.h5") + + +@pytest.fixture +def SWC_MORPHOLOGY_3PT(SWC_PATH): + return _load_morph_no_warning(SWC_PATH / 'soma' / 'three_pt_soma.swc') + + +@pytest.fixture +def MORPHOLOGY(SWC_PATH): + return _load_morph_no_warning(SWC_PATH / "test_morph.swc") + + +@pytest.fixture +def NEURITE(MORPHOLOGY): + return MORPHOLOGY.neurites[0] + + +@pytest.fixture +def SECTION(NEURITE): + return NEURITE.sections[0] + + +@pytest.fixture +def NRN_FILES(DATA_PATH): + return [ + DATA_PATH / "h5/v1" / f for f in ("Neuron.h5", "Neuron_2_branch.h5", "bio_neuron-001.h5") + ] + + +@pytest.fixture +def POP(SIMPLE_MORPHOLOGY): + return nm.load_morphologies([SIMPLE_MORPHOLOGY, SIMPLE_MORPHOLOGY]) diff 
--git a/tests/core/__init__.py b/tests/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/test_iter.py b/tests/core/test_iter.py index d7c86d08c..0118e05af 100644 --- a/tests/core/test_iter.py +++ b/tests/core/test_iter.py @@ -39,10 +39,12 @@ NRN1 = load_morphology(DATA_PATH / 'swc/Neuron.swc') -NEURONS = [NRN1, - load_morphology(DATA_PATH / 'swc/Single_basal.swc'), - load_morphology(DATA_PATH / 'swc/Neuron_small_radius.swc'), - load_morphology(DATA_PATH / 'swc/Neuron_3_random_walker_branches.swc'), ] +NEURONS = [ + NRN1, + load_morphology(DATA_PATH / 'swc/Single_basal.swc'), + load_morphology(DATA_PATH / 'swc/Neuron_small_radius.swc'), + load_morphology(DATA_PATH / 'swc/Neuron_3_random_walker_branches.swc'), +] TOT_NEURITES = sum(len(N.neurites) for N in NEURONS) SIMPLE = load_morphology(DATA_PATH / 'swc/simple.swc') @@ -66,7 +68,6 @@ def test_iter_neurites_nrn_order(): def test_iter_neurites_filter(): - for ntyp in nm.NEURITE_TYPES: a = [n for n in POP.neurites if n.type == ntyp] b = [n for n in iter_neurites(POP, filt=lambda n: n.type == ntyp)] @@ -74,16 +75,18 @@ def test_iter_neurites_filter(): def test_iter_neurites_mapping(): - - n = [n for n in iter_neurites(POP, mapfun=lambda n: len(n.points))] + n = [n for n in iter_neurites(POP, mapfun=lambda n, section_type: len(n.points))] ref = [211, 211, 211, 211, 211, 211, 211, 211, 211, 500, 500, 500] assert n == ref def test_iter_neurites_filter_mapping(): - n = [n for n in iter_neurites(POP, - mapfun=lambda n: len(n.points), - filt=lambda n: len(n.points) > 250)] + n = [ + n + for n in iter_neurites( + POP, mapfun=lambda n, section_type: len(n.points), filt=lambda n: len(n.points) > 250 + ) + ] ref = [500, 500, 500] assert n == ref @@ -98,49 +101,90 @@ def test_iter_population(): def test_iter_sections_default(): + ref = [s.id for n in POP.neurites for s in n.sections] + assert ref == [n.id for n in iter_sections(POP)] - ref = [s for n in POP.neurites for s in 
n.iter_sections()] - assert (ref == - [n for n in iter_sections(POP)]) def test_iter_sections_default_pop(): - ref = [s.id for n in POP.neurites for s in n.iter_sections()] + ref = [s.id for n in POP.neurites for s in n.sections] assert ref == [n.id for n in iter_sections(POP)] def test_iter_sections_filter(): for ntyp in nm.NEURITE_TYPES: - a = [s.id for n in filter(lambda nn: nn.type == ntyp, POP.neurites) - for s in n.iter_sections()] + a = [s.id for n in filter(lambda nn: nn.type == ntyp, POP.neurites) for s in n.sections] b = [n.id for n in iter_sections(POP, neurite_filter=lambda n: n.type == ntyp)] assert a == b + def test_iter_sections_inrnorder(): - assert ([s.id for n in POP.neurites for s in n.iter_sections(neurite_order=NeuriteIter.NRN)] == - [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 0, 1, 2]) + assert [s.id for s in iter_sections(POP, neurite_order=NeuriteIter.NRN)] == [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 0, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 0, 21, 22, 23, 24, 25, 26, 27, 28, + 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, + 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, + 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 
24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, + 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, + 60, 61, 62, 1, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, + 82, 83, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, + 2, + ] # fmt: skip + def test_iter_sections_ipreorder(): - assert ([s.id for n in POP.neurites for s in n.iter_sections(Section.ipreorder)] == - [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 0, 1, 2]) + assert [s.id for n in POP.neurites for s in iter_sections(n, Section.ipreorder)] == [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, + 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, + 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, + 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, + 12, 13, 14, 15, 16, 17, 18, 19, 20, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, + 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, + 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 
72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 0, + 1, 2, + ] # fmt: skip + def test_iter_sections_ipostorder(): - assert ([s.id for n in POP.neurites for s in n.iter_sections(Section.ipostorder)] == - [1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 20, 18, 16, 14, 12, 10, 8, 6, 4, 2, 0, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 41, 39, 37, 35, 33, 31, 29, 27, 25, 23, 21, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 62, 60, 58, 56, 54, 52, 50, 48, 46, 44, 42, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 83, 81, 79, 77, 75, 73, 71, 69, 67, 65, 63, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 20, 18, 16, 14, 12, 10, 8, 6, 4, 2, 0, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 20, 18, 16, 14, 12, 10, 8, 6, 4, 2, 0, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 41, 39, 37, 35, 33, 31, 29, 27, 25, 23, 21, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 62, 60, 58, 56, 54, 52, 50, 48, 46, 44, 42, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 83, 81, 79, 77, 75, 73, 71, 69, 67, 65, 63, 0, 1, 2]) + assert [s.id for n in POP.neurites for s in iter_sections(n, Section.ipostorder)] == [ + 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 20, 18, 16, 14, 12, 10, 8, 6, 4, 2, 0, 22, 24, 26, 28, + 30, 32, 34, 36, 38, 40, 41, 39, 37, 35, 33, 31, 29, 27, 25, 23, 21, 43, 45, 47, 49, 51, 53, + 55, 57, 59, 61, 62, 60, 58, 56, 54, 52, 50, 48, 46, 44, 42, 64, 66, 68, 70, 72, 74, 76, 78, + 80, 82, 83, 81, 79, 77, 75, 73, 71, 69, 67, 65, 63, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 20, + 18, 16, 14, 12, 10, 8, 6, 4, 2, 0, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 20, 18, 16, 14, 12, + 10, 8, 6, 4, 2, 0, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 41, 39, 37, 35, 33, 31, 29, 27, + 25, 23, 21, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 62, 60, 58, 56, 54, 52, 50, 48, 46, 44, + 42, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 83, 81, 79, 77, 75, 73, 71, 69, 67, 65, 63, 0, + 1, 2, + ] # fmt: skip def test_iter_sections_ibifurcation(): - assert ([s.id for n in POP.neurites for s in n.iter_sections(Section.ibifurcation_point)] == - [0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 21, 23, 25, 27, 29, 31, 33, 
35, 37, 39, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81]) + assert [s.id for n in POP.neurites for s in iter_sections(n, Section.ibifurcation_point)] == [ + 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 42, 44, 46, 48, + 50, 52, 54, 56, 58, 60, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 0, 2, 4, 6, 8, 10, 12, 14, + 16, 18, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 42, 44, + 46, 48, 50, 52, 54, 56, 58, 60, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, + ] # fmt: skip def test_iter_sections_iforking(): - assert ([s.id for n in POP.neurites for s in n.iter_sections(Section.iforking_point)] == - [0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81]) + assert [s.id for n in POP.neurites for s in iter_sections(n, Section.iforking_point)] == [ + 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 42, 44, 46, 48, + 50, 52, 54, 56, 58, 60, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 0, 2, 4, 6, 8, 10, 12, 14, + 16, 18, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 42, 44, + 46, 48, 50, 52, 54, 56, 58, 60, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, + ] # fmt: skip def test_iter_sections_ileaf(): - assert ([s.id for n in POP.neurites for s in n.iter_sections(Section.ileaf)] == - [1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 83, 1, 3, 5, 7, 9, 11, 13, 
15, 17, 19, 20, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 83, 0, 1, 2]) + assert [s.id for n in POP.neurites for s in iter_sections(n, Section.ileaf)] == [ + 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 41, 43, 45, + 47, 49, 51, 53, 55, 57, 59, 61, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 83, 1, 3, 5, 7, + 9, 11, 13, 15, 17, 19, 20, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 20, 22, 24, 26, 28, 30, 32, + 34, 36, 38, 40, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 62, 64, 66, 68, 70, 72, 74, 76, + 78, 80, 82, 83, 0, 1, 2, + ] # fmt: skip def test_iter_section_morph(): @@ -159,7 +203,57 @@ def test_iter_section_morph(): def test_iter_segments_morph(): ref = list(iter_segments(SIMPLE)) - assert len(ref) == 6 + assert_array_equal(SIMPLE.segments, ref) + assert_array_equal( + ref, + [ + [[0.0, 0.0, 0.0, 1.0], [0.0, 5.0, 0.0, 1.0]], + [[0.0, 5.0, 0.0, 1.0], [-5.0, 5.0, 0.0, 0.0]], + [[0.0, 5.0, 0.0, 1.0], [6.0, 5.0, 0.0, 0.0]], + [[0.0, 0.0, 0.0, 1.0], [0.0, -4.0, 0.0, 1.0]], + [[0.0, -4.0, 0.0, 1.0], [6.0, -4.0, 0.0, 0.0]], + [[0.0, -4.0, 0.0, 1.0], [-5.0, -4.0, 0.0, 0.0]], + ], + ) + assert_array_equal( + list(iter_segments(SIMPLE, neurite_filter=lambda n: n.type == nm.BASAL_DENDRITE)), + [ + [[0.0, 0.0, 0.0, 1.0], [0.0, 5.0, 0.0, 1.0]], + [[0.0, 5.0, 0.0, 1.0], [-5.0, 5.0, 0.0, 0.0]], + [[0.0, 5.0, 0.0, 1.0], [6.0, 5.0, 0.0, 0.0]], + ], + ) + assert_array_equal( + list(iter_segments(SIMPLE, section_filter=lambda sec: sec.type == nm.BASAL_DENDRITE)), + [ + [[0.0, 0.0, 0.0, 1.0], [0.0, 5.0, 0.0, 1.0]], + [[0.0, 5.0, 0.0, 1.0], [-5.0, 5.0, 0.0, 0.0]], + [[0.0, 5.0, 0.0, 1.0], [6.0, 5.0, 0.0, 0.0]], + ], + ) + assert_array_equal( + list( + iter_segments( + SIMPLE, + neurite_filter=lambda n: n.type == nm.AXON, + section_filter=lambda sec: sec.type == nm.BASAL_DENDRITE, + ) + ), + [], + ) + + ref = 
list(iter_segments(SIMPLE, section_iterator=Section.ipostorder)) + assert_array_equal( + ref, + [ + [[0.0, 5.0, 0.0, 1.0], [-5.0, 5.0, 0.0, 0.0]], + [[0.0, 5.0, 0.0, 1.0], [6.0, 5.0, 0.0, 0.0]], + [[0.0, 0.0, 0.0, 1.0], [0.0, 5.0, 0.0, 1.0]], + [[0.0, -4.0, 0.0, 1.0], [6.0, -4.0, 0.0, 0.0]], + [[0.0, -4.0, 0.0, 1.0], [-5.0, -4.0, 0.0, 0.0]], + [[0.0, 0.0, 0.0, 1.0], [0.0, -4.0, 0.0, 1.0]], + ], + ) ref = list(iter_segments(SIMPLE, neurite_filter=lambda n: n.type == nm.AXON)) assert len(ref) == 3 @@ -184,7 +278,6 @@ def test_iter_segments_morph(): def test_iter_segments_pop(): - ref = list(iter_segments(POP)) assert len(ref) == 3387 @@ -199,7 +292,9 @@ def test_iter_segments_pop(): def test_iter_segments_section(): - sec = load_morphology(StringIO(u""" + sec = load_morphology( + StringIO( + u""" ((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) @@ -207,10 +302,38 @@ def test_iter_segments_section(): (5 6 7 16) (8 7 6 10) (4 3 2 2)) - """), reader='asc').sections[0] - ref = [[p1[COLS.XYZR].tolist(), p2[COLS.XYZR].tolist()] - for p1, p2 in iter_segments(sec)] - - assert_array_equal(ref, [[[1, 2, 3, 4], [5, 6, 7, 8]], - [[5, 6, 7, 8], [8, 7, 6, 5]], - [[8, 7, 6, 5], [4, 3, 2, 1]]]) + """ + ), + reader='asc', + ).sections[0] + ref = [[p1[COLS.XYZR].tolist(), p2[COLS.XYZR].tolist()] for p1, p2 in iter_segments(sec)] + + assert_array_equal( + ref, + [[[1, 2, 3, 4], [5, 6, 7, 8]], [[5, 6, 7, 8], [8, 7, 6, 5]], [[8, 7, 6, 5], [4, 3, 2, 1]]], + ) + assert_array_equal(ref, sec.segments) + + +def test_iter_segments_neurite(): + morph = load_morphology( + StringIO( + u""" + ((CellBody) (-1 0 0 2) (1 0 0 2)) + + ((Dendrite) + (1 2 3 8) + (5 6 7 16) + (8 7 6 10) + (4 3 2 2)) + """ + ), + reader='asc', + ) + neurite = morph.neurites[0] + ref = list(iter_segments(neurite)) + assert_array_equal( + ref, + [[[1, 2, 3, 4], [5, 6, 7, 8]], [[5, 6, 7, 8], [8, 7, 6, 5]], [[8, 7, 6, 5], [4, 3, 2, 1]]], + ) + assert_array_equal(neurite.segments, ref) diff --git a/tests/core/test_neurite.py 
b/tests/core/test_neurite.py index 234530daf..a32fed004 100644 --- a/tests/core/test_neurite.py +++ b/tests/core/test_neurite.py @@ -38,7 +38,7 @@ m = nm.load_morphology(SWC_PATH / 'point_soma_single_neurite.swc') ROOT_NODE = m.neurites[0].morphio_root_node -RADIUS = .5 +RADIUS = 0.5 REF_LEN = 3 @@ -72,4 +72,4 @@ def test_str(): def test_neurite_hash(): nrt = Neurite(ROOT_NODE) - assert hash(nrt) == hash((nrt.type, nrt.root_node)) + assert hash(nrt) == hash((nrt.type, nrt.root_node, nrt.process_subtrees)) diff --git a/tests/core/test_neuron.py b/tests/core/test_neuron.py index 4e1fbf73f..6fc9efe49 100644 --- a/tests/core/test_neuron.py +++ b/tests/core/test_neuron.py @@ -29,11 +29,13 @@ from copy import copy, deepcopy from pathlib import Path +import pytest import neurom as nm import numpy as np import morphio from neurom.core.morphology import Morphology, graft_morphology, iter_segments from numpy.testing import assert_array_equal +from neurom.exceptions import NeuroMError SWC_PATH = Path(__file__).parent.parent / 'data/swc/' @@ -49,47 +51,36 @@ def test_load_morphology_pathlib(): def test_load_morphology_from_other_morphologies(): filename = SWC_PATH / 'simple.swc' - expected_points = [[ 0., 0., 0., 1.], - [ 0., 5., 0., 1.], - [ 0., 5., 0., 1.], - [-5., 5., 0., 0.], - [ 0., 5., 0., 1.], - [ 6., 5., 0., 0.], - [ 0., 0., 0., 1.], - [ 0., -4., 0., 1.], - [ 0., -4., 0., 1.], - [ 6., -4., 0., 0.], - [ 0., -4., 0., 1.], - [-5., -4., 0., 0.]] - - assert_array_equal(nm.load_morphology(nm.load_morphology(filename)).points, - expected_points) - - assert_array_equal(nm.load_morphology(Morphology(filename)).points, - expected_points) - - assert_array_equal(nm.load_morphology(morphio.Morphology(filename)).points, - expected_points) + expected_points = [ + [0.0, 0.0, 0.0, 1.0], + [0.0, 5.0, 0.0, 1.0], + [0.0, 5.0, 0.0, 1.0], + [-5.0, 5.0, 0.0, 0.0], + [0.0, 5.0, 0.0, 1.0], + [6.0, 5.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 1.0], + [0.0, -4.0, 0.0, 1.0], + [0.0, -4.0, 0.0, 1.0], + [6.0, 
-4.0, 0.0, 0.0], + [0.0, -4.0, 0.0, 1.0], + [-5.0, -4.0, 0.0, 0.0], + ] + + assert_array_equal(nm.load_morphology(nm.load_morphology(filename)).points, expected_points) + assert_array_equal(nm.load_morphology(morphio.Morphology(filename)).points, expected_points) def test_for_morphio(): Morphology(morphio.mut.Morphology()) morphio_m = morphio.mut.Morphology() - morphio_m.soma.points = [[0,0,0], [1,1,1], [2,2,2]] + morphio_m.soma.points = [[0, 0, 0], [1, 1, 1], [2, 2, 2]] morphio_m.soma.diameters = [1, 1, 1] neurom_m = Morphology(morphio_m) - assert_array_equal(neurom_m.soma.points, - [[0., 0., 0., 0.5], - [1., 1., 1., 0.5], - [2., 2., 2., 0.5]]) - - neurom_m.soma.points = [[1, 1, 1, 1], - [2, 2, 2, 2]] - assert_array_equal(neurom_m.soma.points, - [[1, 1, 1, 1], - [2, 2, 2, 2]]) + assert_array_equal( + neurom_m.soma.points, [[0.0, 0.0, 0.0, 0.5], [1.0, 1.0, 1.0, 0.5], [2.0, 2.0, 2.0, 0.5]] + ) def _check_cloned_morphology(m, m2): @@ -116,10 +107,6 @@ def _check_cloned_morphology(m, m2): for neu1, neu2 in zip(m.neurites, m2.neurites): assert neu1 is not neu2 - # check if changes are propagated between morphs - m2.soma.radius = 10. 
- assert m.soma.radius != m2.soma.radius - def test_copy(): m = nm.load_morphology(SWC_PATH / 'simple.swc') @@ -131,6 +118,15 @@ def test_deepcopy(): _check_cloned_morphology(m, deepcopy(m)) +def test_eq(): + m1 = nm.load_morphology(SWC_PATH / 'simple.swc').neurites[1] + m2 = nm.load_morphology(SWC_PATH / 'simple.swc').neurites[1] + assert m1 == m2 + + m1.process_subtrees = True + assert m1 != m2 + + def test_graft_morphology(): m = nm.load_morphology(SWC_PATH / 'simple.swc') basal_dendrite = m.neurites[0] @@ -143,3 +139,8 @@ def test_str(): n = nm.load_morphology(SWC_PATH / 'simple.swc') assert 'Morphology' in str(n) assert 'Section' in str(n.neurites[0].root_node) + + +def test_morphology_raises_wrong_argument(): + with pytest.raises(NeuroMError, match="Expected morphio Morphology object but got: my-path"): + Morphology("my-path") diff --git a/tests/core/test_population.py b/tests/core/test_population.py index e52cf298d..e2a5f51f3 100644 --- a/tests/core/test_population.py +++ b/tests/core/test_population.py @@ -36,14 +36,15 @@ DATA_PATH = Path(__file__).parent.parent / 'data' -FILES = [DATA_PATH / 'swc/Neuron.swc', - DATA_PATH / 'swc/Single_basal.swc', - DATA_PATH / 'swc/Neuron_small_radius.swc'] +FILES = [ + DATA_PATH / 'swc/Neuron.swc', + DATA_PATH / 'swc/Single_basal.swc', + DATA_PATH / 'swc/Neuron_small_radius.swc', +] NEURONS = [load_morphology(f) for f in FILES] TOT_NEURITES = sum(len(N.neurites) for N in NEURONS) -populations = [Population(NEURONS, name='foo'), - Population(FILES, name='foo', cache=True)] +populations = [Population(NEURONS, name='foo'), Population(FILES, name='foo', cache=True)] @pytest.mark.parametrize('pop', populations) @@ -57,7 +58,8 @@ def test_names(pop): def test_indexing(): pop = populations[0] for i, n in enumerate(NEURONS): - assert n is pop[i] + assert n.name == pop[i].name + assert (n.points == pop[i].points).all() with pytest.raises(ValueError, match='no 10 index'): pop[10] @@ -68,22 +70,53 @@ def test_cache(): assert 
isinstance(n, Morphology) +@pytest.mark.parametrize("cache", [True, False]) +def test_reset_cache(cache): + pop = Population(FILES, cache=cache, process_subtrees=True) + + assert pop._process_subtrees is True + for n in pop: + assert isinstance(n, Morphology) + assert n.process_subtrees is True + + pop.process_subtrees = False + assert pop._process_subtrees is False + for n in pop: + assert isinstance(n, Morphology) + assert n.process_subtrees is False + + mixed_pop = Population(FILES + NEURONS, cache=cache, process_subtrees=True) + assert mixed_pop._process_subtrees is True + for n in mixed_pop: + assert isinstance(n, Morphology) + assert n.process_subtrees is True + + mixed_pop.process_subtrees = False + assert mixed_pop._process_subtrees is False + for n in mixed_pop: + assert isinstance(n, Morphology) + assert n.process_subtrees is False + + def test_double_indexing(): pop = populations[0] for i, n in enumerate(NEURONS): - assert n is pop[i] + assert n.name == pop[i].name + assert (n.points == pop[i].points).all() # second time to assure that generator is available again for i, n in enumerate(NEURONS): - assert n is pop[i] + assert n.name == pop[i].name + assert (n.points == pop[i].points).all() def test_iterating(): pop = populations[0] for a, b in zip(NEURONS, pop): - assert a is b + assert a.name == b.name + assert (a.points == b.points).all() for a, b in zip(NEURONS, pop.somata): - assert a.soma is b + assert (a.soma.points == b.points).all() @pytest.mark.parametrize('pop', populations) @@ -94,7 +127,8 @@ def test_len(pop): def test_getitem(): pop = populations[0] for i in range(len(NEURONS)): - assert pop[i] is NEURONS[i] + assert pop[i].name == NEURONS[i].name + assert (pop[i].points == NEURONS[i].points).all() @pytest.mark.parametrize('pop', populations) diff --git a/tests/core/test_section.py b/tests/core/test_section.py index 25fc48179..5f7717a10 100644 --- a/tests/core/test_section.py +++ b/tests/core/test_section.py @@ -45,62 +45,35 @@ def 
test_section_base_func(): assert_almost_equal(section.area, 31.41592653589793) assert_almost_equal(section.volume, 15.707963267948964) + # __nonzero__ + assert section + def test_section_tree(): m = nm.load_morphology(str(SWC_PATH / 'simple.swc')) assert m.sections[0].parent is None - assert m.sections[0] == m.sections[0].children[0].parent - - assert_array_equal([s.is_root() for s in m.sections], - [True, False, False, True, False, False]) - assert_array_equal([s.is_leaf() for s in m.sections], - [False, True, True, False, True, True]) - assert_array_equal([s.is_forking_point() for s in m.sections], - [True, False, False, True, False, False]) - assert_array_equal([s.is_bifurcation_point() for s in m.sections], - [True, False, False, True, False, False]) - assert_array_equal([s.id for s in m.neurites[0].root_node.ipreorder()], - [0, 1, 2]) - assert_array_equal([s.id for s in m.neurites[0].root_node.ipostorder()], - [1, 2, 0]) - assert_array_equal([s.id for s in m.neurites[0].root_node.iupstream()], - [0]) - assert_array_equal([s.id for s in m.sections[2].iupstream()], - [2, 0]) - assert_array_equal([s.id for s in m.neurites[0].root_node.ileaf()], - [1, 2]) - assert_array_equal([s.id for s in m.sections[2].ileaf()], - [2]) - assert_array_equal([s.id for s in m.neurites[0].root_node.iforking_point()], - [0]) - assert_array_equal([s.id for s in m.neurites[0].root_node.ibifurcation_point()], - [0]) - -def test_append_section(): - n = nm.load_morphology(SWC_PATH / 'simple.swc') - s = n.sections[0] - - s.append_section(n.sections[-1]) - assert len(s.children) == 3 - assert s.children[-1].id == 6 - assert s.children[-1].type == n.sections[-1].type - - s.append_section(n.sections[-1].morphio_section) - assert len(s.children) == 4 - assert s.children[-1].id == 7 - assert s.children[-1].type == n.sections[-1].type + assert m.sections[0] == m.sections[0] + assert m.sections[0] == m.sections[0].children[0].parent -def test_set_points(): - n = nm.load_morphology(SWC_PATH / 
'simple.swc') - s = n.sections[0] - s.points = np.array([ - [0, 5, 0, 2], - [0, 7, 0, 2], - ]) - assert_array_equal(s.points, np.array([ - [0, 5, 0, 2], - [0, 7, 0, 2], - ])) + assert_array_equal([s.is_root() for s in m.sections], [True, False, False, True, False, False]) + assert_array_equal([s.is_leaf() for s in m.sections], [False, True, True, False, True, True]) + assert_array_equal( + [s.is_forking_point() for s in m.sections], [True, False, False, True, False, False] + ) + assert_array_equal( + [s.is_bifurcation_point() for s in m.sections], [True, False, False, True, False, False] + ) + assert_array_equal([s.id for s in m.neurites[0].root_node.ipreorder()], [0, 1, 2]) + assert_array_equal([s.id for s in m.neurites[0].root_node.ipostorder()], [1, 2, 0]) + assert_array_equal([s.id for s in m.neurites[0].root_node.iupstream()], [0]) + assert_array_equal([s.id for s in m.sections[2].iupstream()], [2, 0]) + assert_array_equal([s.id for s in m.sections[2].iupstream(stop_node=m.sections[2])], [2]) + # if a stop node that is not upstream is given, it should stop at root + assert_array_equal([s.id for s in m.sections[2].iupstream(stop_node=m.sections[1])], [2, 0]) + assert_array_equal([s.id for s in m.neurites[0].root_node.ileaf()], [1, 2]) + assert_array_equal([s.id for s in m.sections[2].ileaf()], [2]) + assert_array_equal([s.id for s in m.neurites[0].root_node.iforking_point()], [0]) + assert_array_equal([s.id for s in m.neurites[0].root_node.ibifurcation_point()], [0]) diff --git a/tests/core/test_soma.py b/tests/core/test_soma.py index 2dd292aa9..cef38c126 100644 --- a/tests/core/test_soma.py +++ b/tests/core/test_soma.py @@ -32,6 +32,7 @@ from unittest.mock import Mock import numpy as np +import morphio from morphio import MorphioError, SomaError, set_raise_warnings from neurom import load_morphology from neurom.core import soma @@ -48,10 +49,15 @@ def test_no_soma_builder(): def test_no_soma(): - sm = load_morphology(StringIO(u""" + sm = load_morphology( + 
StringIO( + u""" ((Dendrite) (0 0 0 1.0) - (0 0 0 2.0))"""), reader='asc').soma + (0 0 0 2.0))""" + ), + reader='asc', + ).soma assert sm.center is None assert sm.points.shape == (0, 4) @@ -62,14 +68,20 @@ def test_Soma_SinglePoint(): assert isinstance(sm, soma.SomaSinglePoint) assert list(sm.center) == [11, 22, 33] assert sm.radius == 44 + assert_almost_equal(sm.area, 24328.49350939936, decimal=5) def test_Soma_contour(): with warnings.catch_warnings(record=True): - sm = load_morphology(StringIO(u"""((CellBody) + sm = load_morphology( + StringIO( + u"""((CellBody) (0 0 0 44) (0 -44 0 44) - (0 +44 0 44))"""), reader='asc').soma + (0 +44 0 44))""" + ), + reader='asc', + ).soma assert 'SomaSimpleContour' in str(sm) assert isinstance(sm, soma.SomaSimpleContour) @@ -78,9 +90,14 @@ def test_Soma_contour(): def test_Soma_ThreePointCylinder(): - sm = load_morphology(StringIO(u"""1 1 0 0 0 44 -1 + sm = load_morphology( + StringIO( + u"""1 1 0 0 0 44 -1 2 1 0 -44 0 44 1 - 3 1 0 +44 0 44 1"""), reader='swc').soma + 3 1 0 +44 0 44 1""" + ), + reader='swc', + ).soma assert 'SomaNeuromorphoThreePointCylinders' in str(sm) assert isinstance(sm, soma.SomaNeuromorphoThreePointCylinders) assert list(sm.center) == [0, 0, 0] @@ -89,24 +106,37 @@ def test_Soma_ThreePointCylinder(): def test_Soma_ThreePointCylinder_invalid_radius(): with warnings.catch_warnings(record=True) as w_list: - load_morphology(StringIO(u""" + load_morphology( + StringIO( + u""" 1 1 0 0 0 1e-8 -1 2 1 0 -1e-8 0 1e-8 1 - 3 1 0 +1e-8 0 1e-8 1"""), reader='swc').soma + 3 1 0 +1e-8 0 1e-8 1""" + ), + reader='swc', + ).soma assert 'Zero radius for SomaNeuromorphoThreePointCylinders' in str(w_list[0]) def test_Soma_ThreePointCylinder_invalid(): try: set_raise_warnings(True) - with pytest.raises(MorphioError, - match=('Warning: the soma does not conform the three point soma spec|' # morphio < 3.3.7 - 'The non-constant columns is not offset by' # morphio >= 3.3.7 - )): - load_morphology(StringIO(u""" + with 
pytest.raises( + MorphioError, + match=( + 'Warning: the soma does not conform the three point soma spec|' # morphio < 3.3.7 + 'The non-constant columns is not offset by' # morphio >= 3.3.7 + ), + ): + load_morphology( + StringIO( + u""" 1 1 0 0 0 1e-4 -1 2 1 0 -44 0 1e-4 1 - 3 1 0 +44 0 1e-4 1"""), reader='swc') + 3 1 0 +44 0 1e-4 1""" + ), + reader='swc', + ) finally: set_raise_warnings(False) @@ -115,31 +145,37 @@ def check_SomaC(stream): sm = load_morphology(StringIO(stream), reader='asc').soma assert 'SomaSimpleContour' in str(sm) assert isinstance(sm, soma.SomaSimpleContour) - np.testing.assert_almost_equal(sm.center, [0., 0., 0.]) + np.testing.assert_almost_equal(sm.center, [0.0, 0.0, 0.0]) assert_almost_equal(sm.radius, 1.0) def test_SomaC(): with warnings.catch_warnings(record=True): - check_SomaC(u"""((CellBody) + check_SomaC( + u"""((CellBody) (1 0 0 44) (0 1 0 44) (-1 0 0 44) - (0 -1 0 44)) """) + (0 -1 0 44)) """ + ) - sin_pi_by_4 = math.cos(math.pi/4.) - cos_pi_by_4 = math.sin(math.pi/4.) 
+ sin_pi_by_4 = math.cos(math.pi / 4.0) + cos_pi_by_4 = math.sin(math.pi / 4.0) - check_SomaC(u"""((CellBody) + check_SomaC( + u"""((CellBody) (1 0 0 44) ({sin} {cos} 0 44) (0 1 0 44) (-1 0 0 44) (-{sin} -{cos} 0 44) - (0 -1 0 44))""".format(sin=sin_pi_by_4, - cos=cos_pi_by_4)) + (0 -1 0 44))""".format( + sin=sin_pi_by_4, cos=cos_pi_by_4 + ) + ) - check_SomaC(u"""((CellBody) + check_SomaC( + u"""((CellBody) (1 0 0 44) ({sin} {cos} 0 44) (0 1 0 44) @@ -147,24 +183,41 @@ def test_SomaC(): (-1 0 0 44) (-{sin} -{cos} 0 44) (0 -1 0 44) - ({sin} -{cos} 0 44))""".format(sin=sin_pi_by_4, - cos=cos_pi_by_4)) + ({sin} -{cos} 0 44))""".format( + sin=sin_pi_by_4, cos=cos_pi_by_4 + ) + ) def test_soma_points_2(): - load_morphology(StringIO(u""" + load_morphology( + StringIO( + u""" 1 1 0 0 -10 40 -1 - 2 1 0 0 0 40 1"""), reader='swc').soma - load_morphology(StringIO(u"""((CellBody) + 2 1 0 0 0 40 1""" + ), + reader='swc', + ).soma + load_morphology( + StringIO( + u"""((CellBody) (0 0 0 44) - (0 +44 0 44))"""), reader='asc').soma + (0 +44 0 44))""" + ), + reader='asc', + ).soma def test_Soma_Cylinders(): - s = load_morphology(StringIO(u""" + s = load_morphology( + StringIO( + u""" 1 1 0 0 -10 40 -1 2 1 0 0 0 40 1 - 3 1 0 0 10 40 2"""), reader='swc').soma + 3 1 0 0 10 40 2""" + ), + reader='swc', + ).soma # if r = 2*h (ie: as in this case 10 - -10 == 20), then the # area of a cylinder (excluding end caps) is: @@ -176,38 +229,54 @@ def test_Soma_Cylinders(): # neuromorpho style with warnings.catch_warnings(record=True): - s = load_morphology(StringIO(u""" + s = load_morphology( + StringIO( + u""" 1 1 0 0 0 10 -1 2 1 0 -10 0 10 1 - 3 1 0 10 0 10 1"""), reader='swc').soma + 3 1 0 10 0 10 1""" + ), + reader='swc', + ).soma assert 'SomaNeuromorphoThreePointCylinders' in str(s) - assert list(s.center) == [0., 0., 0.] 
+ assert list(s.center) == [0.0, 0.0, 0.0] assert_almost_equal(s.area, 1256.6370614) # some neuromorpho files don't follow the convention - #but have (ys + rs) as point 2, and have xs different in each line + # but have (ys + rs) as point 2, and have xs different in each line # ex: http://neuromorpho.org/dableFiles/brumberg/CNG%20version/april11s1cell-1.CNG.swc with warnings.catch_warnings(record=True): - s = load_morphology(StringIO(u""" + s = load_morphology( + StringIO( + u""" 1 1 0 0 0 10 -1 2 1 -2 -6 0 10 1 - 3 1 2 6 0 10 1"""), reader='swc').soma + 3 1 2 6 0 10 1""" + ), + reader='swc', + ).soma assert 'SomaNeuromorphoThreePointCylinders' in str(s) - assert list(s.center) == [0., 0., 0.] + assert list(s.center) == [0.0, 0.0, 0.0] assert_almost_equal(s.area, 794.76706126368811, decimal=5) + assert_almost_equal(s.volume, 3160.274957542371, decimal=5) - s = load_morphology(StringIO(u""" + s = load_morphology( + StringIO( + u""" 1 1 0 0 0 0 -1 2 1 0 2 0 2 1 3 1 0 4 0 4 2 4 1 0 6 0 6 3 5 1 0 8 0 8 4 - 6 1 0 10 0 10 5"""), reader='swc').soma + 6 1 0 10 0 10 5""" + ), + reader='swc', + ).soma - assert list(s.center) == [0., 0., 0.] 
- assert_almost_equal(s.area, 444.288293851) # cone area, not including bottom + assert list(s.center) == [0.0, 0.0, 0.0] + assert_almost_equal(s.area, 444.288293851) # cone area, not including bottom def test_soma_overlaps(): @@ -223,10 +292,15 @@ def test_soma_overlaps(): np.testing.assert_array_equal(sm.overlaps(points, exclude_boundary=True), [True, False, False]) # Test with cynlindrical soma - sm = load_morphology(StringIO(u""" + sm = load_morphology( + StringIO( + u""" 1 1 0 0 -10 40 -1 2 1 0 0 0 40 1 - 3 1 0 0 10 40 2"""), reader='swc').soma + 3 1 0 0 10 40 2""" + ), + reader='swc', + ).soma assert isinstance(sm, soma.SomaCylinders) points = [ [0, 0, -20], # on the axis of the cylinder but outside it @@ -236,13 +310,20 @@ def test_soma_overlaps(): [100, 0, 0], # outside all cylinders ] np.testing.assert_array_equal(sm.overlaps(points), [False, True, True, True, False]) - np.testing.assert_array_equal(sm.overlaps(points, exclude_boundary=True), [False, False, True, True, False]) + np.testing.assert_array_equal( + sm.overlaps(points, exclude_boundary=True), [False, False, True, True, False] + ) # Test with all points in soma for coverage - sm = load_morphology(StringIO(u""" + sm = load_morphology( + StringIO( + u""" 1 1 0 0 -10 40 -1 2 1 0 0 0 40 1 - 3 1 0 0 10 40 2"""), reader='swc').soma + 3 1 0 0 10 40 2""" + ), + reader='swc', + ).soma assert isinstance(sm, soma.SomaCylinders) points = [ [0, 0, -10], # on the axis of the cylinder and on it's edge @@ -253,12 +334,17 @@ def test_soma_overlaps(): np.testing.assert_array_equal(sm.overlaps(points, exclude_boundary=True), [False, True, True]) # Test with contour soma - sm = load_morphology(StringIO(u""" + sm = load_morphology( + StringIO( + u""" ((CellBody) (1 0 0 1) (1 1 0 1) (-1 1 0 1) - (-1 0 0 1)) """), reader='asc').soma + (-1 0 0 1)) """ + ), + reader='asc', + ).soma assert isinstance(sm, soma.SomaSimpleContour) points = [ [0, 0.5, 0], # on the center of the soma @@ -269,4 +355,45 @@ def 
test_soma_overlaps(): [2, 3, 0], # outside the soma ] np.testing.assert_array_equal(sm.overlaps(points), [True, True, True, True, True, False]) - np.testing.assert_array_equal(sm.overlaps(points, exclude_boundary=True), [True, False, False, True, True, False]) + np.testing.assert_array_equal( + sm.overlaps(points, exclude_boundary=True), [True, False, False, True, True, False] + ) + + +def test_morphio_soma(): + sm = load_morphology( + StringIO( + u""" + ((CellBody) + (1 0 0 1) + (1 1 0 1) + (-1 1 0 1) + (-1 0 0 1)) """ + ), + reader='asc', + ).soma + + morphio_soma = soma._morphio_soma(sm) + assert isinstance(morphio_soma, morphio.Soma) + + morphio_soma = soma._morphio_soma(sm.to_morphio()) + assert isinstance(morphio_soma, morphio.Soma) + + with pytest.raises(TypeError, match="Unknown soma type"): + soma._morphio_soma(10) + + +def test_soma_undefined_area(): + sm = load_morphology( + StringIO( + u""" + ((CellBody) + (1 0 0 1) + (1 1 0 1) + (-1 1 0 1) + (-1 0 0 1)) """ + ), + reader='asc', + ).soma + res = soma._soma_undefined_area(sm) + assert_almost_equal(res, 15.70796372920407, decimal=5) diff --git a/tests/core/test_types.py b/tests/core/test_types.py index e99449fa9..5950b5ea4 100644 --- a/tests/core/test_types.py +++ b/tests/core/test_types.py @@ -1,4 +1,3 @@ - # Copyright (c) 2016, Ecole Polytechnique Federale de Lausanne, Blue Brain Project # All rights reserved. 
# @@ -72,16 +71,15 @@ def test_tree_type_checker_broken(): assert tree_filter(mock_tree) tree_filter = tree_type_checker( - NeuriteType.axon, - NeuriteType.apical_dendrite, - NeuriteType.basal_dendrite) + NeuriteType.axon, NeuriteType.apical_dendrite, NeuriteType.basal_dendrite + ) mock_tree.type = NeuriteType.soma assert not tree_filter(mock_tree) def test_tree_type_checker_error(): with pytest.raises(ValueError, match='is not a valid NeuriteType'): - tree_type_checker('all') + tree_type_checker('NOT A VALID NeuriteType') def test_type_filters(): diff --git a/tests/data/mixed/expected_morphology_features.json b/tests/data/mixed/expected_morphology_features.json new file mode 100644 index 000000000..2ea067cb0 --- /dev/null +++ b/tests/data/mixed/expected_morphology_features.json @@ -0,0 +1,5269 @@ +{ + "soma_radius": [ + { + "expected_wout_subtrees": 0.5, + "expected_with_subtrees": 0.5 + } + ], + "soma_surface_area": [ + { + "expected_wout_subtrees": 3.141592653589793, + "expected_with_subtrees": 3.141592653589793 + } + ], + "soma_volume": [ + { + "expected_wout_subtrees": 0.5235987755982988, + "expected_with_subtrees": 0.5235987755982988 + } + ], + "number_of_sections_per_neurite": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 5, + 9, + 5 + ], + "expected_with_subtrees": [ + 5, + 9, + 5 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 5, + 9 + ], + "expected_with_subtrees": [ + 5, + 4 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 5 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 5 + ], + "expected_with_subtrees": [ + 5 + ] + }, + { + "kwargs": { + "neurite_type": [3, 2] + }, + "expected_wout_subtrees": [ + 5, + 9 + ], + "expected_with_subtrees": [ + 5, + 9 + ] + } + ], + "max_radial_distance": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 4.472136, + 
"expected_with_subtrees": 4.472136 + }, + { + "kwargs": { + "neurite_type": 32, + "origin": [ + 0.0, + 0.0, + 0.0 + ] + }, + "expected_wout_subtrees": 4.472136, + "expected_with_subtrees": 4.472136 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 4.472136, + "expected_with_subtrees": 4.24264 + }, + { + "kwargs": { + "neurite_type": 3, + "origin": [ + 0.0, + 0.0, + 0.0 + ] + }, + "expected_wout_subtrees": 4.472136, + "expected_with_subtrees": 4.242641 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": 0.0, + "expected_with_subtrees": 4.472136 + }, + { + "kwargs": { + "neurite_type": 2, + "origin": [ + 0.0, + 0.0, + 0.0 + ] + }, + "expected_wout_subtrees": 0.0, + "expected_with_subtrees": 4.47213595499958 + }, + { + "kwargs": { + "neurite_type": [3, 2], + "origin": [ + 0.0, + 0.0, + 0.0 + ] + }, + "expected_wout_subtrees": 4.472136, + "expected_with_subtrees": 4.472136 + } + ], + "total_length_per_neurite": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 5.0, + 10.828427, + 5.0 + ], + "expected_with_subtrees": [ + 5.0, + 10.828427, + 5.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 5.0, + 10.828427 + ], + "expected_with_subtrees": [ + 5.0, + 5.414214 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 5.414214 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 5.0 + ], + "expected_with_subtrees": [ + 5.0 + ] + }, + { + "kwargs": { + "neurite_type": [3, 2] + }, + "expected_wout_subtrees": [ + 5.0, + 10.828427 + ], + "expected_with_subtrees": [ + 5.0, + 10.828427 + ] + } + ], + "total_area_per_neurite": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 3.141593, + 6.803702, + 3.141593 + ], + "expected_with_subtrees": [ + 3.141593, + 6.803702, + 3.141593 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + 
"expected_wout_subtrees": [ + 3.141593, + 6.803702 + ], + "expected_with_subtrees": [ + 3.141593, + 3.401851 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 3.401851 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 3.141593 + ], + "expected_with_subtrees": [ + 3.141593 + ] + }, + { + "kwargs": { + "neurite_type": [3, 2] + }, + "expected_wout_subtrees": [ + 3.141593, + 6.803702 + ], + "expected_with_subtrees": [ + 3.141593, + 6.803702 + ] + } + ], + "total_volume_per_neurite": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 0.15708, + 0.340185, + 0.15708 + ], + "expected_with_subtrees": [ + 0.15708, + 0.340185, + 0.15708 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 0.15708, + 0.340185 + ], + "expected_with_subtrees": [ + 0.15708, + 0.170093 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 0.170093 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 0.15708 + ], + "expected_with_subtrees": [ + 0.15708 + ] + }, + { + "kwargs": { + "neurite_type": [3, 2] + }, + "expected_wout_subtrees": [ + 0.15708, + 0.340185 + ], + "expected_with_subtrees": [ + 0.15708, + 0.340185 + ] + } + ], + "trunk_origin_azimuths": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 3.1415927, + 0.0, + 0.0 + ], + "expected_with_subtrees": [ + 3.1415927, + 0.0, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 3.1415927, + 0.0 + ], + "expected_with_subtrees": [ + 3.1415927, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 0.0 + ], + "expected_with_subtrees": [ + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": [3, 2] 
+ }, + "expected_wout_subtrees": [ + 3.1415927, + 0.0 + ], + "expected_with_subtrees": [ + 3.1415927, + 0.0 + ] + } + ], + "trunk_origin_elevations": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 0.0, + 1.5707964, + -1.5707964 + ], + "expected_with_subtrees": [ + 0.0, + 1.5707964, + -1.5707964 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 0.0, + 1.5707964 + ], + "expected_with_subtrees": [ + 0.0, + 1.5707964 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + -1.570796 + ], + "expected_with_subtrees": [ + -1.570796 + ] + }, + { + "kwargs": { + "neurite_type": [3, 2] + }, + "expected_wout_subtrees": [ + 0.0, + 1.5707964 + ], + "expected_with_subtrees": [ + 0.0, + 1.5707964 + ] + } + ], + "trunk_vectors": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + [ + -1.0, + 0.0, + 0.0 + ], + [ + 0.0, + 1.0, + 0.0 + ], + [ + 0.0, + -1.0, + 0.0 + ] + ], + "expected_with_subtrees": [ + [ + -1.0, + 0.0, + 0.0 + ], + [ + 0.0, + 1.0, + 0.0 + ], + [ + 0.0, + -1.0, + 0.0 + ] + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + [ + -1.0, + 0.0, + 0.0 + ], + [ + 0.0, + 1.0, + 0.0 + ] + ], + "expected_with_subtrees": [ + [ + -1.0, + 0.0, + 0.0 + ], + [ + 0.0, + 1.0, + 0.0 + ] + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + [ + 0.0, + -1.0, + 0.0 + ] + ], + "expected_with_subtrees": [ + [ + 0.0, + -1.0, + 0.0 + ] + ] + }, + { + "kwargs": { + "neurite_type": [3, 2] + }, + "expected_wout_subtrees": [ + [ + -1.0, + 0.0, + 0.0 + ], + [ + 0.0, + 1.0, + 0.0 + ] + ], + "expected_with_subtrees": [ + [ + -1.0, + 0.0, + 0.0 + ], + [ + 0.0, + 1.0, + 0.0 + ] + ] + } + ], + 
"trunk_angles": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 1.570796, + 3.141592, + 1.570796 + ], + "expected_with_subtrees": [ + 1.570796, + 3.141592, + 1.570796 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 1.5707964, + 1.570796 + ], + "expected_with_subtrees": [ + 1.5707964, + 1.570796 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 0.0 + ], + "expected_with_subtrees": [ + 0.0 + ] + } + ], + "trunk_angles_from_vector": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + [ + 1.5707963267948966, + -1.5707963267948966, + 3.141592653589793 + ], + [ + 0.0, + 0.0, + 0.0 + ], + [ + 3.141592653589793, + 3.141592653589793, + 0.0 + ] + ], + "expected_with_subtrees": [ + [ + 1.5707963267948966, + -1.5707963267948966, + 3.141592653589793 + ], + [ + 0.0, + 0.0, + 0.0 + ], + [ + 3.141592653589793, + 3.141592653589793, + 0.0 + ] + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + [ + 1.5707963267948966, + -1.5707963267948966, + 3.141592653589793 + ], + [ + 0.0, + 0.0, + 0.0 + ] + ], + "expected_with_subtrees": [ + [ + 1.5707963267948966, + -1.5707963267948966, + 3.141592653589793 + ], + [ + 0.0, + 0.0, + 0.0 + ] + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [] + } + ], + "trunk_angles_inter_types": [ + { + "kwargs": { + "source_neurite_type": 3, + "target_neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [] + } + ], + "trunk_origin_radii": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 0.1, + 0.1, + 0.1 + ], + "expected_with_subtrees": [ + 0.1, + 0.1, + 0.1 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 0.1, + 0.1 + ], + 
"expected_with_subtrees": [ + 0.1, + 0.1 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 0.1 + ], + "expected_with_subtrees": [ + 0.1 + ] + } + ], + "trunk_section_lengths": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 1.0, + 1.414213, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.414213, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 1.0, + 1.414213 + ], + "expected_with_subtrees": [ + 1.0, + 1.414213 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 1.0 + ], + "expected_with_subtrees": [ + 1.0 + ] + } + ], + "number_of_neurites": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 3, + "expected_with_subtrees": 3 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 2, + "expected_with_subtrees": 2 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": 0, + "expected_with_subtrees": 1 + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": 1, + "expected_with_subtrees": 1 + } + ], + "neurite_volume_density": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 0.235619, + 0.063785, + 0.235619 + ], + "expected_with_subtrees": [ + 0.235619, + 0.063785, + 0.235619 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 0.235619, + 0.063785 + ], + "expected_with_subtrees": [ + 0.235619, + 0.255139 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 0.170093 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 0.235619 + ], + "expected_with_subtrees": [ + 0.235619 + ] + } 
+ ], + "sholl_crossings": [ + { + "kwargs": { + "neurite_type": 32, + "radii": [ + 1.5, + 3.5 + ] + }, + "expected_wout_subtrees": [ + 3, + 2 + ], + "expected_with_subtrees": [ + 3, + 2 + ] + }, + { + "kwargs": { + "neurite_type": 3, + "radii": [ + 1.5, + 3.5 + ] + }, + "expected_wout_subtrees": [ + 2, + 2 + ], + "expected_with_subtrees": [ + 2, + 1 + ] + }, + { + "kwargs": { + "neurite_type": 2, + "radii": [ + 1.5, + 3.5 + ] + }, + "expected_wout_subtrees": [ + 0, + 0 + ], + "expected_with_subtrees": [ + 0, + 1 + ] + }, + { + "kwargs": { + "neurite_type": 4, + "radii": [ + 1.5, + 3.5 + ] + }, + "expected_wout_subtrees": [ + 1, + 0 + ], + "expected_with_subtrees": [ + 1, + 0 + ] + } + ], + "sholl_frequency": [ + { + "kwargs": { + "neurite_type": 32, + "step_size": 3 + }, + "expected_wout_subtrees": [ + 0, + 2 + ], + "expected_with_subtrees": [ + 0, + 2 + ] + }, + { + "kwargs": { + "neurite_type": 3, + "step_size": 3 + }, + "expected_wout_subtrees": [ + 0, + 2 + ], + "expected_with_subtrees": [ + 0, + 1 + ] + }, + { + "kwargs": { + "neurite_type": 2, + "step_size": 3 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 0, + 1 + ] + }, + { + "kwargs": { + "neurite_type": 4, + "step_size": 2 + }, + "expected_wout_subtrees": [ + 0, + 1 + ], + "expected_with_subtrees": [ + 0, + 1 + ] + } + ], + "total_width": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 6.0, + "expected_with_subtrees": 6.0 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 6.0, + "expected_with_subtrees": 4.0 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": 0.0, + "expected_with_subtrees": 2.0 + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": 1.0, + "expected_with_subtrees": 1.0 + } + ], + "total_height": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 7.0, + "expected_with_subtrees": 7.0 + }, + { + "kwargs": { + "neurite_type": 3 + }, + 
"expected_wout_subtrees": 4.0, + "expected_with_subtrees": 4.0 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": 0.0, + "expected_with_subtrees": 2.0 + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": 2.0, + "expected_with_subtrees": 2.0 + } + ], + "total_depth": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 2.0, + "expected_with_subtrees": 2.0 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 2.0, + "expected_with_subtrees": 2.0 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": 0.0, + "expected_with_subtrees": 2.0 + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": 2.0, + "expected_with_subtrees": 2.0 + } + ], + "volume_density": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 0.01570426, + "expected_with_subtrees": 0.01570426 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 0.02983588, + "expected_with_subtrees": 0.04907583 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": NaN, + "expected_with_subtrees": 0.17009254 + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": 0.23561945, + "expected_with_subtrees": 0.23561945 + } + ], + "aspect_ratio": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 0.630311, + "expected_with_subtrees": 0.630311 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 0.305701, + "expected_with_subtrees": 0.284467 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": NaN, + "expected_with_subtrees": 0.666667 + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": 0.5, + "expected_with_subtrees": 0.5 + } + ], + "circularity": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 0.739583, + "expected_with_subtrees": 0.739583 + }, + { + "kwargs": { + "neurite_type": 3 + }, + 
"expected_wout_subtrees": 0.525588, + "expected_with_subtrees": 0.483687 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": NaN, + "expected_with_subtrees": 0.544013 + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": 0.539012, + "expected_with_subtrees": 0.539012 + } + ], + "shape_factor": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 0.40566, + "expected_with_subtrees": 0.40566 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 0.21111, + "expected_with_subtrees": 0.1875 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": NaN, + "expected_with_subtrees": 0.3 + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": 0.25, + "expected_with_subtrees": 0.25 + } + ], + "length_fraction_above_soma": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 0.567898, + "expected_with_subtrees": 0.567898 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 0.74729, + "expected_with_subtrees": 0.61591 + } + ], + "number_of_segments": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 19, + "expected_with_subtrees": 19 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 14, + "expected_with_subtrees": 9 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": 0, + "expected_with_subtrees": 5 + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": 5, + "expected_with_subtrees": 5 + } + ], + "number_of_leaves": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 11, + "expected_with_subtrees": 11 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 8, + "expected_with_subtrees": 5 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": 0, + "expected_with_subtrees": 3 + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": 3, + 
"expected_with_subtrees": 3 + } + ], + "total_length": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 20.828427, + "expected_with_subtrees": 20.828427 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 15.828427, + "expected_with_subtrees": 10.414214 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": 0.0, + "expected_with_subtrees": 5.414214 + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": 5.0, + "expected_with_subtrees": 5.0 + } + ], + "total_area": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 13.086887, + "expected_with_subtrees": 13.086887 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 9.945294, + "expected_with_subtrees": 6.543443 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": 0.0, + "expected_with_subtrees": 3.401851 + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": 3.141593, + "expected_with_subtrees": 3.141593 + } + ], + "total_volume": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 0.654344, + "expected_with_subtrees": 0.654344 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 0.497265, + "expected_with_subtrees": 0.327172 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": 0.0, + "expected_with_subtrees": 0.170093 + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": 0.15708, + "expected_with_subtrees": 0.15708 + } + ], + "section_lengths": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.414214, + 2.0, + 1.0, + 1.0, + 1.414214, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.414214, + 2.0, + 1.0, + 1.0, + 1.414214, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + { + 
"kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.414214, + 2.0, + 1.0, + 1.0, + 1.414214, + 1.0, + 1.0, + 1.0, + 1 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.414214, + 2.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 1.414214, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + } + ], + "section_areas": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 0.628318, + 0.628318, + 0.628318, + 0.628318, + 0.628318, + 0.888577, + 1.256637, + 0.628319, + 0.628319, + 0.888577, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628318, + 0.628318, + 0.628318, + 0.628318, + 0.628318 + ], + "expected_with_subtrees": [ + 0.628318, + 0.628318, + 0.628318, + 0.628318, + 0.628318, + 0.888577, + 1.256637, + 0.628319, + 0.628319, + 0.888577, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628318, + 0.628318, + 0.628318, + 0.628318, + 0.628318 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 0.628318, + 0.628318, + 0.628318, + 0.628318, + 0.628318, + 0.888577, + 1.256637, + 0.628319, + 0.628319, + 0.888577, + 0.628319, + 0.628319, + 0.628319, + 0.628319 + ], + "expected_with_subtrees": [ + 0.628318, + 0.628318, + 0.628318, + 0.628318, + 0.628318, + 0.888577, + 1.256637, + 0.628319, + 0.628319 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 0.888577, + 0.628319, + 0.628319, + 0.628319, + 0.628319 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 0.628318, + 0.628318, + 0.628318, + 0.628318, + 0.628318 + ], + "expected_with_subtrees": [ + 0.628318, + 0.628318, + 0.628318, + 0.628318, + 
0.628318 + ] + } + ], + "section_volumes": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.044429, + 0.062832, + 0.031416, + 0.031416, + 0.044429, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416 + ], + "expected_with_subtrees": [ + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.044429, + 0.062832, + 0.031416, + 0.031416, + 0.044429, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.044429, + 0.062832, + 0.031416, + 0.031416, + 0.044429, + 0.031416, + 0.031416, + 0.031416, + 0.031416 + ], + "expected_with_subtrees": [ + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.044429, + 0.062832, + 0.031416, + 0.031416 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 0.044429, + 0.031416, + 0.031416, + 0.031416, + 0.031416 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 0.031415, + 0.031415, + 0.031415, + 0.031415, + 0.031415 + ], + "expected_with_subtrees": [ + 0.031415, + 0.031415, + 0.031415, + 0.031415, + 0.031415 + ] + } + ], + "section_tortuosity": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + 
], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + } + ], + "section_radial_distances": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 2.0, + 3.0, + 3.162278, + 3.162278, + 2.236068, + 2.236068, + 4.123106, + 4.24264, + 4.24264, + 3.605551, + 4.472136, + 4.24264, + 4.358899, + 4.358899, + 2.0, + 3.0, + 3.162278, + 3.162278, + 2.236068 + ], + "expected_with_subtrees": [ + 2.0, + 3.0, + 3.162278, + 3.162278, + 2.236068, + 2.236068, + 4.123106, + 4.24264, + 4.24264, + 3.605551, + 4.472136, + 4.24264, + 4.358899, + 4.358899, + 2.0, + 3.0, + 3.162278, + 3.162278, + 2.236068 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 2.0, + 3.0, + 3.162278, + 3.162278, + 2.236068, + 2.236068, + 4.123106, + 4.24264, + 4.24264, + 3.605551, + 4.472136, + 4.24264, + 4.358899, + 4.358899 + ], + "expected_with_subtrees": [ + 2.0, + 3.0, + 3.162278, + 3.162278, + 2.236068, + 2.236068, + 4.123106, + 4.24264, + 4.24264 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 3.605551, + 4.472136, + 4.24264, + 4.358899, + 4.358899 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 2.0, + 3.0, + 3.162278, + 3.162278, + 2.236068 + ], + "expected_with_subtrees": [ + 2.0, + 3.0, + 3.162278, + 3.162278, + 2.236068 + ] + } + ], + "section_term_radial_distances": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 3.162278, + 3.162278, + 2.236068, + 4.24264, + 4.24264, + 4.472136, + 4.358899, + 4.358899, + 3.162278, + 3.162278, + 2.236068 + 
], + "expected_with_subtrees": [ + 3.162278, + 3.162278, + 2.236068, + 4.24264, + 4.24264, + 4.472136, + 4.358899, + 4.358899, + 3.162278, + 3.162278, + 2.236068 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 3.162278, + 3.162278, + 2.236068, + 4.24264, + 4.24264, + 4.472136, + 4.358899, + 4.358899 + ], + "expected_with_subtrees": [ + 3.162278, + 3.162278, + 2.236068, + 4.24264, + 4.24264 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 4.472136, + 4.358899, + 4.358899 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 3.162278, + 3.162278, + 2.236068 + ], + "expected_with_subtrees": [ + 3.162278, + 3.162278, + 2.236068 + ] + } + ], + "section_bif_radial_distances": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 2.0, + 3.0, + 2.236068, + 4.123106, + 3.605551, + 4.24264, + 2.0, + 3.0 + ], + "expected_with_subtrees": [ + 2.0, + 3.0, + 2.236068, + 4.123106, + 3.605551, + 4.24264, + 2.0, + 3.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 2.0, + 3.0, + 2.236068, + 4.123106, + 3.605551, + 4.24264 + ], + "expected_with_subtrees": [ + 2.0, + 3.0, + 4.123106 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 3.605551, + 4.24264 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 2.0, + 3.0 + ], + "expected_with_subtrees": [ + 2.0, + 3.0 + ] + }, + { + "kwargs": { + "neurite_type": [3, 2] + }, + "expected_wout_subtrees": [ + 2.0, + 3.0, + 2.236068, + 4.123106, + 3.605551, + 4.24264 + ], + "expected_with_subtrees": [ + 2.0, + 3.0, + 2.236068, + 4.123106, + 3.605551, + 4.24264 + ] + } + ], + "section_end_distances": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.414214, + 2.0, + 1.0, + 1.0, + 1.414214, + 1.0, + 
1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.414214, + 2.0, + 1.0, + 1.0, + 1.414214, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.414214, + 2.0, + 1.0, + 1.0, + 1.414214, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.414214, + 2.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 1.414214, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + } + ], + "section_term_lengths": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0 + ] + } + ], + "section_taper_rates": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0 + ], + "expected_with_subtrees": [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0 + ], + "expected_with_subtrees": [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0 + ], + "expected_with_subtrees": [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0 + ] + } + ], + "section_bif_lengths": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.414214, + 2.0, + 1.414214, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.414214, + 2.0, + 1.414214, + 1.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.414214, + 2.0, + 1.414214, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 2.0 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 1.414214, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": [3, 2] + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.414214, + 2.0, + 1.414214, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.414214, + 2.0, + 1.414214, + 1.0 + ] + } + ], + "section_branch_orders": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 0, + 1, + 2, + 2, + 1, + 0, + 1, + 2, + 2, + 1, + 2, + 2, + 3, + 3, + 0, + 1, + 2, + 2, + 1 + ], + "expected_with_subtrees": [ + 0, + 1, + 2, + 2, + 1, + 0, + 1, + 2, + 2, + 1, + 2, + 2, + 3, + 3, + 0, + 1, + 2, + 2, + 1 + ] + }, + 
{ + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 0, + 1, + 2, + 2, + 1, + 0, + 1, + 2, + 2, + 1, + 2, + 2, + 3, + 3 + ], + "expected_with_subtrees": [ + 0, + 1, + 2, + 2, + 1, + 0, + 1, + 2, + 2 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 1, + 2, + 2, + 3, + 3 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 0, + 1, + 2, + 2, + 1 + ], + "expected_with_subtrees": [ + 0, + 1, + 2, + 2, + 1 + ] + } + ], + "section_bif_branch_orders": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 0, + 1, + 0, + 1, + 1, + 2, + 0, + 1 + ], + "expected_with_subtrees": [ + 0, + 1, + 0, + 1, + 1, + 2, + 0, + 1 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 0, + 1, + 0, + 1, + 1, + 2 + ], + "expected_with_subtrees": [ + 0, + 1, + 1 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 1, + 2 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 0, + 1 + ], + "expected_with_subtrees": [ + 0, + 1 + ] + } + ], + "section_term_branch_orders": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 2, + 2, + 1, + 2, + 2, + 2, + 3, + 3, + 2, + 2, + 1 + ], + "expected_with_subtrees": [ + 2, + 2, + 1, + 2, + 2, + 2, + 3, + 3, + 2, + 2, + 1 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 2, + 2, + 1, + 2, + 2, + 2, + 3, + 3 + ], + "expected_with_subtrees": [ + 2, + 2, + 1, + 2, + 2 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 2, + 3, + 3 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 2, + 2, + 1 + ], + "expected_with_subtrees": [ + 2, + 2, + 1 + ] + } + ], + "section_strahler_orders": [ + { + "kwargs": { + "neurite_type": 32 + }, + 
"expected_wout_subtrees": [ + 2, + 2, + 1, + 1, + 1, + 3, + 2, + 1, + 1, + 2, + 1, + 2, + 1, + 1, + 2, + 2, + 1, + 1, + 1 + ], + "expected_with_subtrees": [ + 2, + 2, + 1, + 1, + 1, + 3, + 2, + 1, + 1, + 2, + 1, + 2, + 1, + 1, + 2, + 2, + 1, + 1, + 1 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 2, + 2, + 1, + 1, + 1, + 3, + 2, + 1, + 1, + 2, + 1, + 2, + 1, + 1 + ], + "expected_with_subtrees": [ + 2, + 2, + 1, + 1, + 1, + 3, + 2, + 1, + 1 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 2, + 1, + 2, + 1, + 1 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 2, + 2, + 1, + 1, + 1 + ], + "expected_with_subtrees": [ + 2, + 2, + 1, + 1, + 1 + ] + } + ], + "segment_lengths": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.414214, + 2.0, + 1.0, + 1.0, + 1.414214, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.414214, + 2.0, + 1.0, + 1.0, + 1.414214, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.414214, + 2.0, + 1.0, + 1.0, + 1.414214, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.414214, + 2.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 1.414214, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": [3, 2] + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.414214, + 2.0, + 1.0, + 1.0, + 
1.414214, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.414214, + 2.0, + 1.0, + 1.0, + 1.414214, + 1.0, + 1.0, + 1.0, + 1.0 + ] + } + ], + "segment_areas": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.888577, + 1.256637, + 0.628319, + 0.628319, + 0.888577, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628319 + ], + "expected_with_subtrees": [ + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.888577, + 1.256637, + 0.628319, + 0.628319, + 0.888577, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628319 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.888577, + 1.256637, + 0.628319, + 0.628319, + 0.888577, + 0.628319, + 0.628319, + 0.628319, + 0.628319 + ], + "expected_with_subtrees": [ + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.628319, + 0.888577, + 1.256637, + 0.628319, + 0.628319 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 0.888577, + 0.628319, + 0.628319, + 0.628319, + 0.628319 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 0.628318, + 0.628318, + 0.628318, + 0.628318, + 0.628318 + ], + "expected_with_subtrees": [ + 0.628318, + 0.628318, + 0.628318, + 0.628318, + 0.628318 + ] + } + ], + "segment_volumes": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 0.031415, + 0.031415, + 0.031415, + 0.031415, + 0.031415, + 0.044429, + 0.062832, + 0.031416, + 0.031416, + 0.044429, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416 + ], + "expected_with_subtrees": [ + 0.031415, + 0.031415, + 0.031415, + 0.031415, + 
0.031415, + 0.044429, + 0.062832, + 0.031416, + 0.031416, + 0.044429, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416, + 0.031416 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 0.031415, + 0.031415, + 0.031415, + 0.031415, + 0.031415, + 0.044429, + 0.062832, + 0.031416, + 0.031416, + 0.044429, + 0.031416, + 0.031416, + 0.031416, + 0.031416 + ], + "expected_with_subtrees": [ + 0.031415, + 0.031415, + 0.031415, + 0.031415, + 0.031415, + 0.044429, + 0.062832, + 0.031416, + 0.031416 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 0.044429, + 0.031416, + 0.031416, + 0.031416, + 0.031416 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 0.031415, + 0.031415, + 0.031415, + 0.031415, + 0.031415 + ], + "expected_with_subtrees": [ + 0.031415, + 0.031415, + 0.031415, + 0.031415, + 0.031415 + ] + } + ], + "segment_radii": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1 + ], + "expected_with_subtrees": [ + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1 + ], + "expected_with_subtrees": [ + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1, + 0.1 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 0.1, + 0.1, + 0.1, + 0.1, + 0.1 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 0.1, + 0.1, + 0.1, + 0.1, + 0.1 + ], + "expected_with_subtrees": [ + 0.1, + 0.1, + 0.1, + 0.1, + 0.1 + ] + } 
+ ], + "segment_taper_rates": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0 + ], + "expected_with_subtrees": [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0 + ], + "expected_with_subtrees": [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0 + ], + "expected_with_subtrees": [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0 + ] + } + ], + "segment_radial_distances": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 1.5, + 2.5, + 3.041381, + 3.041381, + 2.061553, + 1.581139, + 3.162278, + 4.153312, + 4.153312, + 2.915476, + 4.031129, + 3.905125, + 4.272002, + 4.272002, + 1.5, + 2.5, + 3.041381, + 3.041381, + 2.061553 + ], + "expected_with_subtrees": [ + 1.5, + 2.5, + 3.041381, + 3.041381, + 2.061553, + 1.581139, + 3.162278, + 4.153312, + 4.153312, + 2.915476, + 4.031129, + 3.905125, + 4.272002, + 4.272002, + 1.5, + 2.5, + 3.041381, + 3.041381, + 2.061553 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 1.5, + 2.5, + 3.041381, + 3.041381, + 2.061553, + 1.581139, + 3.162278, + 4.153312, + 4.153312, + 2.915476, + 4.031129, + 3.905125, + 4.272002, + 4.272002 + ], + "expected_with_subtrees": [ + 1.5, + 2.5, + 3.041381, + 3.041381, + 2.061553, + 1.581139, + 3.162278, + 4.153312, + 4.153312 + ] + }, + { + "kwargs": { + "neurite_type": 2 
+ }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 2.915476, + 4.031129, + 3.905125, + 4.272002, + 4.272002 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 1.5, + 2.5, + 3.041381, + 3.041381, + 2.061553 + ], + "expected_with_subtrees": [ + 1.5, + 2.5, + 3.041381, + 3.041381, + 2.061553 + ] + } + ], + "segment_midpoints": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + [ + -1.5, + 0.0, + 0.0 + ], + [ + -2.5, + 0.0, + 0.0 + ], + [ + -3.0, + 0.0, + 0.5 + ], + [ + -3.0, + 0.0, + -0.5 + ], + [ + -2.0, + 0.5, + 0.0 + ], + [ + 0.5, + 1.5, + 0.0 + ], + [ + 1.0, + 3.0, + 0.0 + ], + [ + 1.0, + 4.0, + 0.5 + ], + [ + 1.0, + 4.0, + -0.5 + ], + [ + 1.5, + 2.5, + 0.0 + ], + [ + 2.0, + 3.5, + 0.0 + ], + [ + 2.5, + 3.0, + 0.0 + ], + [ + 3.0, + 3.0, + 0.5 + ], + [ + 3.0, + 3.0, + -0.5 + ], + [ + 0.0, + -1.5, + 0.0 + ], + [ + 0.0, + -2.5, + 0.0 + ], + [ + 0.0, + -3.0, + 0.5 + ], + [ + 0.0, + -3.0, + -0.5 + ], + [ + 0.5, + -2.0, + 0.0 + ] + ], + "expected_with_subtrees": [ + [ + -1.5, + 0.0, + 0.0 + ], + [ + -2.5, + 0.0, + 0.0 + ], + [ + -3.0, + 0.0, + 0.5 + ], + [ + -3.0, + 0.0, + -0.5 + ], + [ + -2.0, + 0.5, + 0.0 + ], + [ + 0.5, + 1.5, + 0.0 + ], + [ + 1.0, + 3.0, + 0.0 + ], + [ + 1.0, + 4.0, + 0.5 + ], + [ + 1.0, + 4.0, + -0.5 + ], + [ + 1.5, + 2.5, + 0.0 + ], + [ + 2.0, + 3.5, + 0.0 + ], + [ + 2.5, + 3.0, + 0.0 + ], + [ + 3.0, + 3.0, + 0.5 + ], + [ + 3.0, + 3.0, + -0.5 + ], + [ + 0.0, + -1.5, + 0.0 + ], + [ + 0.0, + -2.5, + 0.0 + ], + [ + 0.0, + -3.0, + 0.5 + ], + [ + 0.0, + -3.0, + -0.5 + ], + [ + 0.5, + -2.0, + 0.0 + ] + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + [ + -1.5, + 0.0, + 0.0 + ], + [ + -2.5, + 0.0, + 0.0 + ], + [ + -3.0, + 0.0, + 0.5 + ], + [ + -3.0, + 0.0, + -0.5 + ], + [ + -2.0, + 0.5, + 0.0 + ], + [ + 0.5, + 1.5, + 0.0 + ], + [ + 1.0, + 3.0, + 0.0 + ], + [ + 1.0, + 4.0, + 0.5 + ], + [ + 1.0, + 4.0, + -0.5 + ], + [ + 1.5, + 2.5, + 0.0 + ], + [ 
+ 2.0, + 3.5, + 0.0 + ], + [ + 2.5, + 3.0, + 0.0 + ], + [ + 3.0, + 3.0, + 0.5 + ], + [ + 3.0, + 3.0, + -0.5 + ] + ], + "expected_with_subtrees": [ + [ + -1.5, + 0.0, + 0.0 + ], + [ + -2.5, + 0.0, + 0.0 + ], + [ + -3.0, + 0.0, + 0.5 + ], + [ + -3.0, + 0.0, + -0.5 + ], + [ + -2.0, + 0.5, + 0.0 + ], + [ + 0.5, + 1.5, + 0.0 + ], + [ + 1.0, + 3.0, + 0.0 + ], + [ + 1.0, + 4.0, + 0.5 + ], + [ + 1.0, + 4.0, + -0.5 + ] + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + [ + 1.5, + 2.5, + 0.0 + ], + [ + 2.0, + 3.5, + 0.0 + ], + [ + 2.5, + 3.0, + 0.0 + ], + [ + 3.0, + 3.0, + 0.5 + ], + [ + 3.0, + 3.0, + -0.5 + ] + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + [ + 0.0, + -1.5, + 0.0 + ], + [ + 0.0, + -2.5, + 0.0 + ], + [ + 0.0, + -3.0, + 0.5 + ], + [ + 0.0, + -3.0, + -0.5 + ], + [ + 0.5, + -2.0, + 0.0 + ] + ], + "expected_with_subtrees": [ + [ + 0.0, + -1.5, + 0.0 + ], + [ + 0.0, + -2.5, + 0.0 + ], + [ + 0.0, + -3.0, + 0.5 + ], + [ + 0.0, + -3.0, + -0.5 + ], + [ + 0.5, + -2.0, + 0.0 + ] + ] + } + ], + "segment_meander_angles": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [] + } + ], + "number_of_sections": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 19, + "expected_with_subtrees": 19 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 14, + "expected_with_subtrees": 9 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": 0, + "expected_with_subtrees": 5 + }, + { + "kwargs": { + "neurite_type": 4 + }, + 
"expected_wout_subtrees": 5, + "expected_with_subtrees": 5 + }, + { + "kwargs": { + "neurite_type": [3, 2] + }, + "expected_wout_subtrees": 14, + "expected_with_subtrees": 14 + } + ], + "number_of_bifurcations": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 8, + "expected_with_subtrees": 8 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 6, + "expected_with_subtrees": 3 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": 0, + "expected_with_subtrees": 2 + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": 2, + "expected_with_subtrees": 2 + }, + { + "kwargs": { + "neurite_type": [3, 2] + }, + "expected_wout_subtrees": 6, + "expected_with_subtrees": 6 + } + ], + "number_of_forking_points": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": 8, + "expected_with_subtrees": 8 + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": 6, + "expected_with_subtrees": 3 + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": 0, + "expected_with_subtrees": 2 + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": 2, + "expected_with_subtrees": 2 + }, + { + "kwargs": { + "neurite_type": [3, 2] + }, + "expected_wout_subtrees": 6, + "expected_with_subtrees": 6 + } + ], + "local_bifurcation_angles": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 1.570796, + 3.141593, + 0.785398, + 3.141593, + 1.570796, + 3.141593, + 1.570796, + 3.141593 + ], + "expected_with_subtrees": [ + 1.570796, + 3.141593, + 0.785398, + 3.141593, + 1.570796, + 3.141593, + 1.570796, + 3.141593 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 1.570796, + 3.141593, + 0.785398, + 3.141593, + 1.570796, + 3.141593 + ], + "expected_with_subtrees": [ + 1.570796, + 3.141593, + 3.141593 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + 
"expected_with_subtrees": [ + 1.570796, + 3.141593 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 1.570796, + 3.141593 + ], + "expected_with_subtrees": [ + 1.570796, + 3.141593 + ] + } + ], + "remote_bifurcation_angles": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 1.570796, + 3.141593, + 0.785398, + 3.141593, + 1.570796, + 3.141593, + 1.570796, + 3.141593 + ], + "expected_with_subtrees": [ + 1.570796, + 3.141593, + 0.785398, + 3.141593, + 1.570796, + 3.141593, + 1.570796, + 3.141593 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 1.570796, + 3.141593, + 0.785398, + 3.141593, + 1.570796, + 3.141593 + ], + "expected_with_subtrees": [ + 1.570796, + 3.141593, + 3.141593 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 1.570796, + 3.141593 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 1.570796, + 3.141593 + ], + "expected_with_subtrees": [ + 1.570796, + 3.141593 + ] + }, + { + "kwargs": { + "neurite_type": [3, 2] + }, + "expected_wout_subtrees": [ + 1.570796, + 3.141593, + 0.785398, + 3.141593, + 1.570796, + 3.141593 + ], + "expected_with_subtrees": [ + 1.570796, + 3.141593, + 0.785398, + 3.141593, + 1.570796, + 3.141593 + ] + } + ], + "sibling_ratios": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "expected_with_subtrees": [ + 1.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 1.0, + 1.0 
+ ], + "expected_with_subtrees": [ + 1.0, + 1.0 + ] + } + ], + "partition_pairs": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + [ + 3.0, + 1.0 + ], + [ + 1.0, + 1.0 + ], + [ + 3.0, + 5.0 + ], + [ + 1.0, + 1.0 + ], + [ + 1.0, + 3.0 + ], + [ + 1.0, + 1.0 + ], + [ + 3.0, + 1.0 + ], + [ + 1.0, + 1.0 + ] + ], + "expected_with_subtrees": [ + [ + 3.0, + 1.0 + ], + [ + 1.0, + 1.0 + ], + [ + 3.0, + 5.0 + ], + [ + 1.0, + 1.0 + ], + [ + 1.0, + 3.0 + ], + [ + 1.0, + 1.0 + ], + [ + 3.0, + 1.0 + ], + [ + 1.0, + 1.0 + ] + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + [ + 3.0, + 1.0 + ], + [ + 1.0, + 1.0 + ], + [ + 3.0, + 5.0 + ], + [ + 1.0, + 1.0 + ], + [ + 1.0, + 3.0 + ], + [ + 1.0, + 1.0 + ] + ], + "expected_with_subtrees": [ + [ + 3.0, + 1.0 + ], + [ + 1.0, + 1.0 + ], + [ + 1.0, + 1.0 + ] + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + [ + 1.0, + 3.0 + ], + [ + 1.0, + 1.0 + ] + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + [ + 3.0, + 1.0 + ], + [ + 1.0, + 1.0 + ] + ], + "expected_with_subtrees": [ + [ + 3.0, + 1.0 + ], + [ + 1.0, + 1.0 + ] + ] + }, + { + "kwargs": { + "neurite_type": [3, 2] + }, + "expected_wout_subtrees": [ + [ + 3.0, + 1.0 + ], + [ + 1.0, + 1.0 + ], + [ + 3.0, + 5.0 + ], + [ + 1.0, + 1.0 + ], + [ + 1.0, + 3.0 + ], + [ + 1.0, + 1.0 + ] + ], + "expected_with_subtrees": [ + [ + 3.0, + 1.0 + ], + [ + 1.0, + 1.0 + ], + [ + 3.0, + 5.0 + ], + [ + 1.0, + 1.0 + ], + [ + 1.0, + 3.0 + ], + [ + 1.0, + 1.0 + ] + ] + } + ], + "diameter_power_relations": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 2.0, + 2.0, + 2.0, + 2.0, + 2.0, + 2.0, + 2.0, + 2.0 + ], + "expected_with_subtrees": [ + 2.0, + 2.0, + 2.0, + 2.0, + 2.0, + 2.0, + 2.0, + 2.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 2.0, + 2.0, + 2.0, + 2.0, + 2.0, + 2.0 + ], + 
"expected_with_subtrees": [ + 2.0, + 2.0, + 2.0 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 2.0, + 2.0 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 2.0, + 2.0 + ], + "expected_with_subtrees": [ + 2.0, + 2.0 + ] + } + ], + "bifurcation_partitions": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 3.0, + 1.0, + 1.666667, + 1.0, + 3.0, + 1.0, + 3.0, + 1.0 + ], + "expected_with_subtrees": [ + 3.0, + 1.0, + 1.666667, + 1.0, + 3.0, + 1.0, + 3.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 3.0, + 1.0, + 1.666667, + 1.0, + 3.0, + 1.0 + ], + "expected_with_subtrees": [ + 3.0, + 1.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 3.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 3.0, + 1.0 + ], + "expected_with_subtrees": [ + 3.0, + 1.0 + ] + }, + { + "kwargs": { + "neurite_type": [3, 2] + }, + "expected_wout_subtrees": [ + 3.0, + 1.0, + 1.666667, + 1.0, + 3.0, + 1.0 + ], + "expected_with_subtrees": [ + 3.0, + 1.0, + 1.666667, + 1.0, + 3.0, + 1.0 + ] + } + ], + "section_path_distances": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 1.0, + 2.0, + 3.0, + 3.0, + 2.0, + 1.414213, + 3.414213, + 4.414213, + 4.414213, + 2.828427, + 3.828427, + 3.828427, + 4.828427, + 4.828427, + 1.0, + 2.0, + 3.0, + 3.0, + 2.0 + ], + "expected_with_subtrees": [ + 1.0, + 2.0, + 3.0, + 3.0, + 2.0, + 1.414213, + 3.414213, + 4.414213, + 4.414213, + 2.828427, + 3.828427, + 3.828427, + 4.828427, + 4.828427, + 1.0, + 2.0, + 3.0, + 3.0, + 2.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 1.0, + 2.0, + 3.0, + 3.0, + 2.0, + 1.414214, + 3.414214, + 4.414214, + 4.414214, + 2.828427, + 3.828427, + 3.828427, + 4.828427, + 4.828427 + ], + 
"expected_with_subtrees": [ + 1.0, + 2.0, + 3.0, + 3.0, + 2.0, + 1.414214, + 3.414214, + 4.414214, + 4.414214 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 2.828427, + 3.828427, + 3.828427, + 4.828427, + 4.828427 + ] + } + ], + "terminal_path_lengths": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 3.0, + 3.0, + 2.0, + 4.414214, + 4.414214, + 3.828427, + 4.828427, + 4.828427, + 3.0, + 3.0, + 2.0 + ], + "expected_with_subtrees": [ + 3.0, + 3.0, + 2.0, + 4.414214, + 4.414214, + 3.828427, + 4.828427, + 4.828427, + 3.0, + 3.0, + 2.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 3.0, + 3.0, + 2.0, + 4.414214, + 4.414214, + 3.828427, + 4.828427, + 4.828427 + ], + "expected_with_subtrees": [ + 3.0, + 3.0, + 2.0, + 4.414214, + 4.414214 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 3.828427, + 4.828427, + 4.828427 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 3.0, + 3.0, + 2.0 + ], + "expected_with_subtrees": [ + 3.0, + 3.0, + 2.0 + ] + } + ], + "principal_direction_extents": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 2.0, + 3.596771, + 2.0 + ], + "expected_with_subtrees": [ + 2.0, + 3.596771, + 2.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 2.0, + 3.596771 + ], + "expected_with_subtrees": [ + 2.0, + 3.154926 + ] + }, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 2.235207 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 2.0 + ], + "expected_with_subtrees": [ + 2.0 + ] + } + ], + "partition_asymmetry": [ + { + "kwargs": { + "neurite_type": 32, + "variant": "branch-order", + "method": "petilla" + }, + "expected_wout_subtrees": [ + 0.5, + 0.0, + 0.25, + 0.0, + 0.5, 
+ 0.0, + 0.5, + 0.0 + ], + "expected_with_subtrees": [ + 0.5, + 0.0, + 0.25, + 0.0, + 0.5, + 0.0, + 0.5, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 3, + "variant": "branch-order", + "method": "petilla" + }, + "expected_wout_subtrees": [ + 0.5, + 0.0, + 0.25, + 0.0, + 0.5, + 0.0 + ], + "expected_with_subtrees": [ + 0.5, + 0.0, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 2, + "variant": "branch-order", + "method": "petilla" + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 0.5, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 4, + "variant": "branch-order", + "method": "petilla" + }, + "expected_wout_subtrees": [ + 0.5, + 0.0 + ], + "expected_with_subtrees": [ + 0.5, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 32, + "variant": "length" + }, + "expected_wout_subtrees": [ + 0.4, + 0.0, + 0.130601, + 0.0, + 0.184699, + 0.0, + 0.4, + 0.0 + ], + "expected_with_subtrees": [ + 0.4, + 0.0, + 0.130601, + 0.0, + 0.184699, + 0.0, + 0.4, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 3, + "variant": "length" + }, + "expected_wout_subtrees": [ + 0.4, + 0.0, + 0.130601, + 0.0, + 0.184699, + 0.0 + ], + "expected_with_subtrees": [ + 0.4, + 0.0, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 2, + "variant": "length" + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 0.369398, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 4, + "variant": "length" + }, + "expected_wout_subtrees": [ + 0.4, + 0.0 + ], + "expected_with_subtrees": [ + 0.4, + 0.0 + ] + } + ], + "partition_asymmetry_length": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 0.4, + 0.0, + 0.130601, + 0.0, + 0.184699, + 0.0, + 0.4, + 0.0 + ], + "expected_with_subtrees": [ + 0.4, + 0.0, + 0.130601, + 0.0, + 0.184699, + 0.0, + 0.4, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 3 + }, + "expected_wout_subtrees": [ + 0.4, + 0.0, + 0.130601, + 0.0, + 0.184699, + 0.0 + ], + "expected_with_subtrees": [ + 0.4, + 0.0, + 0.0 + ] + 
}, + { + "kwargs": { + "neurite_type": 2 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 0.369398, + 0.0 + ] + }, + { + "kwargs": { + "neurite_type": 4 + }, + "expected_wout_subtrees": [ + 0.4, + 0.0 + ], + "expected_with_subtrees": [ + 0.4, + 0.0 + ] + } + ], + "segment_path_lengths": [ + { + "kwargs": { + "neurite_type": 32 + }, + "expected_wout_subtrees": [ + 1.0, + 2.0, + 3.0, + 3.0, + 2.0, + 1.414213, + 3.414213, + 4.414213, + 4.414213, + 2.828427, + 3.828427, + 3.828427, + 4.828427, + 4.828427, + 1.0, + 2.0, + 3.0, + 3.0, + 2.0 + ], + "expected_with_subtrees": [ + 1.0, + 2.0, + 3.0, + 3.0, + 2.0, + 1.414213, + 3.414213, + 4.414213, + 4.414213, + 2.828427, + 3.828427, + 3.828427, + 4.828427, + 4.828427, + 1.0, + 2.0, + 3.0, + 3.0, + 2.0 + ] + } + ] +} diff --git a/tests/data/mixed/expected_neurite_features.json b/tests/data/mixed/expected_neurite_features.json new file mode 100644 index 000000000..2131324bd --- /dev/null +++ b/tests/data/mixed/expected_neurite_features.json @@ -0,0 +1,430 @@ +{ + "max_radial_distance": [ + { + "kwargs": { + "section_type": 32 + }, + "expected": [ + 2.236068, + 3.7416575, + 2.236068 + ] + }, + { + "kwargs": { + "section_type": 32, + "origin": [ + 0.0, + 0.0, + 0.0 + ] + }, + "expected": [ + 3.162277, + 4.472135, + 3.162277 + ] + }, + { + "kwargs": { + "section_type": 3 + }, + "expected": [ + 2.236068, + 3.3166249, + 0.0 + ] + }, + { + "kwargs": { + "section_type": 3, + "origin": [ + 0.0, + 0.0, + 0.0 + ] + }, + "expected": [ + 3.162277, + 4.24264, + 0.0 + ] + }, + { + "kwargs": { + "section_type": 2 + }, + "expected": [ + 0.0, + 3.741657, + 0.0 + ] + }, + { + "kwargs": { + "section_type": 2, + "origin": [ + 0.0, + 0.0, + 0.0 + ] + }, + "expected": [ + 0.0, + 4.472135, + 0.0 + ] + } + ], + "volume_density": [ + { + "kwargs": { + "section_type": 32 + }, + "expected": [ + 0.235619, + 0.063784, + 0.235619 + ] + }, + { + "kwargs": { + "section_type": 3 + }, + "expected": [ + 0.235619, + 0.255138, + NaN + ] + }, 
+ { + "kwargs": { + "section_type": 2 + }, + "expected": [ + NaN, + 0.170092, + NaN + ] + }, + { + "kwargs": { + "section_type": 4 + }, + "expected": [ + NaN, + NaN, + 0.2356194583819102 + ] + } + ], + "section_radial_distances": [ + { + "kwargs": { + "section_type": 32 + }, + "expected": [ + [ + 1.0, + 2.0, + 2.236068, + 2.236068, + 1.4142135 + ], + [ + 1.4142135, + 3.1622777, + 3.3166249, + 3.3166249, + 2.828427, + 3.6055512, + 3.6055512, + 3.7416575, + 3.7416575 + ], + [ + 1.0, + 2.0, + 2.236068, + 2.236068, + 1.4142135 + ] + ] + }, + { + "kwargs": { + "section_type": 3 + }, + "expected": [ + [ + 1.0, + 2.0, + 2.236068, + 2.236068, + 1.4142135 + ], + [ + 1.414214, + 3.162278, + 3.316625, + 3.316625 + ], + [] + ] + }, + { + "kwargs": { + "section_type": 2 + }, + "expected": [ + [], + [ + 2.828427, + 3.605551, + 3.605551, + 3.741657, + 3.741657 + ], + [] + ] + }, + { + "kwargs": { + "section_type": 4 + }, + "expected": [ + [], + [], + [ + 1.0, + 2.0, + 2.236068, + 2.236068, + 1.414214 + ] + ] + } + ], + "section_bif_radial_distances": [ + { + "kwargs": { + "section_type": 32 + }, + "expected": [ + [ + 1.0, + 2.0 + ], + [ + 1.414214, + 3.162278, + 2.828427, + 3.605551 + ], + [ + 1.0, + 2.0 + ] + ] + }, + { + "kwargs": { + "section_type": 3 + }, + "expected": [ + [ + 1.0, + 2.0 + ], + [ + 3.162278 + ], + [] + ] + }, + { + "kwargs": { + "section_type": 2 + }, + "expected": [ + [], + [ + 2.828427, + 3.605551 + ], + [] + ] + }, + { + "kwargs": { + "section_type": 4 + }, + "expected": [ + [], + [], + [ + 1.0, + 2.0 + ] + ] + } + ], + "section_term_radial_distances": [ + { + "kwargs": { + "section_type": 32 + }, + "expected": [ + [ + 2.236068, + 2.236068, + 1.414214 + ], + [ + 3.316625, + 3.316625, + 3.605551, + 3.741657, + 3.741657 + ], + [ + 2.236068, + 2.236068, + 1.414214 + ] + ] + }, + { + "kwargs": { + "section_type": 3 + }, + "expected": [ + [ + 2.236068, + 2.236068, + 1.414214 + ], + [ + 3.316625, + 3.316625 + ], + [] + ] + }, + { + "kwargs": { + "section_type": 
2 + }, + "expected": [ + [], + [ + 3.605551, + 3.741657, + 3.741657 + ], + [] + ] + }, + { + "kwargs": { + "section_type": 4 + }, + "expected": [ + [], + [], + [ + 2.236068, + 2.236068, + 1.414214 + ] + ] + } + ], + "segment_radial_distances": [ + { + "kwargs": { + "section_type": 32 + }, + "expected": [ + [ + 0.5, + 1.5, + 2.061553, + 2.061553, + 1.118034 + ], + [ + 0.707107, + 2.236068, + 3.201562, + 3.201562, + 2.12132, + 3.201562, + 3.201562, + 3.640055, + 3.640055 + ], + [ + 0.5, + 1.5, + 2.061553, + 2.061553, + 1.118034 + ] + ] + }, + { + "kwargs": { + "section_type": 3 + }, + "expected": [ + [ + 0.5, + 1.5, + 2.061553, + 2.061553, + 1.118034 + ], + [ + 0.707107, + 2.236068, + 3.201562, + 3.201562 + ], + [] + ] + }, + { + "kwargs": { + "section_type": 2 + }, + "expected": [ + [], + [ + 2.12132, + 3.201562, + 3.201562, + 3.640055, + 3.640055 + ], + [] + ] + }, + { + "kwargs": { + "section_type": 4 + }, + "expected": [ + [], + [], + [ + 0.5, + 1.5, + 2.061553, + 2.061553, + 1.118034 + ] + ] + } + ] +} \ No newline at end of file diff --git a/tests/data/mixed/expected_population_features.json b/tests/data/mixed/expected_population_features.json new file mode 100644 index 000000000..f15a36979 --- /dev/null +++ b/tests/data/mixed/expected_population_features.json @@ -0,0 +1,71 @@ +{ + "sholl_frequency": [ + { + "kwargs": { + "neurite_type": 32, + "step_size": 3 + }, + "expected_wout_subtrees": [ + 0, + 4 + ], + "expected_with_subtrees": [ + 0, + 4 + ] + }, + { + "kwargs": { + "neurite_type": 3, + "step_size": 3 + }, + "expected_wout_subtrees": [ + 0, + 4 + ], + "expected_with_subtrees": [ + 0, + 2 + ] + }, + { + "kwargs": { + "neurite_type": 2, + "step_size": 3 + }, + "expected_wout_subtrees": [], + "expected_with_subtrees": [ + 0, + 2 + ] + }, + { + "kwargs": { + "neurite_type": 4, + "step_size": 2 + }, + "expected_wout_subtrees": [ + 0, + 2 + ], + "expected_with_subtrees": [ + 0, + 2 + ] + }, + { + "kwargs": { + "neurite_type": [3, 2], + "step_size": 3 + }, + 
"expected_wout_subtrees": [ + 0, + 4 + ], + "expected_with_subtrees": [ + 0, + 4 + ] + } + ] +} diff --git a/tests/data/mixed/mixed.h5 b/tests/data/mixed/mixed.h5 new file mode 100644 index 0000000000000000000000000000000000000000..dcc1353a8f3b459d7416a6d8286d4188180a2a68 GIT binary patch literal 10240 zcmeHL&2G~`5FRIO2qhIs2?_;OH9{3VRh7z_2DK?cDx?Aq+#obYjg%iNaVvTv$M7Z` z`v%RC14ujyZvZp9GsJOikU&IHHJ0r8W@dMG_Ves|laJNiy~Tyw3qV&k3v-Y+P#!<{ zf^}>8rE`NfEFG3ZmY-4ZrGf&XfiwTs=VwB zR4b1TXpXOVqMIx~bDgmYez(~P`T+f)cYGKe_dIsAy`bKx2X$#D^~z-}<_pq+jHzGy zpbE7;oRp1lpL=k5LR3#<>r@pGw()Szak=2OZ|}CuX?B zW{SDDQfvSmair;p7tdWaD~*OPDec=*ndt+-fE{)B&8ii5MD`9!}H>sM?BXPiiMACUU` z#`PxoNf#_u@D{E}qO&q-p%ukHI#;;9R~bi6oF0xB^;2KEWqwJzEk zsm(RUC}5Fs2IY0e)UOc2HxN@iYIBn@ZHT21US^y_c_)Nd5Zko}u*#UmxyzW==DiSJ zV@z%IQU;U(Wnfkru)n+Qek{MaNzQ7%n%8t0kaO>lhn4?za$rx^bnY$Ux%aT!ZhM^o zmhqfR-$T0TJB@!TnWvn8txbFY^v#t+zxYfCeJBIUfHI&A{8t7Z@6`6qzDpZ6Ua|+9 zeHrzgo>vdNhWqx^ecJ8$M^9?*$=1f!#skxO{21wvoiQ1TlmTTx8Bhk40cAiLPzIC% OWk4BF29$vrW8f!@(DKg! literal 0 HcmV?d00001 diff --git a/tests/data/swc/heterogeneous_morphology.swc b/tests/data/swc/heterogeneous_morphology.swc new file mode 100644 index 000000000..d3b26ba57 --- /dev/null +++ b/tests/data/swc/heterogeneous_morphology.swc @@ -0,0 +1,25 @@ +# Created by MorphIO v3.3.3 +# index type X Y Z radius parent +1 1 0.000000000 0.000000000 0.000000000 0.500000000 -1 +2 3 -1.000000000 0.000000000 0.000000000 0.100000001 1 +3 3 -2.000000000 0.000000000 0.000000000 0.100000001 2 +4 3 -3.000000000 0.000000000 0.000000000 0.100000001 3 +5 3 -3.000000000 0.000000000 1.000000000 0.100000001 4 +6 3 -3.000000000 0.000000000 -1.000000000 0.100000001 4 +7 3 -2.000000000 1.000000000 0.000000000 0.100000001 3 +8 3 0.000000000 1.000000000 0.000000000 0.100000001 1 +9 3 1.000000000 2.000000000 0.000000000 0.100000001 8 +10 3 1.000000000 4.000000000 0.000000000 0.100000001 9 +11 3 1.000000000 4.000000000 1.000000000 0.100000001 10 +12 3 1.000000000 4.000000000 -1.000000000 0.100000001 10 +13 2 
2.000000000 3.000000000 0.000000000 0.100000001 9 +14 2 2.000000000 4.000000000 0.000000000 0.100000001 13 +15 2 3.000000000 3.000000000 0.000000000 0.100000001 13 +16 2 3.000000000 3.000000000 1.000000000 0.100000001 15 +17 2 3.000000000 3.000000000 -1.000000000 0.100000001 15 +18 4 0.000000000 -1.000000000 0.000000000 0.100000001 1 +19 4 0.000000000 -2.000000000 0.000000000 0.100000001 18 +20 4 0.000000000 -3.000000000 0.000000000 0.100000001 19 +21 4 0.000000000 -3.000000000 1.000000000 0.100000001 20 +22 4 0.000000000 -3.000000000 -1.000000000 0.100000001 20 +23 4 1.000000000 -2.000000000 0.000000000 0.100000001 19 diff --git a/tests/features/test_bifurcation.py b/tests/features/test_bifurcation.py index f2eea4c08..6ef214af7 100644 --- a/tests/features/test_bifurcation.py +++ b/tests/features/test_bifurcation.py @@ -26,7 +26,7 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -"""Test features.bifurcationfunc.""" +"""Test ``neurom.features.bifurcation``.""" from pathlib import Path import warnings diff --git a/tests/features/test_features.py b/tests/features/test_features.py new file mode 100644 index 000000000..156053dc1 --- /dev/null +++ b/tests/features/test_features.py @@ -0,0 +1,63 @@ +"""Miscelaneous tests of features.""" + +from pathlib import Path +from itertools import chain + +import numpy as np +import pytest +from numpy import testing as npt + +import neurom as nm +from neurom import features + + +def _check_nested_type(data): + """Check that the given data contains only built-in types. + + The data should either be an int or float, or a list or tuple of ints or floats. 
+ """ + if isinstance(data, (list, tuple)): + for i in data: + _check_nested_type(i) + else: + assert isinstance(data, (int, float)) + + +@pytest.mark.parametrize( + "feature_name", + [ + pytest.param(name, id=f"Test type of {name} neurite feature") + for name in features._NEURITE_FEATURES + ], +) +def test_neurite_feature_types(feature_name, NEURITE): + """Test neurite features.""" + res = features._NEURITE_FEATURES.get(feature_name)(NEURITE) + _check_nested_type(res) + + +@pytest.mark.parametrize( + "feature_name", + [ + pytest.param(name, id=f"Test type of {name} morphology feature") + for name in features._MORPHOLOGY_FEATURES + ], +) +@pytest.mark.filterwarnings('ignore::UserWarning') +def test_morphology_feature_types(feature_name, MORPHOLOGY): + """Test morphology features.""" + res = features._MORPHOLOGY_FEATURES.get(feature_name)(MORPHOLOGY) + _check_nested_type(res) + + +@pytest.mark.parametrize( + "feature_name", + [ + pytest.param(name, id=f"Test type of {name} population feature") + for name in features._POPULATION_FEATURES + ], +) +def test_population_feature_types(feature_name, POP): + """Test population features.""" + res = features._POPULATION_FEATURES.get(feature_name)(POP) + _check_nested_type(res) diff --git a/tests/features/test_get_features.py b/tests/features/test_get_features.py index 0bb416e02..21a10da93 100644 --- a/tests/features/test_get_features.py +++ b/tests/features/test_get_features.py @@ -45,18 +45,21 @@ from numpy.testing import assert_allclose DATA_PATH = Path(__file__).parent.parent / 'data' -NRN_FILES = [DATA_PATH / 'h5/v1' / f - for f in ('Neuron.h5', 'Neuron_2_branch.h5', 'bio_neuron-001.h5')] +NRN_FILES = [ + DATA_PATH / 'h5/v1' / f for f in ('Neuron.h5', 'Neuron_2_branch.h5', 'bio_neuron-001.h5') +] POP = load_morphologies(NRN_FILES) NRN = POP[0] SWC_PATH = DATA_PATH / 'swc' NEURON_PATH = SWC_PATH / 'Neuron.swc' NEURON = load_morphology(NEURON_PATH) -NEURITES = (NeuriteType.axon, - NeuriteType.apical_dendrite, - 
NeuriteType.basal_dendrite, - NeuriteType.all) +NEURITES = ( + NeuriteType.axon, + NeuriteType.apical_dendrite, + NeuriteType.basal_dendrite, + NeuriteType.all, +) def _stats(seq): @@ -65,8 +68,10 @@ def _stats(seq): def test_get_raises(): - with pytest.raises(NeuroMError, - match='Only Neurite, Morphology, Population or list, tuple of Neurite, Morphology'): + with pytest.raises( + NeuroMError, + match='Only Neurite, Morphology, Population or list, tuple of Neurite, Morphology', + ): features.get('soma_radius', (n for n in POP)) with pytest.raises(NeuroMError, match='Cant apply "invalid" feature'): features.get('invalid', NRN) @@ -83,24 +88,26 @@ def test_register_existing_feature(): def test_number_of_sections(): assert features.get('number_of_sections', POP) == [84, 42, 202] - assert features.get('number_of_sections', POP, - neurite_type=NeuriteType.all) == [84, 42, 202] - assert features.get('number_of_sections', POP, - neurite_type=NeuriteType.axon) == [21, 21, 179] - assert features.get('number_of_sections', POP, - neurite_type=NeuriteType.apical_dendrite) == [21, 0, 0] - assert features.get('number_of_sections', POP, - neurite_type=NeuriteType.basal_dendrite) == [42, 21, 23] + assert features.get('number_of_sections', POP, neurite_type=NeuriteType.all) == [84, 42, 202] + assert features.get('number_of_sections', POP, neurite_type=NeuriteType.axon) == [21, 21, 179] + assert features.get('number_of_sections', POP, neurite_type=NeuriteType.apical_dendrite) == [ + 21, + 0, + 0, + ] + assert features.get('number_of_sections', POP, neurite_type=NeuriteType.basal_dendrite) == [ + 42, + 21, + 23, + ] assert features.get('number_of_sections', NEURON) == 84 - assert features.get('number_of_sections', NEURON, - neurite_type=NeuriteType.all) == 84 - assert features.get('number_of_sections', NEURON, - neurite_type=NeuriteType.axon) == 21 - assert features.get('number_of_sections', NEURON, - neurite_type=NeuriteType.basal_dendrite) == 42 - assert 
features.get('number_of_sections', NEURON, - neurite_type=NeuriteType.apical_dendrite) == 21 + assert features.get('number_of_sections', NEURON, neurite_type=NeuriteType.all) == 84 + assert features.get('number_of_sections', NEURON, neurite_type=NeuriteType.axon) == 21 + assert features.get('number_of_sections', NEURON, neurite_type=NeuriteType.basal_dendrite) == 42 + assert ( + features.get('number_of_sections', NEURON, neurite_type=NeuriteType.apical_dendrite) == 21 + ) assert features.get('number_of_sections', NEURON.neurites) == [21, 21, 21, 21] assert features.get('number_of_sections', NEURON.neurites[0]) == 21 @@ -110,305 +117,385 @@ def test_number_of_sections(): def test_max_radial_distance(): - assert_allclose( - features.get('max_radial_distance', POP), - [99.58945832, 94.43342439, 1053.77939245]) + assert_allclose(features.get('max_radial_distance', POP), [99.62086, 94.43019, 1072.9137]) assert_allclose( features.get('max_radial_distance', POP, neurite_type=NeuriteType.all), - [99.58945832, 94.43342439, 1053.77939245]) + [99.62086, 94.43019, 1072.9137], + ) assert_allclose( features.get('max_radial_distance', POP, neurite_type=NeuriteType.axon), - [82.442545, 82.442545, 1053.779392]) + [82.52528, 82.44438, 1072.9137], + ) assert_allclose( features.get('max_radial_distance', POP, neurite_type=NeuriteType.basal_dendrite), - [94.43342563, 94.43342439, 207.56977859]) + [94.36033, 94.43019, 209.92587], + ) + assert_allclose(features.get('max_radial_distance', NRN), 99.62086) assert_allclose( - features.get('max_radial_distance', NRN), 99.58945832) + features.get('max_radial_distance', NRN, neurite_type=NeuriteType.all), 99.62086 + ) assert_allclose( - features.get('max_radial_distance', NRN, neurite_type=NeuriteType.all), 99.58945832) - assert_allclose(features.get( - 'max_radial_distance', NRN, neurite_type=NeuriteType.apical_dendrite), 99.589458) + features.get('max_radial_distance', NRN, neurite_type=NeuriteType.apical_dendrite), 99.62086 + ) 
assert_allclose( - features.get('max_radial_distance', NRN.neurites), - [99.58946, 80.05163, 94.433426, 82.44254]) - assert_allclose( - features.get('max_radial_distance', NRN.neurites[0]), 99.58946) + features.get('max_radial_distance', NRN.neurites), [99.58946, 80.05163, 94.433426, 82.44254] + ) + assert_allclose(features.get('max_radial_distance', NRN.neurites[0]), 99.58946) def test_section_tortuosity(): assert_allclose( - _stats(features.get('section_tortuosity', POP)), - (1.0, 4.657, 440.408, 1.342), rtol=1e-3) + _stats(features.get('section_tortuosity', POP)), (1.0, 4.657, 440.408, 1.342), rtol=1e-3 + ) assert_allclose( _stats(features.get('section_tortuosity', POP, neurite_type=NeuriteType.all)), - (1.0, 4.657, 440.408, 1.342), rtol=1e-3) + (1.0, 4.657, 440.408, 1.342), + rtol=1e-3, + ) assert_allclose( _stats(features.get('section_tortuosity', POP, neurite_type=NeuriteType.apical_dendrite)), - (1.070, 1.573, 26.919, 1.281), rtol=1e-3) + (1.070, 1.573, 26.919, 1.281), + rtol=1e-3, + ) assert_allclose( _stats(features.get('section_tortuosity', POP, neurite_type=NeuriteType.basal_dendrite)), - (1.042, 1.674, 106.596, 1.239), rtol=1e-3) + (1.042, 1.674, 106.596, 1.239), + rtol=1e-3, + ) assert_allclose( - _stats(features.get('section_tortuosity', NRN)), - (1.070, 1.573, 106.424, 1.266), rtol=1e-3) + _stats(features.get('section_tortuosity', NRN)), (1.070, 1.573, 106.424, 1.266), rtol=1e-3 + ) assert_allclose( _stats(features.get('section_tortuosity', NRN, neurite_type=NeuriteType.all)), - (1.070, 1.573, 106.424, 1.266), rtol=1e-3) + (1.070, 1.573, 106.424, 1.266), + rtol=1e-3, + ) assert_allclose( _stats(features.get('section_tortuosity', NRN, neurite_type=NeuriteType.apical_dendrite)), - (1.070, 1.573, 26.919, 1.281), rtol=1e-3) + (1.070, 1.573, 26.919, 1.281), + rtol=1e-3, + ) assert_allclose( _stats(features.get('section_tortuosity', NRN, neurite_type=NeuriteType.basal_dendrite)), - (1.078, 1.550, 51.540, 1.227), rtol=1e-3) + (1.078, 1.550, 51.540, 1.227), 
+ rtol=1e-3, + ) def test_number_of_segments(): assert features.get('number_of_segments', POP) == [840, 419, 5179] - assert features.get('number_of_segments', POP, - neurite_type=NeuriteType.all) == [840, 419, 5179] - assert features.get('number_of_segments', POP, - neurite_type=NeuriteType.axon) == [210, 209, 4508] - assert features.get('number_of_segments', POP, - neurite_type=NeuriteType.apical_dendrite) == [210, 0, 0] - assert features.get('number_of_segments', POP, - neurite_type=NeuriteType.basal_dendrite) == [420, 210, 671] + assert features.get('number_of_segments', POP, neurite_type=NeuriteType.all) == [840, 419, 5179] + assert features.get('number_of_segments', POP, neurite_type=NeuriteType.axon) == [ + 210, + 209, + 4508, + ] + assert features.get('number_of_segments', POP, neurite_type=NeuriteType.apical_dendrite) == [ + 210, + 0, + 0, + ] + assert features.get('number_of_segments', POP, neurite_type=NeuriteType.basal_dendrite) == [ + 420, + 210, + 671, + ] assert features.get('number_of_segments', NRN) == 840 - assert features.get('number_of_segments', NRN, - neurite_type=NeuriteType.all) == 840 - assert features.get('number_of_segments', NRN, - neurite_type=NeuriteType.axon) == 210 - assert features.get('number_of_segments', NRN, - neurite_type=NeuriteType.apical_dendrite) == 210 - assert features.get('number_of_segments', NRN, - neurite_type=NeuriteType.basal_dendrite) == 420 + assert features.get('number_of_segments', NRN, neurite_type=NeuriteType.all) == 840 + assert features.get('number_of_segments', NRN, neurite_type=NeuriteType.axon) == 210 + assert features.get('number_of_segments', NRN, neurite_type=NeuriteType.apical_dendrite) == 210 + assert features.get('number_of_segments', NRN, neurite_type=NeuriteType.basal_dendrite) == 420 def test_number_of_neurites(): assert features.get('number_of_neurites', POP) == [4, 2, 4] - assert features.get('number_of_neurites', POP, - neurite_type=NeuriteType.all) == [4, 2, 4] - assert 
features.get('number_of_neurites', POP, - neurite_type=NeuriteType.axon) == [1, 1, 1] - assert features.get('number_of_neurites', POP, - neurite_type=NeuriteType.apical_dendrite) == [1, 0, 0] - assert features.get('number_of_neurites', POP, - neurite_type=NeuriteType.basal_dendrite) == [2, 1, 3] + assert features.get('number_of_neurites', POP, neurite_type=NeuriteType.all) == [4, 2, 4] + assert features.get('number_of_neurites', POP, neurite_type=NeuriteType.axon) == [1, 1, 1] + assert features.get('number_of_neurites', POP, neurite_type=NeuriteType.apical_dendrite) == [ + 1, + 0, + 0, + ] + assert features.get('number_of_neurites', POP, neurite_type=NeuriteType.basal_dendrite) == [ + 2, + 1, + 3, + ] assert features.get('number_of_neurites', NRN) == 4 - assert features.get('number_of_neurites', NRN, - neurite_type=NeuriteType.all) == 4 - assert features.get('number_of_neurites', NRN, - neurite_type=NeuriteType.axon) == 1 - assert features.get('number_of_neurites', NRN, - neurite_type=NeuriteType.apical_dendrite) == 1 - assert features.get('number_of_neurites', NRN, - neurite_type=NeuriteType.basal_dendrite) == 2 + assert features.get('number_of_neurites', NRN, neurite_type=NeuriteType.all) == 4 + assert features.get('number_of_neurites', NRN, neurite_type=NeuriteType.axon) == 1 + assert features.get('number_of_neurites', NRN, neurite_type=NeuriteType.apical_dendrite) == 1 + assert features.get('number_of_neurites', NRN, neurite_type=NeuriteType.basal_dendrite) == 2 def test_number_of_bifurcations(): assert features.get('number_of_bifurcations', POP) == [40, 20, 97] - assert features.get('number_of_bifurcations', POP, - neurite_type=NeuriteType.all) == [40, 20, 97] - assert features.get('number_of_bifurcations', POP, - neurite_type=NeuriteType.axon) == [10, 10, 87] - assert features.get('number_of_bifurcations', POP, - neurite_type=NeuriteType.apical_dendrite) == [10, 0, 0] - assert features.get('number_of_bifurcations', POP, - 
neurite_type=NeuriteType.basal_dendrite) == [20, 10, 10] + assert features.get('number_of_bifurcations', POP, neurite_type=NeuriteType.all) == [40, 20, 97] + assert features.get('number_of_bifurcations', POP, neurite_type=NeuriteType.axon) == [ + 10, + 10, + 87, + ] + assert features.get( + 'number_of_bifurcations', POP, neurite_type=NeuriteType.apical_dendrite + ) == [10, 0, 0] + assert features.get('number_of_bifurcations', POP, neurite_type=NeuriteType.basal_dendrite) == [ + 20, + 10, + 10, + ] assert features.get('number_of_bifurcations', NRN) == 40 - assert features.get('number_of_bifurcations', NRN, - neurite_type=NeuriteType.all) == 40 - assert features.get('number_of_bifurcations', NRN, - neurite_type=NeuriteType.axon) == 10 - assert features.get('number_of_bifurcations', NRN, - neurite_type=NeuriteType.apical_dendrite) == 10 - assert features.get('number_of_bifurcations', NRN, - neurite_type=NeuriteType.basal_dendrite) == 20 + assert features.get('number_of_bifurcations', NRN, neurite_type=NeuriteType.all) == 40 + assert features.get('number_of_bifurcations', NRN, neurite_type=NeuriteType.axon) == 10 + assert ( + features.get('number_of_bifurcations', NRN, neurite_type=NeuriteType.apical_dendrite) == 10 + ) + assert ( + features.get('number_of_bifurcations', NRN, neurite_type=NeuriteType.basal_dendrite) == 20 + ) def test_number_of_forking_points(): assert features.get('number_of_forking_points', POP) == [40, 20, 98] - assert features.get('number_of_forking_points', POP, - neurite_type=NeuriteType.all) == [40, 20, 98] - assert features.get('number_of_forking_points', POP, - neurite_type=NeuriteType.axon) == [10, 10, 88] - assert features.get('number_of_forking_points', POP, - neurite_type=NeuriteType.apical_dendrite) == [10, 0, 0] - assert features.get('number_of_forking_points', POP, - neurite_type=NeuriteType.basal_dendrite) == [20, 10, 10] + assert features.get('number_of_forking_points', POP, neurite_type=NeuriteType.all) == [ + 40, + 20, + 98, + ] + 
assert features.get('number_of_forking_points', POP, neurite_type=NeuriteType.axon) == [ + 10, + 10, + 88, + ] + assert features.get( + 'number_of_forking_points', POP, neurite_type=NeuriteType.apical_dendrite + ) == [10, 0, 0] + assert features.get( + 'number_of_forking_points', POP, neurite_type=NeuriteType.basal_dendrite + ) == [20, 10, 10] assert features.get('number_of_forking_points', NRN) == 40 - assert features.get('number_of_forking_points', NRN, - neurite_type=NeuriteType.all) == 40 - assert features.get('number_of_forking_points', NRN, - neurite_type=NeuriteType.axon) == 10 - assert features.get('number_of_forking_points', NRN, - neurite_type=NeuriteType.apical_dendrite) == 10 - assert features.get('number_of_forking_points', NRN, - neurite_type=NeuriteType.basal_dendrite) == 20 + assert features.get('number_of_forking_points', NRN, neurite_type=NeuriteType.all) == 40 + assert features.get('number_of_forking_points', NRN, neurite_type=NeuriteType.axon) == 10 + assert ( + features.get('number_of_forking_points', NRN, neurite_type=NeuriteType.apical_dendrite) + == 10 + ) + assert ( + features.get('number_of_forking_points', NRN, neurite_type=NeuriteType.basal_dendrite) == 20 + ) def test_number_of_leaves(): assert features.get('number_of_leaves', POP) == [44, 22, 103] - assert features.get('number_of_leaves', POP, - neurite_type=NeuriteType.all) == [44, 22, 103] - assert features.get('number_of_leaves', POP, - neurite_type=NeuriteType.axon) == [11, 11, 90] - assert features.get('number_of_leaves', POP, - neurite_type=NeuriteType.apical_dendrite) == [11, 0, 0] - assert features.get('number_of_leaves', POP, - neurite_type=NeuriteType.basal_dendrite) == [22, 11, 13] + assert features.get('number_of_leaves', POP, neurite_type=NeuriteType.all) == [44, 22, 103] + assert features.get('number_of_leaves', POP, neurite_type=NeuriteType.axon) == [11, 11, 90] + assert features.get('number_of_leaves', POP, neurite_type=NeuriteType.apical_dendrite) == [ + 11, + 0, + 0, 
+ ] + assert features.get('number_of_leaves', POP, neurite_type=NeuriteType.basal_dendrite) == [ + 22, + 11, + 13, + ] assert features.get('number_of_leaves', NRN) == 44 - assert features.get('number_of_leaves', NRN, - neurite_type=NeuriteType.all) == 44 - assert features.get('number_of_leaves', NRN, - neurite_type=NeuriteType.axon) == 11 - assert features.get('number_of_leaves', NRN, - neurite_type=NeuriteType.apical_dendrite) == 11 - assert features.get('number_of_leaves', NRN, - neurite_type=NeuriteType.basal_dendrite) == 22 + assert features.get('number_of_leaves', NRN, neurite_type=NeuriteType.all) == 44 + assert features.get('number_of_leaves', NRN, neurite_type=NeuriteType.axon) == 11 + assert features.get('number_of_leaves', NRN, neurite_type=NeuriteType.apical_dendrite) == 11 + assert features.get('number_of_leaves', NRN, neurite_type=NeuriteType.basal_dendrite) == 22 def test_total_length(): assert_allclose( features.get('total_length', POP), - [840.68522362011538, 418.83424432013902, 13250.825773939932]) + [840.68522362011538, 418.83424432013902, 13250.825773939932], + ) assert_allclose( features.get('total_length', POP, neurite_type=NeuriteType.all), - [840.68522362011538, 418.83424432013902, 13250.825773939932]) + [840.68522362011538, 418.83424432013902, 13250.825773939932], + ) assert_allclose( features.get('total_length', POP, neurite_type=NeuriteType.axon), - [207.8797736031714, 207.81088341560977, 11767.156115224638]) + [207.8797736031714, 207.81088341560977, 11767.156115224638], + ) assert_allclose( features.get('total_length', POP, neurite_type=NeuriteType.apical_dendrite), - [214.37302709169489, 0, 0]) + [214.37302709169489, 0, 0], + ) assert_allclose( features.get('total_length', POP, neurite_type=NeuriteType.basal_dendrite), - [418.43242292524889, 211.02336090452931, 1483.6696587152967]) + [418.43242292524889, 211.02336090452931, 1483.6696587152967], + ) assert_allclose( - features.get('total_length', NEURON, neurite_type=NeuriteType.axon), - 
207.87975221) + features.get('total_length', NEURON, neurite_type=NeuriteType.axon), 207.87975221 + ) assert_allclose( - features.get('total_length', NEURON, neurite_type=NeuriteType.basal_dendrite), - 418.432424) + features.get('total_length', NEURON, neurite_type=NeuriteType.basal_dendrite), 418.432424 + ) assert_allclose( - features.get('total_length', NEURON, neurite_type=NeuriteType.apical_dendrite), - 214.37304578) + features.get('total_length', NEURON, neurite_type=NeuriteType.apical_dendrite), 214.37304578 + ) assert_allclose( - features.get('total_length', NEURON, neurite_type=NeuriteType.axon), - 207.87975221) + features.get('total_length', NEURON, neurite_type=NeuriteType.axon), 207.87975221 + ) assert_allclose( - features.get('total_length', NEURON, neurite_type=NeuriteType.basal_dendrite), - 418.43241644) + features.get('total_length', NEURON, neurite_type=NeuriteType.basal_dendrite), 418.43241644 + ) assert_allclose( - features.get('total_length', NEURON, neurite_type=NeuriteType.apical_dendrite), - 214.37304578) + features.get('total_length', NEURON, neurite_type=NeuriteType.apical_dendrite), 214.37304578 + ) def test_trunk_angles(): trunk_angles_pop = features.get('trunk_angles', POP, neurite_type=NeuriteType.basal_dendrite) trunk_angles_morphs = features.get( - 'trunk_angles', - [i for i in POP], - neurite_type=NeuriteType.basal_dendrite + 'trunk_angles', [i for i in POP], neurite_type=NeuriteType.basal_dendrite ) - trunk_angles_morphs_2 = np.concatenate([ - features.get('trunk_angles', i, neurite_type=NeuriteType.basal_dendrite) - for i in POP - ]).tolist() + trunk_angles_morphs_2 = np.concatenate( + [features.get('trunk_angles', i, neurite_type=NeuriteType.basal_dendrite) for i in POP] + ).tolist() assert trunk_angles_pop == trunk_angles_morphs == trunk_angles_morphs_2 def test_neurite_lengths(): actual = features.get('total_length_per_neurite', POP, neurite_type=NeuriteType.basal_dendrite) - expected = [207.31504917144775, 211.11737489700317, 
211.02336168289185, - 501.28893661499023, 133.21348762512207, 849.1672043800354] - for a,e in zip(actual, expected): + expected = [ + 207.31504917144775, + 211.11737489700317, + 211.02336168289185, + 501.28893661499023, + 133.21348762512207, + 849.1672043800354, + ] + for a, e in zip(actual, expected): assert_allclose(a, e) assert_allclose( features.get('total_length_per_neurite', NEURON, neurite_type=NeuriteType.axon), - (207.87975221,)) + (207.87975221,), + ) assert_allclose( features.get('total_length_per_neurite', NEURON, neurite_type=NeuriteType.basal_dendrite), - (211.11737442, 207.31504202)) + (211.11737442, 207.31504202), + ) assert_allclose( features.get('total_length_per_neurite', NEURON, neurite_type=NeuriteType.apical_dendrite), - (214.37304578,)) + (214.37304578,), + ) def test_segment_radii(): assert_allclose( _stats(features.get('segment_radii', POP)), - (0.079999998211860657, 1.2150000333786011, 1301.9191725363567, 0.20222416473071708)) + (0.079999998211860657, 1.2150000333786011, 1301.9191725363567, 0.20222416473071708), + ) assert_allclose( _stats(features.get('segment_radii', POP, neurite_type=NeuriteType.all)), - (0.079999998211860657, 1.2150000333786011, 1301.9191725363567, 0.20222416473071708)) + (0.079999998211860657, 1.2150000333786011, 1301.9191725363567, 0.20222416473071708), + ) assert_allclose( _stats(features.get('segment_radii', POP, neurite_type=NeuriteType.apical_dendrite)), - (0.13142434507608414, 1.0343990325927734, 123.41135908663273, 0.58767313850777492)) + (0.13142434507608414, 1.0343990325927734, 123.41135908663273, 0.58767313850777492), + ) assert_allclose( _stats(features.get('segment_radii', POP, neurite_type=NeuriteType.basal_dendrite)), - (0.079999998211860657, 1.2150000333786011, 547.43900821779164, 0.42078324997524336)) + (0.079999998211860657, 1.2150000333786011, 547.43900821779164, 0.42078324997524336), + ) assert_allclose( _stats(features.get('segment_radii', NRN)), - (0.12087134271860123, 1.0343990325927734, 
507.01994501426816, 0.60359517263603357)) + (0.12087134271860123, 1.0343990325927734, 507.01994501426816, 0.60359517263603357), + ) assert_allclose( _stats(features.get('segment_radii', NRN, neurite_type=NeuriteType.all)), - (0.12087134271860123, 1.0343990325927734, 507.01994501426816, 0.60359517263603357)) + (0.12087134271860123, 1.0343990325927734, 507.01994501426816, 0.60359517263603357), + ) assert_allclose( _stats(features.get('segment_radii', NRN, neurite_type=NeuriteType.apical_dendrite)), - (0.13142434507608414, 1.0343990325927734, 123.41135908663273, 0.58767313850777492)) + (0.13142434507608414, 1.0343990325927734, 123.41135908663273, 0.58767313850777492), + ) assert_allclose( _stats(features.get('segment_radii', NRN, neurite_type=NeuriteType.basal_dendrite)), - (0.14712842553853989, 1.0215770602226257, 256.71241207793355, 0.61122002875698467)) + (0.14712842553853989, 1.0215770602226257, 256.71241207793355, 0.61122002875698467), + ) def test_segment_meander_angles(): assert_allclose( _stats(features.get('segment_meander_angles', POP)), - (0.0, 3.1415, 14637.9776, 2.3957), rtol=1e-3) + (0.0, 3.1415, 14637.9776, 2.3957), + rtol=1e-3, + ) assert_allclose( _stats(features.get('segment_meander_angles', POP, neurite_type=NeuriteType.all)), - (0.0, 3.1415, 14637.9776, 2.3957), rtol=1e-3) + (0.0, 3.1415, 14637.9776, 2.3957), + rtol=1e-3, + ) assert_allclose( - _stats(features.get('segment_meander_angles', POP, neurite_type=NeuriteType.apical_dendrite)), - (0.3261, 3.0939, 461.9816, 2.4443), rtol=1e-4) + _stats( + features.get('segment_meander_angles', POP, neurite_type=NeuriteType.apical_dendrite) + ), + (0.3261, 3.0939, 461.9816, 2.4443), + rtol=1e-4, + ) assert_allclose( - _stats(features.get('segment_meander_angles', POP, neurite_type=NeuriteType.basal_dendrite)), - (0.0, 3.1415, 2926.2411, 2.4084), rtol=1e-4) + _stats( + features.get('segment_meander_angles', POP, neurite_type=NeuriteType.basal_dendrite) + ), + (0.0, 3.1415, 2926.2411, 2.4084), + rtol=1e-4, + 
) assert_allclose( _stats(features.get('segment_meander_angles', NRN)), - (0.32610, 3.12996, 1842.35, 2.43697), rtol=1e-5) + (0.32610, 3.12996, 1842.35, 2.43697), + rtol=1e-5, + ) assert_allclose( _stats(features.get('segment_meander_angles', NRN, neurite_type=NeuriteType.all)), - (0.32610, 3.12996, 1842.35, 2.43697), rtol=1e-5) + (0.32610, 3.12996, 1842.35, 2.43697), + rtol=1e-5, + ) assert_allclose( - _stats(features.get('segment_meander_angles', NRN, neurite_type=NeuriteType.apical_dendrite)), - (0.32610, 3.09392, 461.981, 2.44434), rtol=1e-5) + _stats( + features.get('segment_meander_angles', NRN, neurite_type=NeuriteType.apical_dendrite) + ), + (0.32610, 3.09392, 461.981, 2.44434), + rtol=1e-5, + ) assert_allclose( - _stats(features.get('segment_meander_angles', NRN, neurite_type=NeuriteType.basal_dendrite)), - (0.47318, 3.12996, 926.338, 2.45063), rtol=1e-4) + _stats( + features.get('segment_meander_angles', NRN, neurite_type=NeuriteType.basal_dendrite) + ), + (0.47318, 3.12996, 926.338, 2.45063), + rtol=1e-4, + ) def test_segment_meander_angles_single_section(): - m = nm.load_morphology(StringIO(u"""((CellBody) (-1 0 0 2) (1 0 0 2)) + m = nm.load_morphology( + StringIO( + u"""((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) (0 0 0 2) (1 0 0 2) (1 1 0 2) (2 1 0 2) - (2 2 0 2)))"""), reader='asc') + (2 2 0 2)))""" + ), + reader='asc', + ) nrt = m.neurites[0] pop = [m] @@ -422,73 +509,114 @@ def test_segment_meander_angles_single_section(): def test_neurite_volumes(): assert_allclose( _stats(features.get('total_volume_per_neurite', POP)), - (28.356406629821159, 281.24754646913954, 2249.4613918388391, 224.9461391838839)) + (28.356406629821159, 281.24754646913954, 2249.4613918388391, 224.9461391838839), + ) assert_allclose( _stats(features.get('total_volume_per_neurite', POP, neurite_type=NeuriteType.all)), - (28.356406629821159, 281.24754646913954, 2249.4613918388391, 224.9461391838839)) + (28.356406629821159, 281.24754646913954, 2249.4613918388391, 
224.9461391838839), + ) assert_allclose( _stats(features.get('total_volume_per_neurite', POP, neurite_type=NeuriteType.axon)), - (276.58135508666612, 277.5357232437392, 830.85568094763551, 276.95189364921185)) + (276.58135508666612, 277.5357232437392, 830.85568094763551, 276.95189364921185), + ) assert_allclose( - _stats(features.get('total_volume_per_neurite', POP, neurite_type=NeuriteType.apical_dendrite)), - (271.94122143951864, 271.94122143951864, 271.94122143951864, 271.94122143951864)) + _stats( + features.get('total_volume_per_neurite', POP, neurite_type=NeuriteType.apical_dendrite) + ), + (271.94122143951864, 271.94122143951864, 271.94122143951864, 271.94122143951864), + ) assert_allclose( - _stats(features.get('total_volume_per_neurite', POP, neurite_type=NeuriteType.basal_dendrite)), - (28.356406629821159, 281.24754646913954, 1146.6644894516851, 191.1107482419475)) + _stats( + features.get('total_volume_per_neurite', POP, neurite_type=NeuriteType.basal_dendrite) + ), + (28.356406629821159, 281.24754646913954, 1146.6644894516851, 191.1107482419475), + ) assert_allclose( _stats(features.get('total_volume_per_neurite', NRN)), - (271.9412, 281.2475, 1104.907, 276.2269), rtol=1e-5) + (271.9412, 281.2475, 1104.907, 276.2269), + rtol=1e-5, + ) assert_allclose( _stats(features.get('total_volume_per_neurite', NRN, neurite_type=NeuriteType.all)), - (271.9412, 281.2475, 1104.907, 276.2269), rtol=1e-5) + (271.9412, 281.2475, 1104.907, 276.2269), + rtol=1e-5, + ) assert_allclose( _stats(features.get('total_volume_per_neurite', NRN, neurite_type=NeuriteType.axon)), - (276.7386, 276.7386, 276.7386, 276.7386), rtol=1e-5) + (276.7386, 276.7386, 276.7386, 276.7386), + rtol=1e-5, + ) assert_allclose( - _stats(features.get('total_volume_per_neurite', NRN, neurite_type=NeuriteType.apical_dendrite)), - (271.9412, 271.9412, 271.9412, 271.9412), rtol=1e-5) + _stats( + features.get('total_volume_per_neurite', NRN, neurite_type=NeuriteType.apical_dendrite) + ), + (271.9412, 
271.9412, 271.9412, 271.9412), + rtol=1e-5, + ) assert_allclose( - _stats(features.get('total_volume_per_neurite', NRN, neurite_type=NeuriteType.basal_dendrite)), - (274.9803, 281.2475, 556.2279, 278.1139), rtol=1e-5) + _stats( + features.get('total_volume_per_neurite', NRN, neurite_type=NeuriteType.basal_dendrite) + ), + (274.9803, 281.2475, 556.2279, 278.1139), + rtol=1e-5, + ) def test_neurite_density(): assert_allclose( _stats(features.get('neurite_volume_density', POP)), - (6.1847539631150784e-06, 0.52464681266899216, 1.9767794901940539, 0.19767794901940539)) + (6.1847539631150784e-06, 0.52464681266899216, 1.9767794901940539, 0.19767794901940539), + ) assert_allclose( _stats(features.get('neurite_volume_density', POP, neurite_type=NeuriteType.all)), - (6.1847539631150784e-06, 0.52464681266899216, 1.9767794901940539, 0.19767794901940539)) + (6.1847539631150784e-06, 0.52464681266899216, 1.9767794901940539, 0.19767794901940539), + ) assert_allclose( _stats(features.get('neurite_volume_density', POP, neurite_type=NeuriteType.axon)), - (6.1847539631150784e-06, 0.26465213325053372, 0.5275513670655404, 0.1758504556885134), 1e-6) + (6.1847539631150784e-06, 0.26465213325053372, 0.5275513670655404, 0.1758504556885134), + 1e-6, + ) assert_allclose( - _stats(features.get('neurite_volume_density', POP, neurite_type=NeuriteType.apical_dendrite)), - (0.43756606998299519, 0.43756606998299519, 0.43756606998299519, 0.43756606998299519)) + _stats( + features.get('neurite_volume_density', POP, neurite_type=NeuriteType.apical_dendrite) + ), + (0.43756606998299519, 0.43756606998299519, 0.43756606998299519, 0.43756606998299519), + ) assert_allclose( - _stats(features.get('neurite_volume_density', POP, neurite_type=NeuriteType.basal_dendrite)), - (0.00034968816544949771, 0.52464681266899216, 1.0116620531455183, 0.16861034219091972)) + _stats( + features.get('neurite_volume_density', POP, neurite_type=NeuriteType.basal_dendrite) + ), + (0.00034968816544949771, 0.52464681266899216, 
1.0116620531455183, 0.16861034219091972), + ) assert_allclose( _stats(features.get('neurite_volume_density', NRN)), - (0.24068543213643726, 0.52464681266899216, 1.4657913638494682, 0.36644784096236704)) + (0.24068543213643726, 0.52464681266899216, 1.4657913638494682, 0.36644784096236704), + ) assert_allclose( _stats(features.get('neurite_volume_density', NRN, neurite_type=NeuriteType.all)), - (0.24068543213643726, 0.52464681266899216, 1.4657913638494682, 0.36644784096236704)) + (0.24068543213643726, 0.52464681266899216, 1.4657913638494682, 0.36644784096236704), + ) assert_allclose( _stats(features.get('neurite_volume_density', NRN, neurite_type=NeuriteType.axon)), - (0.26289304906104355, 0.26289304906104355, 0.26289304906104355, 0.26289304906104355)) + (0.26289304906104355, 0.26289304906104355, 0.26289304906104355, 0.26289304906104355), + ) assert_allclose( - _stats(features.get('neurite_volume_density', NRN, neurite_type=NeuriteType.apical_dendrite)), - (0.43756606998299519, 0.43756606998299519, 0.43756606998299519, 0.43756606998299519)) + _stats( + features.get('neurite_volume_density', NRN, neurite_type=NeuriteType.apical_dendrite) + ), + (0.43756606998299519, 0.43756606998299519, 0.43756606998299519, 0.43756606998299519), + ) assert_allclose( - _stats(features.get('neurite_volume_density', NRN, neurite_type=NeuriteType.basal_dendrite)), - (0.24068543213643726, 0.52464681266899216, 0.76533224480542938, 0.38266612240271469)) + _stats( + features.get('neurite_volume_density', NRN, neurite_type=NeuriteType.basal_dendrite) + ), + (0.24068543213643726, 0.52464681266899216, 0.76533224480542938, 0.38266612240271469), + ) def test_morphology_volume_density(): - volume_density = features.get("volume_density", NEURON) # volume density should not be calculated as the sum of the neurite volume densities, @@ -543,8 +671,9 @@ def test_segment_lengths(): def test_local_bifurcation_angles(): - ref_local_bifangles = np.concatenate([neurite.local_bifurcation_angles(s) - for s in 
NEURON.neurites]) + ref_local_bifangles = np.concatenate( + [neurite.local_bifurcation_angles(s) for s in NEURON.neurites] + ) local_bifangles = features.get('local_bifurcation_angles', NEURON) assert len(local_bifangles) == 40 @@ -567,13 +696,15 @@ def test_local_bifurcation_angles(): def test_remote_bifurcation_angles(): - ref_remote_bifangles = np.concatenate([neurite.remote_bifurcation_angles(s) - for s in NEURON.neurites]) + ref_remote_bifangles = np.concatenate( + [neurite.remote_bifurcation_angles(s) for s in NEURON.neurites] + ) remote_bifangles = features.get('remote_bifurcation_angles', NEURON) assert len(remote_bifangles) == 40 assert_allclose(remote_bifangles, ref_remote_bifangles) - remote_bifangles = features.get('remote_bifurcation_angles', - NEURON, neurite_type=NeuriteType.all) + remote_bifangles = features.get( + 'remote_bifurcation_angles', NEURON, neurite_type=NeuriteType.all + ) assert len(remote_bifangles) == 40 assert_allclose(remote_bifangles, ref_remote_bifangles) @@ -593,8 +724,9 @@ def test_remote_bifurcation_angles(): def test_segment_radial_distances_origin(): origin = (-100, -200, -300) ref_segs = np.concatenate([neurite.segment_radial_distances(s) for s in NEURON.neurites]) - ref_segs_origin = np.concatenate([neurite.segment_radial_distances(s, origin) - for s in NEURON.neurites]) + ref_segs_origin = np.concatenate( + [neurite.segment_radial_distances(s, origin) for s in NEURON.neurites] + ) rad_dists = features.get('segment_radial_distances', NEURON) rad_dists_origin = features.get('segment_radial_distances', NEURON, origin=origin) @@ -603,8 +735,10 @@ def test_segment_radial_distances_origin(): assert np.all(rad_dists_origin == ref_segs_origin) assert np.all(rad_dists_origin != ref_segs) - morphs = [nm.load_morphology(Path(SWC_PATH, f)) for - f in ('point_soma_single_neurite.swc', 'point_soma_single_neurite2.swc')] + morphs = [ + nm.load_morphology(Path(SWC_PATH, f)) + for f in ('point_soma_single_neurite.swc', 
'point_soma_single_neurite2.swc') + ] pop = Population(morphs) rad_dist_morphs = [] for m in morphs: @@ -616,15 +750,18 @@ def test_segment_radial_distances_origin(): def test_section_radial_distances_endpoint(): - ref_sec_rad_dist = np.concatenate([neurite.section_radial_distances(s) - for s in NEURON.neurites]) + ref_sec_rad_dist = np.concatenate( + [neurite.section_radial_distances(s) for s in NEURON.neurites] + ) rad_dists = features.get('section_radial_distances', NEURON) assert len(rad_dists) == 84 assert np.all(rad_dists == ref_sec_rad_dist) - morphs = [nm.load_morphology(Path(SWC_PATH, f)) for - f in ('point_soma_single_neurite.swc', 'point_soma_single_neurite2.swc')] + morphs = [ + nm.load_morphology(Path(SWC_PATH, f)) + for f in ('point_soma_single_neurite.swc', 'point_soma_single_neurite2.swc') + ] pop = Population(morphs) rad_dist_morphs = [v for m in morphs for v in features.get('section_radial_distances', m)] rad_dist_pop = features.get('section_radial_distances', pop) @@ -636,61 +773,82 @@ def test_section_radial_distances_endpoint(): def test_section_radial_distances_origin(): origin = (-100, -200, -300) - ref_sec_rad_dist_origin = np.concatenate([neurite.section_radial_distances(s, origin) - for s in NEURON.neurites]) + ref_sec_rad_dist_origin = np.concatenate( + [neurite.section_radial_distances(s, origin) for s in NEURON.neurites] + ) rad_dists = features.get('section_radial_distances', NEURON, origin=origin) assert len(rad_dists) == 84 assert np.all(rad_dists == ref_sec_rad_dist_origin) def test_number_of_sections_per_neurite(): - nsecs = features.get('number_of_sections_per_neurite', NEURON) - assert len(nsecs) == 4 - assert np.all(nsecs == [21, 21, 21, 21]) - - nsecs = features.get('number_of_sections_per_neurite', NEURON, neurite_type=NeuriteType.axon) - assert len(nsecs) == 1 - assert nsecs == [21] - - nsecs = features.get('number_of_sections_per_neurite', NEURON, - neurite_type=NeuriteType.basal_dendrite) - assert len(nsecs) == 2 - assert 
np.all(nsecs == [21, 21]) - - nsecs = features.get('number_of_sections_per_neurite', NEURON, - neurite_type=NeuriteType.apical_dendrite) - assert len(nsecs) == 1 - assert np.all(nsecs == [21]) + for use_subtrees in (True, False): + neuron = load_morphology(NEURON_PATH, process_subtrees=use_subtrees) + nsecs = features.get('number_of_sections_per_neurite', neuron) + assert len(nsecs) == 4 + assert np.all(nsecs == [21, 21, 21, 21]) + + nsecs = features.get( + 'number_of_sections_per_neurite', + neuron, + neurite_type=NeuriteType.axon, + ) + assert len(nsecs) == 1 + assert nsecs == [21] + + nsecs = features.get( + 'number_of_sections_per_neurite', + neuron, + neurite_type=NeuriteType.basal_dendrite, + ) + assert len(nsecs) == 2 + assert np.all(nsecs == [21, 21]) + + nsecs = features.get( + 'number_of_sections_per_neurite', + neuron, + neurite_type=NeuriteType.apical_dendrite, + ) + assert len(nsecs) == 1 + assert np.all(nsecs == [21]) def test_trunk_origin_radii(): assert_allclose( features.get('trunk_origin_radii', NEURON), - [0.85351288499400002, 0.18391483031299999, 0.66943255462899998, 0.14656092843999999]) + [0.85351288499400002, 0.18391483031299999, 0.66943255462899998, 0.14656092843999999], + ) assert_allclose( features.get('trunk_origin_radii', NEURON, neurite_type=NeuriteType.apical_dendrite), - [0.14656092843999999]) + [0.14656092843999999], + ) assert_allclose( features.get('trunk_origin_radii', NEURON, neurite_type=NeuriteType.basal_dendrite), - [0.18391483031299999, 0.66943255462899998]) + [0.18391483031299999, 0.66943255462899998], + ) assert_allclose( features.get('trunk_origin_radii', NEURON, neurite_type=NeuriteType.axon), - [0.85351288499400002]) + [0.85351288499400002], + ) def test_trunk_section_lengths(): assert_allclose( features.get('trunk_section_lengths', NEURON), - [9.579117366740002, 7.972322416776259, 8.2245287740603779, 9.212707985134525]) + [9.579117366740002, 7.972322416776259, 8.2245287740603779, 9.212707985134525], + ) assert_allclose( 
features.get('trunk_section_lengths', NEURON, neurite_type=NeuriteType.apical_dendrite), - [9.212707985134525]) + [9.212707985134525], + ) assert_allclose( features.get('trunk_section_lengths', NEURON, neurite_type=NeuriteType.basal_dendrite), - [7.972322416776259, 8.2245287740603779]) + [7.972322416776259, 8.2245287740603779], + ) assert_allclose( features.get('trunk_section_lengths', NEURON, neurite_type=NeuriteType.axon), - [9.579117366740002]) + [9.579117366740002], + ) def test_soma_radius(): @@ -698,30 +856,35 @@ def test_soma_radius(): def test_soma_surface_area(): - area = 4. * math.pi * features.get('soma_radius', NEURON) ** 2 + area = 4.0 * math.pi * features.get('soma_radius', NEURON) ** 2 assert_allclose(features.get('soma_surface_area', NEURON), area) def test_sholl_frequency(): - assert_allclose(features.get('sholl_frequency', NEURON), - [4, 8, 8, 14, 9, 8, 7, 7, 7, 5]) + assert_allclose(features.get('sholl_frequency', NEURON), [4, 8, 8, 14, 9, 8, 7, 7, 7, 5]) - assert_allclose(features.get('sholl_frequency', NEURON, neurite_type=NeuriteType.all), - [4, 8, 8, 14, 9, 8, 7, 7, 7, 5]) + assert_allclose( + features.get('sholl_frequency', NEURON, neurite_type=NeuriteType.all), + [4, 8, 8, 14, 9, 8, 7, 7, 7, 5], + ) assert_allclose( features.get('sholl_frequency', NEURON, neurite_type=NeuriteType.apical_dendrite), - [1, 2, 2, 2, 2, 2, 1, 1, 3, 3]) + [1, 2, 2, 2, 2, 2, 1, 1, 3, 3], + ) assert_allclose( features.get('sholl_frequency', NEURON, neurite_type=NeuriteType.basal_dendrite), - [2, 4, 4, 6, 5, 4, 4, 4, 2, 2]) - - assert_allclose(features.get('sholl_frequency', NEURON, neurite_type=NeuriteType.axon), - [1, 2, 2, 6, 2, 2, 2, 2, 2]) + [2, 4, 4, 6, 5, 4, 4, 4, 2, 2], + ) - assert len(features.get('sholl_frequency', POP)) == 108 + assert_allclose( + features.get('sholl_frequency', NEURON, neurite_type=NeuriteType.axon), + [1, 2, 2, 6, 2, 2, 2, 2, 2], + ) + pop = Population([NEURON, NEURON]) + assert len(features.get('sholl_frequency', pop)) == 10 # check 
that the soma is taken into account for calculating max radius and num bins m = nm.load_morphology( @@ -729,7 +892,8 @@ def test_sholl_frequency(): 1 1 -10 0 0 5.0 -1 2 3 0 0 0 0.1 1 3 3 10 0 0 0.1 2 - """, reader="swc", + """, + reader="swc", ) assert features.get('sholl_frequency', m, step_size=5.0) == [0, 1, 1, 1] @@ -739,14 +903,17 @@ def test_sholl_frequency(): def test_bifurcation_partitions(): - assert_allclose(features.get('bifurcation_partitions', POP)[:10], - [19., 17., 15., 13., 11., 9., 7., 5., 3., 1.]) + assert_allclose( + features.get('bifurcation_partitions', POP)[:10], + [19.0, 17.0, 15.0, 13.0, 11.0, 9.0, 7.0, 5.0, 3.0, 1.0], + ) def test_partition_asymmetry(): assert_allclose( features.get('partition_asymmetry', POP)[:10], - [0.9, 0.88888889, 0.875, 0.85714286, 0.83333333, 0.8, 0.75, 0.66666667, 0.5, 0.]) + [0.9, 0.88888889, 0.875, 0.85714286, 0.83333333, 0.8, 0.75, 0.66666667, 0.5, 0.0], + ) def test_partition_asymmetry_length(): @@ -756,45 +923,99 @@ def test_partition_asymmetry_length(): def test_section_strahler_orders(): path = Path(SWC_PATH, 'strahler.swc') n = nm.load_morphology(path) - assert_allclose(features.get('section_strahler_orders', n), - [4, 1, 4, 3, 2, 1, 1, 2, 1, 1, 3, 1, 3, 2, 1, 1, 2, 1, 1]) + assert_allclose( + features.get('section_strahler_orders', n), + [4, 1, 4, 3, 2, 1, 1, 2, 1, 1, 3, 1, 3, 2, 1, 1, 2, 1, 1], + ) def test_section_bif_radial_distances(): + # the feature applied on morph calculates radial distance from soma trm_rads = features.get('section_bif_radial_distances', NRN, neurite_type=nm.AXON) - assert_allclose(trm_rads, - [8.842008561870646, - 16.7440421479104, - 23.070306480850533, - 30.181121708042546, - 36.62766031035137, - 43.967487830324885, - 51.91971040624528, - 59.427722328770955, - 66.25222507299583, - 74.05119754074926]) + + assert_allclose( + trm_rads, + [ + 8.92228, + 16.825268, + 23.152378, + 30.262894, + 36.71048, + 44.049297, + 52.00228, + 59.510105, + 66.33529, + 74.134636, + ], + ) + + # the 
feature applied per neurite calculates radial distance from root + trm_rads = features.get('section_bif_radial_distances', NRN.neurites[3]) + + assert_allclose( + trm_rads, + [ + 8.842008561870646, + 16.7440421479104, + 23.070306480850533, + 30.181121708042546, + 36.62766031035137, + 43.967487830324885, + 51.91971040624528, + 59.427722328770955, + 66.25222507299583, + 74.05119754074926, + ], + ) def test_section_term_radial_distances(): trm_rads = features.get('section_term_radial_distances', NRN, neurite_type=nm.APICAL_DENDRITE) - assert_allclose(trm_rads, - [16.22099879395879, - 25.992977561564082, - 33.31600613822663, - 42.721314797308175, - 52.379508081911546, - 59.44327819128149, - 67.07832724133213, - 79.97743930553612, - 87.10434825508366, - 97.25246040544428, - 99.58945832481642]) + + print(trm_rads) + assert_allclose( + trm_rads, + [ + 16.258472, + 26.040075, + 33.35425, + 42.755745, + 52.41365, + 59.476284, + 67.11225, + 80.00984, + 87.13672, + 97.284706, + 99.62086, + ], + ) + + apical = NRN.neurites[0] + trm_rads = features.get( + 'section_term_radial_distances', apical, section_type=nm.APICAL_DENDRITE + ) + assert_allclose( + trm_rads, + [ + 16.22099879395879, + 25.992977561564082, + 33.31600613822663, + 42.721314797308175, + 52.379508081911546, + 59.44327819128149, + 67.07832724133213, + 79.97743930553612, + 87.10434825508366, + 97.25246040544428, + 99.58945832481642, + ], + ) def test_principal_direction_extents(): m = nm.load_morphology(SWC_PATH / 'simple.swc') principal_dir = features.get('principal_direction_extents', m) - assert_allclose(principal_dir, [10.99514 , 10.997688]) + assert_allclose(principal_dir, [10.99514, 10.997688]) # test with a realistic morphology m = nm.load_morphology(DATA_PATH / 'h5/v1' / 'bio_neuron-000.h5') @@ -810,7 +1031,7 @@ def test_principal_direction_extents(): 152.396521, 293.913857, ], - atol=1e-6 + atol=1e-6, ) assert_allclose( features.get('principal_direction_extents', m, direction=1), @@ -823,181 +1044,130 @@ 
def test_principal_direction_extents(): 20.66982, 67.157249, ], - atol=1e-6 + atol=1e-6, ) assert_allclose( features.get('principal_direction_extents', m, direction=2), - [ - 282.961199, - 38.493958, - 40.715183, - 94.061625, - 51.120255, - 10.793167, - 62.808188 - ], - atol=1e-6 + [282.961199, 38.493958, 40.715183, 94.061625, 51.120255, 10.793167, 62.808188], + atol=1e-6, ) -def test_total_width(): - assert_allclose( - features.get('total_width', NRN), - 105.0758 - ) +def test_total_width(): + assert_allclose(features.get('total_width', NRN), 105.0758) - assert_allclose( - features.get('total_width', NRN, neurite_type=nm.AXON), - 33.25306 - ) + assert_allclose(features.get('total_width', NRN, neurite_type=nm.AXON), 33.25306) - assert_allclose( - features.get('total_width', NRN, neurite_type=nm.BASAL_DENDRITE), - 104.57807 - ) + assert_allclose(features.get('total_width', NRN, neurite_type=nm.BASAL_DENDRITE), 104.57807) def test_total_height(): + assert_allclose(features.get('total_height', NRN), 106.11643) - assert_allclose( - features.get('total_height', NRN), - 106.11643 - ) + assert_allclose(features.get('total_height', NRN, neurite_type=nm.AXON), 57.60017) - assert_allclose( - features.get('total_height', NRN, neurite_type=nm.AXON), - 57.60017 - ) + assert_allclose(features.get('total_height', NRN, neurite_type=nm.BASAL_DENDRITE), 48.516262) - assert_allclose( - features.get('total_height', NRN, neurite_type=nm.BASAL_DENDRITE), - 48.516262 - ) def test_total_depth(): + assert_allclose(features.get('total_depth', NRN), 54.204086) - assert_allclose( - features.get('total_depth', NRN), - 54.204086 - ) - - assert_allclose( - features.get('total_depth', NRN, neurite_type=nm.AXON), - 49.70138 - ) + assert_allclose(features.get('total_depth', NRN, neurite_type=nm.AXON), 49.70138) - assert_allclose( - features.get('total_depth', NRN, neurite_type=nm.BASAL_DENDRITE), - 51.64143 - ) + assert_allclose(features.get('total_depth', NRN, neurite_type=nm.BASAL_DENDRITE), 
51.64143) def test_aspect_ratio(): - morph = load_morphology(DATA_PATH / "neurolucida/bio_neuron-000.asc") npt.assert_almost_equal( features.get("aspect_ratio", morph, neurite_type=nm.AXON, projection_plane="xy"), 0.710877, - decimal=6 + decimal=6, ) npt.assert_almost_equal( features.get("aspect_ratio", morph, neurite_type=nm.AXON, projection_plane="xz"), 0.222268, - decimal=6 + decimal=6, ) npt.assert_almost_equal( features.get("aspect_ratio", morph, neurite_type=nm.AXON, projection_plane="yz"), 0.315263, - decimal=6 - ) - npt.assert_almost_equal( - features.get("aspect_ratio", morph), - 0.731076, - decimal=6 + decimal=6, ) + npt.assert_almost_equal(features.get("aspect_ratio", morph), 0.731076, decimal=6) assert np.isnan(features.get("aspect_ratio", morph, neurite_type=nm.NeuriteType.custom5)) def test_circularity(): - morph = load_morphology(DATA_PATH / "neurolucida/bio_neuron-000.asc") npt.assert_almost_equal( features.get("circularity", morph, neurite_type=nm.AXON, projection_plane="xy"), 0.722613, - decimal=6 + decimal=6, ) npt.assert_almost_equal( features.get("circularity", morph, neurite_type=nm.AXON, projection_plane="xz"), 0.378692, - decimal=6 + decimal=6, ) npt.assert_almost_equal( features.get("circularity", morph, neurite_type=nm.AXON, projection_plane="yz"), 0.527657, - decimal=6 - ) - npt.assert_almost_equal( - features.get("circularity", morph), - 0.730983, - decimal=6 + decimal=6, ) + npt.assert_almost_equal(features.get("circularity", morph), 0.730983, decimal=6) assert np.isnan(features.get("circularity", morph, neurite_type=nm.NeuriteType.custom5)) def test_shape_factor(): - morph = load_morphology(DATA_PATH / "neurolucida/bio_neuron-000.asc") npt.assert_almost_equal( features.get("shape_factor", morph, neurite_type=nm.AXON, projection_plane="xy"), 0.356192, - decimal=6 + decimal=6, ) npt.assert_almost_equal( features.get("shape_factor", morph, neurite_type=nm.AXON, projection_plane="xz"), 0.131547, - decimal=6 + decimal=6, ) 
npt.assert_almost_equal( features.get("shape_factor", morph, neurite_type=nm.AXON, projection_plane="yz"), 0.194558, - decimal=6 - ) - npt.assert_almost_equal( - features.get("shape_factor", morph), - 0.364678, - decimal=6 + decimal=6, ) + npt.assert_almost_equal(features.get("shape_factor", morph), 0.364678, decimal=6) assert np.isnan(features.get("shape_factor", morph, neurite_type=nm.NeuriteType.custom5)) -@pytest.mark.parametrize("neurite_type, axis, expected_value", [ - (nm.AXON, "X", 0.50), - (nm.AXON, "Y", 0.74), - (nm.AXON, "Z", 0.16), - (nm.APICAL_DENDRITE, "X", np.nan), - (nm.APICAL_DENDRITE, "Y", np.nan), - (nm.APICAL_DENDRITE, "Z", np.nan), - (nm.BASAL_DENDRITE, "X", 0.50), - (nm.BASAL_DENDRITE, "Y", 0.59), - (nm.BASAL_DENDRITE, "Z", 0.48), -] +@pytest.mark.parametrize( + "neurite_type, axis, expected_value", + [ + (nm.AXON, "X", 0.50), + (nm.AXON, "Y", 0.74), + (nm.AXON, "Z", 0.16), + (nm.APICAL_DENDRITE, "X", np.nan), + (nm.APICAL_DENDRITE, "Y", np.nan), + (nm.APICAL_DENDRITE, "Z", np.nan), + (nm.BASAL_DENDRITE, "X", 0.50), + (nm.BASAL_DENDRITE, "Y", 0.59), + (nm.BASAL_DENDRITE, "Z", 0.48), + ], ) def test_length_fraction_from_soma(neurite_type, axis, expected_value): - morph = load_morphology(DATA_PATH / "neurolucida/bio_neuron-000.asc") npt.assert_almost_equal( features.get("length_fraction_above_soma", morph, neurite_type=neurite_type, up=axis), expected_value, - decimal=2 + decimal=2, ) def test_length_fraction_from_soma__wrong_axis(): - morph = load_morphology(DATA_PATH / "neurolucida/bio_neuron-000.asc") with pytest.raises(NeuroMError): diff --git a/tests/features/test_morphology.py b/tests/features/test_morphology.py index 3bd49d532..41c531347 100644 --- a/tests/features/test_morphology.py +++ b/tests/features/test_morphology.py @@ -26,7 +26,7 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-"""Test ``features.morphology``.""" +"""Test ``neurom.features.morphology``.""" from math import pi, sqrt import tempfile import warnings @@ -37,7 +37,6 @@ import numpy as np import pytest from morphio import PointLevel, SectionType -from numpy import testing as npt from numpy.testing import assert_allclose from numpy.testing import assert_almost_equal from numpy.testing import assert_array_almost_equal @@ -63,12 +62,15 @@ def _add_neurite_trunk(morph, elevation, azimuth, neurite_type=SectionType.basal_dendrite): """Add a neurite from the elevation and azimuth to a given morphology.""" - new_pts = np.array( - morphmath.vector_from_spherical(elevation, azimuth), - ndmin=2 - ) + mut = morph.to_morphio() + if hasattr(mut, 'as_mutable'): + mut = mut.as_mutable() + + new_pts = np.array(morphmath.vector_from_spherical(elevation, azimuth), ndmin=2) + point_lvl = PointLevel(new_pts, [1]) - morph.append_root_section(point_lvl, neurite_type) + mut.append_root_section(point_lvl, neurite_type) + return Morphology(mut) def test_soma_volume(): @@ -99,7 +101,7 @@ def test_soma_radius(): def test_total_area_per_neurite(): def surface(r0, r1, h): - return pi * (r0 + r1) * sqrt((r0 - r1) ** 2 + h ** 2) + return pi * (r0 + r1) * sqrt((r0 - r1) ** 2 + h**2) basal_area = surface(1, 1, 5) + surface(1, 0, 5) + surface(1, 0, 6) ret = morphology.total_area_per_neurite(SIMPLE, neurite_type=BASAL_DENDRITE) @@ -118,19 +120,17 @@ def test_total_volume_per_neurite(): assert len(vol) == 4 # calculate the volumes by hand and compare - vol2 = [sum(section.section_volume(s) for s in n.iter_sections()) - for n in NRN.neurites] + vol2 = [sum(section.section_volume(s) for s in n.iter_sections()) for n in NRN.neurites] assert vol == vol2 # regression test - ref_vol = [271.94122143951864, 281.24754646913954, - 274.98039928781355, 276.73860261723024] + ref_vol = [271.94122143951864, 281.24754646913954, 274.98039928781355, 276.73860261723024] assert np.allclose(vol, ref_vol) def 
test_total_length_per_neurite(): total_lengths = morphology.total_length_per_neurite(SIMPLE) - assert total_lengths == [5. + 5. + 6., 4. + 5. + 6.] + assert total_lengths == [5.0 + 5.0 + 6.0, 4.0 + 5.0 + 6.0] def test_number_of_neurites(): @@ -140,8 +140,10 @@ def test_number_of_neurites(): def test_total_volume_per_neurite(): # note: cannot use SIMPLE since it lies in a plane total_volumes = morphology.total_volume_per_neurite(NRN) - assert_allclose(total_volumes, - [271.94122143951864, 281.24754646913954, 274.98039928781355, 276.73860261723024]) + assert_allclose( + total_volumes, + [271.94122143951864, 281.24754646913954, 274.98039928781355, 276.73860261723024], + ) def test_number_of_sections_per_neurite(): @@ -155,14 +157,23 @@ def test_trunk_section_lengths(): def test_trunk_origin_radii(): - morph = Morphology(SIMPLE) - morph.section(0).diameters = [2, 1] - morph.section(3).diameters = [2, 0.5] - + morph = load_swc( + """ + 1 1 0 0 0 1. -1 + 2 3 0 0 0 1.0 1 + 3 3 0 5 0 0.5 2 + 4 3 -5 5 0 0. 3 + 5 3 6 5 0 0. 3 + 6 2 0 0 0 1.0 1 + 7 2 0 -4 0 0.25 6 + 8 2 6 -4 0 0. 7 + 9 2 -5 -4 0 0. 7 + """ + ) ret = morphology.trunk_origin_radii(morph) assert ret == [1.0, 1.0] - ret = morphology.trunk_origin_radii(morph, min_length_filter=1) + ret = morphology.trunk_origin_radii(morph, min_length_filter=1.0) assert_array_almost_equal(ret, [0.5, 0.25]) with pytest.warns( @@ -171,7 +182,7 @@ def test_trunk_origin_radii(): r"In 'trunk_origin_radii': the 'min_length_filter' value is greater than the " r"path distance of the last point of the last section so the radius of this " r"point is returned\." 
- ) + ), ): ret = morphology.trunk_origin_radii(morph, min_length_filter=999) assert_array_almost_equal(ret, [0.5, 0.25]) @@ -188,7 +199,7 @@ def test_trunk_origin_radii(): r"In 'trunk_origin_radii': the 'min_length_filter' and 'max_length_filter' " r"values excluded all the points of the section so the radius of the first " r"point after the 'min_length_filter' path distance is returned\." - ) + ), ): ret = morphology.trunk_origin_radii(morph, min_length_filter=0.1, max_length_filter=0.2) assert_array_almost_equal(ret, [0.5, 0.25]) @@ -198,8 +209,7 @@ def test_trunk_origin_radii(): match=( r"In 'trunk_origin_radii': the 'min_length_filter' value must be strictly greater " r"than 0\." - - ) + ), ): ret = morphology.trunk_origin_radii(morph, min_length_filter=-999) @@ -208,7 +218,7 @@ def test_trunk_origin_radii(): match=( r"In 'trunk_origin_radii': the 'max_length_filter' value must be strictly greater " r"than 0\." - ) + ), ): ret = morphology.trunk_origin_radii(morph, max_length_filter=-999) @@ -217,7 +227,7 @@ def test_trunk_origin_radii(): match=( r"In 'trunk_origin_radii': the 'min_length_filter' value must be strictly less than the" r" 'max_length_filter' value\." 
- ) + ), ): ret = morphology.trunk_origin_radii(morph, min_length_filter=15, max_length_filter=5) @@ -229,7 +239,7 @@ def test_trunk_origin_azimuths(): def test_trunk_angles(): ret = morphology.trunk_angles(SIMPLE_TRUNK) - assert_array_almost_equal(ret, [np.pi/2, np.pi/2, np.pi/2, np.pi/2]) + assert_array_almost_equal(ret, [np.pi / 2, np.pi / 2, np.pi / 2, np.pi / 2]) ret = morphology.trunk_angles(SIMPLE_TRUNK, neurite_type=NeuriteType.basal_dendrite) assert_array_almost_equal(ret, [np.pi, np.pi]) ret = morphology.trunk_angles(SIMPLE_TRUNK, neurite_type=NeuriteType.axon) @@ -237,31 +247,37 @@ def test_trunk_angles(): ret = morphology.trunk_angles(SIMPLE, neurite_type=NeuriteType.apical_dendrite) assert_array_almost_equal(ret, []) - ret = morphology.trunk_angles(SIMPLE_TRUNK, coords_only=None, sort_along=None, consecutive_only=False) + ret = morphology.trunk_angles( + SIMPLE_TRUNK, coords_only=None, sort_along=None, consecutive_only=False + ) assert_array_almost_equal( ret, [ - [0., np.pi/2, np.pi/2, np.pi], - [0., np.pi, np.pi/2, np.pi/2], - [0., np.pi/2, np.pi/2, np.pi], - [0., np.pi, np.pi/2, np.pi/2], - ]) + [0.0, np.pi / 2, np.pi / 2, np.pi], + [0.0, np.pi, np.pi / 2, np.pi / 2], + [0.0, np.pi / 2, np.pi / 2, np.pi], + [0.0, np.pi, np.pi / 2, np.pi / 2], + ], + ) - ret = morphology.trunk_angles(SIMPLE_TRUNK, coords_only="xyz", sort_along=None, consecutive_only=False) + ret = morphology.trunk_angles( + SIMPLE_TRUNK, coords_only="xyz", sort_along=None, consecutive_only=False + ) assert_array_almost_equal( ret, [ - [0., np.pi/2, np.pi/2, np.pi], - [0., np.pi, np.pi/2, np.pi/2], - [0., np.pi/2, np.pi/2, np.pi], - [0., np.pi, np.pi/2, np.pi/2], - ]) + [0.0, np.pi / 2, np.pi / 2, np.pi], + [0.0, np.pi, np.pi / 2, np.pi / 2], + [0.0, np.pi / 2, np.pi / 2, np.pi], + [0.0, np.pi, np.pi / 2, np.pi / 2], + ], + ) morph = load_morphology(SWC_PATH / 'simple_trunk.swc') # Add two basals - _add_neurite_trunk(morph, np.pi / 3, np.pi / 4) - _add_neurite_trunk(morph, -np.pi / 3, 
-np.pi / 4) + morph = _add_neurite_trunk(morph, np.pi / 3, np.pi / 4) + morph = _add_neurite_trunk(morph, -np.pi / 3, -np.pi / 4) ret = morphology.trunk_angles(morph) assert_array_almost_equal(ret, [np.pi / 2, 0.387596, 1.183199, 1.183199, 0.387596, np.pi / 2]) @@ -282,7 +298,7 @@ def test_trunk_angles(): [0.0, np.pi / 6, 2.617993, np.pi, np.pi / 2, np.pi / 2], [0.0, 2.418858, 2.617993, 1.209429, 1.932163, np.pi / 6], [0.0, np.pi / 6, 1.209429, 1.932163, 2.617993, 2.418858], - ] + ], ) ret = morphology.trunk_angles(morph, coords_only="xyz", sort_along=None, consecutive_only=False) @@ -295,7 +311,7 @@ def test_trunk_angles(): [0.0, np.pi / 6, 2.617993, np.pi, np.pi / 2, np.pi / 2], [0.0, 2.418858, 2.617993, 1.209429, 1.932163, np.pi / 6], [0.0, np.pi / 6, 1.209429, 1.932163, 2.617993, 2.418858], - ] + ], ) @@ -303,8 +319,8 @@ def test_trunk_angles_inter_types(): morph = load_morphology(SWC_PATH / 'simple_trunk.swc') # Add two basals - _add_neurite_trunk(morph, np.pi / 3, np.pi / 4) - _add_neurite_trunk(morph, -np.pi / 3, -np.pi / 4) + morph = _add_neurite_trunk(morph, np.pi / 3, np.pi / 4) + morph = _add_neurite_trunk(morph, -np.pi / 3, -np.pi / 4) # Test with no source ret = morphology.trunk_angles_inter_types( @@ -323,12 +339,14 @@ def test_trunk_angles_inter_types(): ) assert_array_almost_equal( ret, - [[ - [np.pi / 2, -np.pi / 2, 0], - [np.pi / 2, -np.pi / 2, np.pi], - [np.pi / 6, -np.pi / 6, np.pi / 4], - [5 * np.pi / 6, -5 * np.pi / 6, -np.pi / 4], - ]] + [ + [ + [np.pi / 2, -np.pi / 2, 0], + [np.pi / 2, -np.pi / 2, np.pi], + [np.pi / 6, -np.pi / 6, np.pi / 4], + [5 * np.pi / 6, -5 * np.pi / 6, -np.pi / 4], + ] + ], ) # Test with closest component equal to 3d angle @@ -354,7 +372,7 @@ def test_trunk_angles_inter_types(): [[np.pi / 2, np.pi / 2, -np.pi]], [[np.pi / 6, np.pi / 6, -np.pi / 4]], [[5 * np.pi / 6, 5 * np.pi / 6, np.pi / 4]], - ] + ], ) # Test with only one target per source and closest component equal to 3d angle @@ -371,7 +389,7 @@ def 
test_trunk_angles_inter_types(): [[np.pi / 2, np.pi / 2, -np.pi]], [[np.pi / 6, np.pi / 6, -np.pi / 4]], [[5 * np.pi / 6, 5 * np.pi / 6, np.pi / 4]], - ] + ], ) @@ -379,8 +397,8 @@ def test_trunk_angles_from_vector(): morph = load_morphology(SWC_PATH / 'simple_trunk.swc') # Add two basals - _add_neurite_trunk(morph, np.pi / 3, np.pi / 4) - _add_neurite_trunk(morph, -np.pi / 3, -np.pi / 4) + morph = _add_neurite_trunk(morph, np.pi / 3, np.pi / 4) + morph = _add_neurite_trunk(morph, -np.pi / 3, -np.pi / 4) # Test with no neurite selected ret = morphology.trunk_angles_from_vector( @@ -401,15 +419,11 @@ def test_trunk_angles_from_vector(): [np.pi / 2, -np.pi / 2, np.pi], [np.pi / 6, -np.pi / 6, np.pi / 4], [5 * np.pi / 6, -5 * np.pi / 6, -np.pi / 4], - ] + ], ) # Test with given vector - ret = morphology.trunk_angles_from_vector( - morph, - NeuriteType.basal_dendrite, - vector=(0, -1, 0) - ) + ret = morphology.trunk_angles_from_vector(morph, NeuriteType.basal_dendrite, vector=(0, -1, 0)) assert_array_almost_equal( ret, [ @@ -417,40 +431,54 @@ def test_trunk_angles_from_vector(): [np.pi / 2, np.pi / 2, np.pi], [5 * np.pi / 6, 5 * np.pi / 6, np.pi / 4], [np.pi / 6, np.pi / 6, -np.pi / 4], - ] + ], ) def test_trunk_vectors(): ret = morphology.trunk_vectors(SIMPLE_TRUNK) - assert_array_equal(ret[0], [0., -1., 0.]) - assert_array_equal(ret[1], [1., 0., 0.]) - assert_array_equal(ret[2], [-1., 0., 0.]) - assert_array_equal(ret[3], [0., 1., 0.]) + assert_array_equal(ret[0], [0.0, -1.0, 0.0]) + assert_array_equal(ret[1], [1.0, 0.0, 0.0]) + assert_array_equal(ret[2], [-1.0, 0.0, 0.0]) + assert_array_equal(ret[3], [0.0, 1.0, 0.0]) ret = morphology.trunk_vectors(SIMPLE_TRUNK, neurite_type=NeuriteType.axon) - assert_array_equal(ret[0], [0., -1., 0.]) + assert_array_equal(ret[0], [0.0, -1.0, 0.0]) def test_trunk_origin_elevations(): - n0 = load_morphology(StringIO(u""" + n0 = load_morphology( + StringIO( + u""" 1 1 0 0 0 4 -1 2 3 1 0 0 2 1 3 3 2 1 1 2 2 4 3 0 1 0 2 1 5 3 1 2 1 2 4 - 
"""), reader='swc') + """ + ), + reader='swc', + ) - n1 = load_morphology(StringIO(u""" + n1 = load_morphology( + StringIO( + u""" 1 1 0 0 0 4 -1 2 3 0 -1 0 2 1 3 3 -1 -2 -1 2 2 - """), reader='swc') + """ + ), + reader='swc', + ) pop = [n0, n1] - assert_allclose(morphology.trunk_origin_elevations(n0), [0.0, np.pi / 2.]) - assert_allclose(morphology.trunk_origin_elevations(n1), [-np.pi / 2.]) - assert_allclose(morphology.trunk_origin_elevations(n0, NeuriteType.basal_dendrite), [0.0, np.pi / 2.]) - assert_allclose(morphology.trunk_origin_elevations(n1, NeuriteType.basal_dendrite), [-np.pi / 2.]) + assert_allclose(morphology.trunk_origin_elevations(n0), [0.0, np.pi / 2.0]) + assert_allclose(morphology.trunk_origin_elevations(n1), [-np.pi / 2.0]) + assert_allclose( + morphology.trunk_origin_elevations(n0, NeuriteType.basal_dendrite), [0.0, np.pi / 2.0] + ) + assert_allclose( + morphology.trunk_origin_elevations(n1, NeuriteType.basal_dendrite), [-np.pi / 2.0] + ) assert morphology.trunk_origin_elevations(n0, NeuriteType.axon) == [] assert morphology.trunk_origin_elevations(n1, NeuriteType.axon) == [] @@ -463,27 +491,79 @@ def test_trunk_elevation_zero_norm_vector_raises(): morphology.trunk_origin_elevations(SWC_NRN) -def test_sholl_crossings_simple(): - center = SIMPLE.soma.center +def test_sholl_crossings_simple(SIMPLE_MORPHOLOGY): + center = SIMPLE_MORPHOLOGY.soma.center radii = [] - assert (list(morphology.sholl_crossings(SIMPLE, center=center, radii=radii)) == []) - assert (list(morphology.sholl_crossings(SIMPLE, radii=radii)) == []) - assert (list(morphology.sholl_crossings(SIMPLE)) == [2]) + assert list(morphology.sholl_crossings(SIMPLE_MORPHOLOGY, center=center, radii=radii)) == [] + assert list(morphology.sholl_crossings(SIMPLE_MORPHOLOGY, radii=radii)) == [] + assert list(morphology.sholl_crossings(SIMPLE_MORPHOLOGY)) == [2] radii = [1.0] - assert ([2] == - list(morphology.sholl_crossings(SIMPLE, center=center, radii=radii))) + assert 
list(morphology.sholl_crossings(SIMPLE_MORPHOLOGY, center=center, radii=radii)) == [2] radii = [1.0, 5.1] - assert ([2, 4] == - list(morphology.sholl_crossings(SIMPLE, center=center, radii=radii))) + assert list(morphology.sholl_crossings(SIMPLE_MORPHOLOGY, center=center, radii=radii)) == [2, 4] + + radii = [1.0, 4.0, 5.0] + assert list(morphology.sholl_crossings(SIMPLE_MORPHOLOGY, center=center, radii=radii)) == [ + 2, + 4, + 5, + ] + + assert list( + morphology.sholl_crossings(SIMPLE_MORPHOLOGY.sections[:2], center=center, radii=radii) + ) == [1, 1, 2] + + radii = [1.0, 4.0, 5.0, 10] + assert list( + morphology.sholl_crossings( + SIMPLE_MORPHOLOGY, neurite_type=NeuriteType.all, center=center, radii=radii + ) + ) == [2, 4, 5, 0] + assert list( + morphology.sholl_crossings( + SIMPLE_MORPHOLOGY, neurite_type=NeuriteType.basal_dendrite, center=center, radii=radii + ) + ) == [1, 1, 3, 0] + assert list( + morphology.sholl_crossings( + SIMPLE_MORPHOLOGY, neurite_type=NeuriteType.apical_dendrite, center=center, radii=radii + ) + ) == [0, 0, 0, 0] + assert list( + morphology.sholl_crossings( + SIMPLE_MORPHOLOGY, neurite_type=NeuriteType.axon, center=center, radii=radii + ) + ) == [1, 3, 2, 0] + - radii = [1., 4., 5.] 
- assert ([2, 4, 5] == - list(morphology.sholl_crossings(SIMPLE, center=center, radii=radii))) +def test_sholl_frequency_simple(SIMPLE_MORPHOLOGY): + assert list(morphology.sholl_frequency(SIMPLE_MORPHOLOGY)) == [2] + assert list(morphology.sholl_frequency(SIMPLE_MORPHOLOGY, step_size=3)) == [2, 4, 3] + assert list(morphology.sholl_frequency(SIMPLE_MORPHOLOGY, bins=[1, 3, 5])) == [2, 2, 5] + + assert list( + morphology.sholl_frequency(SIMPLE_MORPHOLOGY, neurite_type=NeuriteType.basal_dendrite) + ) == [1] + assert list( + morphology.sholl_frequency( + SIMPLE_MORPHOLOGY, neurite_type=NeuriteType.basal_dendrite, step_size=3 + ) + ) == [1, 1, 2] + assert list( + morphology.sholl_frequency( + SIMPLE_MORPHOLOGY, neurite_type=NeuriteType.basal_dendrite, bins=[1, 3, 5] + ) + ) == [1, 1, 3] - assert ([1, 1, 2] == - list(morphology.sholl_crossings(SIMPLE.sections[:2], center=center, radii=radii))) + assert list(morphology.sholl_frequency(SIMPLE_MORPHOLOGY, neurite_type=NeuriteType.axon)) == [1] + assert list( + morphology.sholl_frequency(SIMPLE_MORPHOLOGY, neurite_type=NeuriteType.axon, step_size=3) + ) == [1, 3, 1] + assert list( + morphology.sholl_frequency(SIMPLE_MORPHOLOGY, neurite_type=NeuriteType.axon, bins=[1, 3, 5]) + ) == [1, 1, 2] def load_swc(string): @@ -498,16 +578,27 @@ def test_sholl_analysis_custom(): # http://dx.doi.org/10.1016/j.jneumeth.2014.01.016 radii = np.arange(10, 81, 10) center = 0, 0, 0 - morph_A = load_swc("""\ + morph_A = load_swc( + """\ 1 1 0 0 0 1. -1 2 3 0 0 0 1. 1 3 3 80 0 0 1. 2 4 4 0 0 0 1. 1 - 5 4 -80 0 0 1. 4""") - assert (list(morphology.sholl_crossings(morph_A, center=center, radii=radii)) == - [2, 2, 2, 2, 2, 2, 2, 2]) - - morph_B = load_swc("""\ + 5 4 -80 0 0 1. 4""" + ) + assert list(morphology.sholl_crossings(morph_A, center=center, radii=radii)) == [ + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + ] + + morph_B = load_swc( + """\ 1 1 0 0 0 1. -1 2 3 0 0 0 1. 1 3 3 35 0 0 1. 
2 @@ -522,11 +613,21 @@ def test_sholl_analysis_custom(): 12 4 -51 0 0 1. 9 13 4 -51 -5 0 1. 9 14 4 -51 -10 0 1. 9 - """) - assert (list(morphology.sholl_crossings(morph_B, center=center, radii=radii)) == - [2, 2, 2, 10, 10, 0, 0, 0]) - - morph_C = load_swc("""\ + """ + ) + assert list(morphology.sholl_crossings(morph_B, center=center, radii=radii)) == [ + 2, + 2, + 2, + 10, + 10, + 0, + 0, + 0, + ] + + morph_C = load_swc( + """\ 1 1 0 0 0 1. -1 2 3 0 0 0 1. 1 3 3 65 0 0 1. 2 @@ -541,66 +642,93 @@ def test_sholl_analysis_custom(): 12 4 85 0 0 1. 9 13 4 85 -5 0 1. 9 14 4 85 -10 0 1. 9 - """) - assert (list(morphology.sholl_crossings(morph_C, center=center, radii=radii)) == - [2, 2, 2, 2, 2, 2, 10, 10]) + """ + ) + assert list(morphology.sholl_crossings(morph_C, center=center, radii=radii)) == [ + 2, + 2, + 2, + 2, + 2, + 2, + 10, + 10, + ] def test_extent_along_axis(): - morph = load_swc(""" + morph = load_swc( + """ 1 1 0 0 0 1. -1 2 3 0 -60 0 1. 1 3 3 80 0 2 1. 2 4 4 0 60 3 1. 1 5 4 -80 0. 0 1. 4 - """) + """ + ) assert_almost_equal(morphology._extent_along_axis(morph, 0, NeuriteType.all), 160.0) assert_almost_equal(morphology._extent_along_axis(morph, 1, NeuriteType.all), 120.0) assert_almost_equal(morphology._extent_along_axis(morph, 2, NeuriteType.all), 3.0) def test_total_width(): - morph = load_swc(""" + morph = load_swc( + """ 1 1 0 0 0 1. -1 2 3 0 -60 0 1. 1 3 3 80 0 2 1. 2 4 4 0 60 3 1. 1 5 4 -80 0. 0 1. 
4 - """) + """ + ) assert_almost_equal(morphology.total_width(morph, neurite_type=NeuriteType.axon), 0.0) - assert_almost_equal(morphology.total_width(morph, neurite_type=NeuriteType.basal_dendrite), 80.0) - assert_almost_equal(morphology.total_width(morph, neurite_type=NeuriteType.apical_dendrite), 80.0) + assert_almost_equal( + morphology.total_width(morph, neurite_type=NeuriteType.basal_dendrite), 80.0 + ) + assert_almost_equal( + morphology.total_width(morph, neurite_type=NeuriteType.apical_dendrite), 80.0 + ) def test_total_height(): - morph = load_swc(""" + morph = load_swc( + """ 1 1 0 0 0 1. -1 2 3 0 -60 0 1. 1 3 3 80 0 2 1. 2 4 4 0 60 3 1. 1 5 4 -80 0. 0 1. 4 - """) + """ + ) assert_almost_equal(morphology.total_height(morph, neurite_type=NeuriteType.axon), 0.0) - assert_almost_equal(morphology.total_height(morph, neurite_type=NeuriteType.basal_dendrite), 60.0) - assert_almost_equal(morphology.total_height(morph, neurite_type=NeuriteType.apical_dendrite), 60.0) + assert_almost_equal( + morphology.total_height(morph, neurite_type=NeuriteType.basal_dendrite), 60.0 + ) + assert_almost_equal( + morphology.total_height(morph, neurite_type=NeuriteType.apical_dendrite), 60.0 + ) def test_total_depth(): - morph = load_swc(""" + morph = load_swc( + """ 1 1 0 0 0 1. -1 2 3 0 -60 0 1. 1 3 3 80 0 2 1. 2 4 4 0 60 3 1. 1 5 4 -80 0. 0 1. 
4 - """) + """ + ) assert_almost_equal(morphology.total_depth(morph, neurite_type=NeuriteType.axon), 0.0) assert_almost_equal(morphology.total_depth(morph, neurite_type=NeuriteType.basal_dendrite), 2.0) - assert_almost_equal(morphology.total_depth(morph, neurite_type=NeuriteType.apical_dendrite), 3.0) + assert_almost_equal( + morphology.total_depth(morph, neurite_type=NeuriteType.apical_dendrite), 3.0 + ) def test_volume_density(): - - morph = load_swc(""" + morph = load_swc( + """ 1 1 0.5 0.5 0.5 0.5 -1 2 3 0.211324 0.211324 0.788675 0.1 1 3 3 0.0 0.0 1.0 0.1 2 @@ -618,7 +746,8 @@ def test_volume_density(): 15 2 1.0 0.0 0.0 0.1 14 16 3 0.788675 0.788675 0.788675 0.1 1 17 3 1.0 1.0 1.0 0.1 16 - """) + """ + ) # the neurites sprout from the center of a cube to its vertices, therefore the convex hull # is the cube itself of side 1.0 @@ -632,15 +761,11 @@ def test_volume_density(): expected_volume_density = expected_neurite_volume / expected_hull_volume - assert_almost_equal( - morphology.volume_density(morph), - expected_volume_density, - decimal=5 - ) + assert_almost_equal(morphology.volume_density(morph), expected_volume_density, decimal=5) assert_almost_equal( morphology.volume_density(morph, neurite_type=NeuriteType.all), expected_volume_density, - decimal=5 + decimal=5, ) # (0 0 1) (0 1 1) (0 0 0) (0 1 0) (1 0 1)(1 1 1) @@ -655,7 +780,7 @@ def test_volume_density(): assert_almost_equal( morphology.volume_density(morph, neurite_type=NeuriteType.basal_dendrite), expected_volume_density, - decimal=5 + decimal=5, ) # invalid convex hull @@ -670,8 +795,8 @@ def test_volume_density(): def test_unique_projected_points(): - - morph = load_swc(""" + morph = load_swc( + """ 1 1 0.5 0.5 0.5 0.5 -1 2 3 0.2 0.2 0.7 0.1 1 3 3 0.0 0.0 1.0 0.1 2 @@ -689,37 +814,77 @@ def test_unique_projected_points(): 15 2 1.0 0.0 0.0 0.1 14 16 3 0.7 0.7 0.7 0.1 1 17 3 1.0 1.0 1.0 0.1 16 - """) + """ + ) for plane, enalp in zip(("xy", "xz", "yz"), ("yx", "zx", "zy")): - npt.assert_allclose( + 
assert_allclose( morphology._unique_projected_points(morph, plane, NeuriteType.all), morphology._unique_projected_points(morph, enalp, NeuriteType.all), ) - npt.assert_allclose( + assert_allclose( morphology._unique_projected_points(morph, "xy", NeuriteType.all), [ - [0. , 0. ], [0. , 0. ], [0. , 1. ], [0. , 1. ], [0.2, 0.2], [0.2, 0.2], - [0.2, 0.7], [0.2, 0.7], [0.7, 0.2], [0.7, 0.2], [0.7, 0.7], [0.7, 0.7], - [1. , 0. ], [1. , 0. ], [1. , 1. ], [1. , 1. ], - ] + [0.0, 0.0], + [0.0, 0.0], + [0.0, 1.0], + [0.0, 1.0], + [0.2, 0.2], + [0.2, 0.2], + [0.2, 0.7], + [0.2, 0.7], + [0.7, 0.2], + [0.7, 0.2], + [0.7, 0.7], + [0.7, 0.7], + [1.0, 0.0], + [1.0, 0.0], + [1.0, 1.0], + [1.0, 1.0], + ], ) - npt.assert_allclose( + assert_allclose( morphology._unique_projected_points(morph, "xz", NeuriteType.all), [ - [0. , 0. ], [0. , 1. ], [0. , 0. ], [0. , 1. ], [0.2, 0.2], [0.2, 0.7], - [0.2, 0.2], [0.2, 0.7], [0.7, 0.2], [0.7, 0.7], [0.7, 0.2], [0.7, 0.7], - [1. , 0. ], [1. , 1. ], [1. , 0. ], [1. , 1. ], - ] + [0.0, 0.0], + [0.0, 1.0], + [0.0, 0.0], + [0.0, 1.0], + [0.2, 0.2], + [0.2, 0.7], + [0.2, 0.2], + [0.2, 0.7], + [0.7, 0.2], + [0.7, 0.7], + [0.7, 0.2], + [0.7, 0.7], + [1.0, 0.0], + [1.0, 1.0], + [1.0, 0.0], + [1.0, 1.0], + ], ) - npt.assert_allclose( + assert_allclose( morphology._unique_projected_points(morph, "yz", NeuriteType.all), [ - [0. , 0. ], [0. , 1. ], [1. , 0. ], [1. , 1. ], [0.2, 0.2], [0.2, 0.7], - [0.7, 0.2], [0.7, 0.7], [0.2, 0.2], [0.2, 0.7], [0.7, 0.2], [0.7, 0.7], - [0. , 0. ], [0. , 1. ], [1. , 0. ], [1. , 1. 
], - ] + [0.0, 0.0], + [0.0, 1.0], + [1.0, 0.0], + [1.0, 1.0], + [0.2, 0.2], + [0.2, 0.7], + [0.7, 0.2], + [0.7, 0.7], + [0.2, 0.2], + [0.2, 0.7], + [0.7, 0.2], + [0.7, 0.7], + [0.0, 0.0], + [0.0, 1.0], + [1.0, 0.0], + [1.0, 1.0], + ], ) with pytest.raises(NeuroMError): diff --git a/tests/features/test_neurite.py b/tests/features/test_neurite.py index 1f476d89f..558288e17 100644 --- a/tests/features/test_neurite.py +++ b/tests/features/test_neurite.py @@ -26,7 +26,7 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -"""Test features.neuritefunc.""" +"""Test ``neurom.features.neurite``.""" from math import pi, sqrt from pathlib import Path @@ -71,25 +71,27 @@ def test_neurite_volume_density(): assert len(vol_density) == 4 assert np.allclose(vol_density, vol / hull_vol) - ref_density = [0.43756606998299519, 0.52464681266899216, - 0.24068543213643726, 0.26289304906104355] + ref_density = [ + 0.43756606998299519, + 0.52464681266899216, + 0.24068543213643726, + 0.26289304906104355, + ] assert_allclose(vol_density, ref_density) def test_neurite_volume_density_failed_convex_hull(): - flat_neuron = nm.load_morphology( - """ + """ 1 1 0 0 0 0.5 -1 2 3 1 0 0 0.1 1 3 3 2 0 0 0.1 2 """, - reader="swc") - - assert np.isnan( - neurite.volume_density(flat_neuron.neurites[0]) + reader="swc", ) + assert np.isnan(neurite.volume_density(flat_neuron.neurites[0])) + def test_terminal_path_length_per_neurite(): terminal_distances = [neurite.terminal_path_lengths(s) for s in SIMPLE.neurites] @@ -97,8 +99,7 @@ def test_terminal_path_length_per_neurite(): def test_max_radial_distance(): - assert_allclose([neurite.max_radial_distance(s) for s in SIMPLE.neurites], - [7.81025, 7.2111025]) + assert_allclose([neurite.max_radial_distance(s) for s in SIMPLE.neurites], [7.81025, 7.2111025]) def test_number_of_segments(): @@ -111,17 +112,17 @@ def test_number_of_sections(): def 
test_section_path_distances(): path_lengths = [neurite.section_path_distances(s) for s in SIMPLE.neurites] - assert path_lengths == [[5., 10., 11.], [4., 10., 9.]] + assert path_lengths == [[5.0, 10.0, 11.0], [4.0, 10.0, 9.0]] def test_section_term_lengths(): term_lengths = [neurite.section_term_lengths(s) for s in SIMPLE.neurites] - assert term_lengths == [[5., 6.], [6., 5.]] + assert term_lengths == [[5.0, 6.0], [6.0, 5.0]] def test_section_bif_lengths(): bif_lengths = [neurite.section_bif_lengths(s) for s in SIMPLE.neurites] - assert bif_lengths == [[5.], [4.]] + assert bif_lengths == [[5.0], [4.0]] def test_section_end_distances(): @@ -131,17 +132,19 @@ def test_section_end_distances(): def test_section_partition_pairs(): part_pairs = [neurite.partition_pairs(s) for s in SIMPLE.neurites] - assert part_pairs == [[(1.0, 1.0)], [(1.0, 1.0)]] + assert part_pairs == [[[1.0, 1.0]], [[1.0, 1.0]]] def test_section_bif_radial_distances(): bif_rads = [neurite.section_bif_radial_distances(s) for s in SIMPLE.neurites] - assert bif_rads == [[5.], [4.]] + assert bif_rads == [[5.0], [4.0]] def test_section_term_radial_distances(): trm_rads = [neurite.section_term_radial_distances(s) for s in SIMPLE.neurites] - assert_allclose(trm_rads, [[7.0710678118654755, 7.810249675906654], [7.211102550927978, 6.4031242374328485]]) + assert_allclose( + trm_rads, [[7.0710678118654755, 7.810249675906654], [7.211102550927978, 6.4031242374328485]] + ) def test_section_branch_orders(): @@ -161,9 +164,13 @@ def test_section_term_branch_orders(): def test_section_radial_distances(): radial_distances = [neurite.section_radial_distances(s) for s in SIMPLE.neurites] - assert_allclose(radial_distances, - [[5.0, sqrt(5**2 + 5**2), sqrt(6**2 + 5**2)], - [4.0, sqrt(6**2 + 4**2), sqrt(5**2 + 4**2)]]) + assert_allclose( + radial_distances, + [ + [5.0, sqrt(5**2 + 5**2), sqrt(6**2 + 5**2)], + [4.0, sqrt(6**2 + 4**2), sqrt(5**2 + 4**2)], + ], + ) def test_local_bifurcation_angles(): @@ -213,40 +220,49 @@ 
def test_segment_volumes(): def test_segment_midpoints(): midpoints = [neurite.segment_midpoints(s) for s in SIMPLE.neurites] - assert_allclose(midpoints, - [[[0., (5. + 0) / 2, 0.], # trunk type 2 - [-2.5, 5., 0.], - [3., 5., 0.]], - [[0., (-4. + 0) / 2., 0.], # trunk type 3 - [3., -4., 0.], - [-2.5, -4., 0.]]]) + assert_allclose( + midpoints, + [ + [[0.0, (5.0 + 0) / 2, 0.0], [-2.5, 5.0, 0.0], [3.0, 5.0, 0.0]], # trunk type 2 + [[0.0, (-4.0 + 0) / 2.0, 0.0], [3.0, -4.0, 0.0], [-2.5, -4.0, 0.0]], # trunk type 3 + ], + ) def test_segment_radial_distances(): """midpoints on segments.""" radial_distances = [neurite.segment_radial_distances(s) for s in SIMPLE.neurites] - assert_allclose(radial_distances, - [[2.5, sqrt(2.5**2 + 5**2), sqrt(3**2 + 5**2)], [2.0, 5.0, sqrt(2.5**2 + 4**2)]]) + assert_allclose( + radial_distances, + [ + [2.5, sqrt(2.5**2 + 5**2), sqrt(3**2 + 5**2)], + [2.0, 5.0, sqrt(2.5**2 + 4**2)], + ], + ) def test_segment_path_lengths(): pathlengths = [neurite.segment_path_lengths(s) for s in SIMPLE.neurites] - assert_allclose(pathlengths, [[5., 10., 11.], [4., 10., 9.]]) + assert_allclose(pathlengths, [[5.0, 10.0, 11.0], [4.0, 10.0, 9.0]]) pathlengths = neurite.segment_path_lengths(NRN.neurites[0])[:5] assert_allclose(pathlengths, [0.1, 1.332525, 2.5301487, 3.267878, 4.471462]) def test_section_taper_rates(): - assert_allclose(neurite.section_taper_rates(NRN.neurites[0])[:10], - [0.06776235492169848, - 0.0588716599404923, - 0.03791571485186163, - 0.04674653812192691, - -0.026399800285566058, - -0.026547582897720887, - -0.045038414440432537, - 0.02083822978267914, - -0.0027721371791201038, - 0.0803069042861474], - atol=1e-4) + assert_allclose( + neurite.section_taper_rates(NRN.neurites[0])[:10], + [ + 0.06776235492169848, + 0.0588716599404923, + 0.03791571485186163, + 0.04674653812192691, + -0.026399800285566058, + -0.026547582897720887, + -0.045038414440432537, + 0.02083822978267914, + -0.0027721371791201038, + 0.0803069042861474, + ], + atol=1e-4, + ) 
diff --git a/tests/features/test_population.py b/tests/features/test_population.py new file mode 100644 index 000000000..1ba996ddb --- /dev/null +++ b/tests/features/test_population.py @@ -0,0 +1,57 @@ +# Copyright (c) 2015, Ecole Polytechnique Federale de Lausanne, Blue Brain Project +# All rights reserved. +# +# This file is part of NeuroM +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# 3. Neither the name of the copyright holder nor the names of +# its contributors may be used to endorse or promote products +# derived from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY +# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Test ``features.population``.""" +from neurom import NeuriteType +from neurom.features import population + + +def test_sholl_crossings(POP): + assert list(population.sholl_frequency(POP)) == [4] + assert list(population.sholl_frequency(POP, step_size=3)) == [4, 8, 6] + assert list(population.sholl_frequency(POP, bins=[1, 3, 5])) == [4, 4, 10] + + assert list(population.sholl_frequency(POP, neurite_type=NeuriteType.basal_dendrite)) == [2] + assert list( + population.sholl_frequency(POP, neurite_type=NeuriteType.basal_dendrite, step_size=3) + ) == [2, 2, 4] + assert list( + population.sholl_frequency(POP, neurite_type=NeuriteType.basal_dendrite, bins=[1, 3, 5]) + ) == [2, 2, 6] + + assert list(population.sholl_frequency(POP, neurite_type=NeuriteType.axon)) == [2] + assert list(population.sholl_frequency(POP, neurite_type=NeuriteType.axon, step_size=3)) == [ + 2, + 6, + 2, + ] + assert list(population.sholl_frequency(POP, neurite_type=NeuriteType.axon, bins=[1, 3, 5])) == [ + 2, + 2, + 4, + ] diff --git a/tests/features/test_section.py b/tests/features/test_section.py index 373d32a55..f53f90501 100644 --- a/tests/features/test_section.py +++ b/tests/features/test_section.py @@ -26,7 +26,7 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-"""Test neurom.sectionfunc.""" +"""Test ``neurom.features.section``.""" import math import warnings @@ -48,9 +48,13 @@ NRN = load_morphology(H5_PATH / 'Neuron.h5') SECTION_ID = 0 + def test_section_points(): - sec = Mock(points=np.array([[0., 1., 2., 1.], [3., 4., 5., 1.], [6., 7., 8., 1.]])) - npt.assert_almost_equal(section.section_points(sec), [[0., 1., 2.], [3., 4., 5.], [6., 7., 8.]]) + sec = Mock(points=np.array([[0.0, 1.0, 2.0, 1.0], [3.0, 4.0, 5.0, 1.0], [6.0, 7.0, 8.0, 1.0]])) + npt.assert_almost_equal( + section.section_points(sec), [[0.0, 1.0, 2.0], [3.0, 4.0, 5.0], [6.0, 7.0, 8.0]] + ) + def test_section_length(): sec = Mock(length=3.2) @@ -58,148 +62,231 @@ def test_section_length(): def test_number_of_segments(): - sec = Mock(points=np.array([[0., 1., 2., 1.], [3., 4., 5., 1.], [6., 7., 8., 1.]])) + sec = Mock(points=np.array([[0.0, 1.0, 2.0, 1.0], [3.0, 4.0, 5.0, 1.0], [6.0, 7.0, 8.0, 1.0]])) npt.assert_almost_equal(section.number_of_segments(sec), 2) def test_section_taper_rate(): # Note: taper rate is calculated on the diameters - sec = Mock(points=np.array([[0., 0., 0., 2.], [1., 0., 0., 1.], [2., 0., 0., 0.]])) - npt.assert_almost_equal(section.taper_rate(sec), -2.) 
+ sec = Mock(points=np.array([[0.0, 0.0, 0.0, 2.0], [1.0, 0.0, 0.0, 1.0], [2.0, 0.0, 0.0, 0.0]])) + npt.assert_almost_equal(section.taper_rate(sec), -2.0) def test_segment_taper_rates(): # Note: taper rate is calculated on the diameters - sec = Mock(points=np.array([[0., 0., 0., 2.], [1., 0., 0., 1.], [2., 0., 0., 0.]])) - npt.assert_almost_equal(section.segment_taper_rates(sec), [-2., -2.]) + sec = Mock(points=np.array([[0.0, 0.0, 0.0, 2.0], [1.0, 0.0, 0.0, 1.0], [2.0, 0.0, 0.0, 0.0]])) + npt.assert_almost_equal(section.segment_taper_rates(sec), [-2.0, -2.0]) + + +def test_section_path_length(): + m = load_morphology( + """ + 1 1 0 0 0 0.5 -1 + 2 3 1 0 0 0.1 1 + 3 3 2 0 0 0.1 2 + 4 3 3 0 0 0.1 3 + 5 3 2 1 0 0.1 3 + """, + reader="swc", + ) + + sec = m.sections[1] + npt.assert_almost_equal(section.section_path_length(sec), 2.0) def test_section_area(): - sec = load_morphology(StringIO(u"""((CellBody) (-1 0 0 2) (1 0 0 2)) + sec = load_morphology( + StringIO( + u"""((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) (0 0 0 2) - (1 0 0 2))"""), reader='asc').sections[SECTION_ID] + (1 0 0 2))""" + ), + reader='asc', + ).sections[SECTION_ID] area = section.section_area(sec) assert math.pi * 1 * 2 * 1 == area def test_segment_areas(): - sec = load_morphology(StringIO(u"""((CellBody) (-1 0 0 2) (1 0 0 2)) + sec = load_morphology( + StringIO( + u"""((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) (0 0 0 4) (1 0 0 4) - (2 0 0 4))"""), reader='asc').sections[SECTION_ID] + (2 0 0 4))""" + ), + reader='asc', + ).sections[SECTION_ID] - npt.assert_allclose(section.segment_areas(sec), [2. * np.pi * 2. * 1.] 
* 2) + npt.assert_allclose(section.segment_areas(sec), [2.0 * np.pi * 2.0 * 1.0] * 2) def test_segment_volumes(): - sec = load_morphology(StringIO(u"""((CellBody) (-1 0 0 2) (1 0 0 2)) + sec = load_morphology( + StringIO( + u"""((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) (0 0 0 4) (1 0 0 4) - (2 0 0 4))"""), reader='asc').sections[SECTION_ID] + (2 0 0 4))""" + ), + reader='asc', + ).sections[SECTION_ID] - npt.assert_allclose(section.segment_areas(sec), [np.pi * 4. * 1.] * 2) + npt.assert_allclose(section.segment_areas(sec), [np.pi * 4.0 * 1.0] * 2) def test_segment_mean_radii(): - sec = load_morphology(StringIO(u"""((CellBody) (-1 0 0 2) (1 0 0 2)) + sec = load_morphology( + StringIO( + u"""((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) (0 0 0 2) (1 0 0 4) - (2 0 0 6))"""), reader='asc').sections[SECTION_ID] + (2 0 0 6))""" + ), + reader='asc', + ).sections[SECTION_ID] npt.assert_allclose(section.segment_mean_radii(sec), [1.5, 2.5]) def test_segment_midpoints(): - sec = load_morphology(StringIO(u"""((CellBody) (-1 0 0 2) (1 0 0 2)) + sec = load_morphology( + StringIO( + u"""((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) (0 0 0 2) (1 0 0 4) - (2 0 0 6))"""), reader='asc').sections[SECTION_ID] + (2 0 0 6))""" + ), + reader='asc', + ).sections[SECTION_ID] - npt.assert_allclose(section.segment_midpoints(sec), [[0.5, 0., 0.], [1.5, 0., 0.]]) + npt.assert_allclose(section.segment_midpoints(sec), [[0.5, 0.0, 0.0], [1.5, 0.0, 0.0]]) def test_section_tortuosity(): - sec_a = load_morphology(StringIO(u""" + sec_a = load_morphology( + StringIO( + u""" ((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) (0 0 0 2) (1 0 0 2) (2 0 0 2) - (3 0 0 2))"""), reader='asc').sections[SECTION_ID] - - sec_b = load_morphology(StringIO(u""" + (3 0 0 2))""" + ), + reader='asc', + ).sections[SECTION_ID] + + sec_b = load_morphology( + StringIO( + u""" ((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) (0 0 0 2) (1 0 0 2) (1 2 0 2) - (0 2 0 2))"""), reader='asc').sections[SECTION_ID] + (0 2 0 2))""" + ), + 
reader='asc', + ).sections[SECTION_ID] assert section.section_tortuosity(sec_a) == 1.0 assert section.section_tortuosity(sec_b) == 4.0 / 2.0 for s in iter_sections(NRN): - assert (section.section_tortuosity(s) == - morphmath.section_length(s.points) / morphmath.point_dist(s.points[0], s.points[-1])) + assert section.section_tortuosity(s) == morphmath.section_length( + s.points + ) / morphmath.point_dist(s.points[0], s.points[-1]) + def test_setion_tortuosity_single_point(): - sec = load_morphology(StringIO(u"""((CellBody) (-1 0 0 2) (1 0 0 2)) + sec = load_morphology( + StringIO( + u"""((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) - (1 2 3 2))"""), reader='asc').sections[SECTION_ID] + (1 2 3 2))""" + ), + reader='asc', + ).sections[SECTION_ID] assert section.section_tortuosity(sec) == 1.0 def test_section_tortuosity_looping_section(): - sec = load_morphology(StringIO(u""" + sec = load_morphology( + StringIO( + u""" ((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) (0 0 0 2) (1 0 0 2) (1 2 0 2) (0 2 0 2) - (0 0 0 2))"""), reader='asc').sections[SECTION_ID] + (0 0 0 2))""" + ), + reader='asc', + ).sections[SECTION_ID] with warnings.catch_warnings(record=True): assert section.section_tortuosity(sec) == np.inf def test_section_meander_angles(): - s0 = load_morphology(StringIO(u"""((CellBody) (-1 0 0 2) (1 0 0 2)) + s0 = load_morphology( + StringIO( + u"""((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) (0 0 0 2) (1 0 0 2) (2 0 0 2) (3 0 0 2) - (4 0 0 2))"""), reader='asc').sections[SECTION_ID] + (4 0 0 2))""" + ), + reader='asc', + ).sections[SECTION_ID] assert section.section_meander_angles(s0) == [math.pi, math.pi, math.pi] - s1 = load_morphology(StringIO(u"""((CellBody) (-1 0 0 2) (1 0 0 2)) + s1 = load_morphology( + StringIO( + u"""((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) (0 0 0 2) (1 0 0 2) (1 1 0 2) (2 1 0 2) - (2 2 0 2))"""), reader='asc').sections[SECTION_ID] + (2 2 0 2))""" + ), + reader='asc', + ).sections[SECTION_ID] assert section.section_meander_angles(s1) 
== [math.pi / 2, math.pi / 2, math.pi / 2] - s2 = load_morphology(StringIO(u"""((CellBody) (-1 0 0 2) (1 0 0 2)) + s2 = load_morphology( + StringIO( + u"""((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) (0 0 0 2) (0 0 1 2) (0 0 2 2) - (0 0 0 2))"""), reader='asc').sections[SECTION_ID] - assert section.section_meander_angles(s2) == [math.pi, 0.] + (0 0 0 2))""" + ), + reader='asc', + ).sections[SECTION_ID] + assert section.section_meander_angles(s2) == [math.pi, 0.0] def test_section_meander_angles_single_segment(): - s = load_morphology(StringIO(u"""((CellBody) (-1 0 0 2) (1 0 0 2)) + s = load_morphology( + StringIO( + u"""((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) (0 0 0 2) - (1 1 1 2))"""), reader='asc').sections[SECTION_ID] + (1 1 1 2))""" + ), + reader='asc', + ).sections[SECTION_ID] assert len(section.section_meander_angles(s)) == 0 @@ -211,11 +298,16 @@ def test_strahler_order(): def test_locate_segment_position(): - s = load_morphology(StringIO(u"""((CellBody) (-1 0 0 2) (1 0 0 2)) + s = load_morphology( + StringIO( + u"""((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) (0 0 0 0) (3 0 4 200) - (6 4 4 400))"""), reader='asc').sections[SECTION_ID] + (6 4 4 400))""" + ), + reader='asc', + ).sections[SECTION_ID] assert section.locate_segment_position(s, 0.0) == (0, 0.0) assert section.locate_segment_position(s, 0.25) == (0, 2.5) @@ -229,12 +321,17 @@ def test_locate_segment_position(): def test_mean_radius(): - n = load_morphology(StringIO(u""" + n = load_morphology( + StringIO( + u""" ((CellBody) (-1 0 0 2) (1 0 0 2)) ((Dendrite) (0 0 0 0) (3 0 4 200) - (6 4 4 400))"""), reader='asc') + (6 4 4 400))""" + ), + reader='asc', + ) - assert section.section_mean_radius(n.neurites[0]) == 100. 
+ assert section.section_mean_radius(n.neurites[0]) == 100.0 diff --git a/tests/geom/__init__.py b/tests/geom/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/geom/test_geom.py b/tests/geom/test_geom.py index 7141fd892..35a6db925 100644 --- a/tests/geom/test_geom.py +++ b/tests/geom/test_geom.py @@ -38,16 +38,13 @@ NRN = nm.load_morphology(SWC_DATA_PATH / 'Neuron.swc') SIMPLE = nm.load_morphology(SWC_DATA_PATH / 'simple.swc') + class PointObj: pass def test_bounding_box(): - - pts = np.array([[-1, -2, -3, -999], - [1, 2, 3, 1000], - [-100, 5, 33, 42], - [42, 55, 12, -3]]) + pts = np.array([[-1, -2, -3, -999], [1, 2, 3, 1000], [-100, 5, 33, 42], [42, 55, 12, -3]]) obj = PointObj() obj.points = pts @@ -56,25 +53,23 @@ def test_bounding_box(): def test_bounding_box_morphology(): - - ref = np.array([[-40.32853516, -57.600172, 0.], - [64.74726272, 48.51626225, 54.20408797]]) + ref = np.array([[-40.32853516, -57.600172, 0.0], [64.74726272, 48.51626225, 54.20408797]]) assert np.allclose(geom.bounding_box(NRN), ref) def test_bounding_box_soma(): - ref = np.array([[0., 0., 0.], [0.1, 0.2, 0.]]) + ref = np.array([[0.0, 0.0, 0.0], [0.1, 0.2, 0.0]]) assert np.allclose(geom.bounding_box(NRN.soma), ref) def test_bounding_box_neurite(): nrt = SIMPLE.neurites[0] - ref = np.array([[-5., 0., 0.], [ 6., 5., 0.]]) + ref = np.array([[-5.0, 0.0, 0.0], [6.0, 5.0, 0.0]]) np.testing.assert_allclose(geom.bounding_box(nrt), ref) -def test_convex_hull_points(): +def test_convex_hull_points(): # This leverages scipy ConvexHull and we don't want # to re-test scipy, so simply check that the points are the same. 
hull = geom.convex_hull(NRN) @@ -82,7 +77,6 @@ def test_convex_hull_points(): def test_convex_hull_volume(): - # This leverages scipy ConvexHull and we don't want # to re-test scipy, so simply regression test the volume hull = geom.convex_hull(NRN) diff --git a/tests/geom/test_transform.py b/tests/geom/test_transform.py index 88938d664..0f74975b2 100644 --- a/tests/geom/test_transform.py +++ b/tests/geom/test_transform.py @@ -27,6 +27,7 @@ # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import math +import morphio from pathlib import Path import neurom.geom.transform as gtr @@ -36,8 +37,8 @@ import pytest from numpy.testing import assert_almost_equal -TEST_UVEC = np.array([0.01856633, 0.37132666, 0.92831665]) -TEST_ANGLE = np.pi / 3. +TEST_UVEC = np.array([0.01856633, 0.37132666, 0.92831665]) +TEST_ANGLE = np.pi / 3.0 DATA_PATH = Path(__file__).parent.parent / 'data' H5_NRN_PATH = DATA_PATH / 'h5/v1/Neuron.h5' SWC_NRN_PATH = DATA_PATH / 'swc/Neuron.swc' @@ -46,29 +47,24 @@ def _Rx(angle): sn = np.sin(angle) cs = np.cos(angle) - return np.array([[1., 0., 0.], - [0., cs, -sn], - [0., sn, cs]]) + return np.array([[1.0, 0.0, 0.0], [0.0, cs, -sn], [0.0, sn, cs]]) def _Ry(angle): sn = np.sin(angle) cs = np.cos(angle) - return np.array([[cs, 0., sn], - [0., 1., 0.], - [-sn, 0., cs]]) + return np.array([[cs, 0.0, sn], [0.0, 1.0, 0.0], [-sn, 0.0, cs]]) def _Rz(angle): sn = np.sin(angle) cs = np.cos(angle) - return np.array([[cs, -sn, 0.], - [sn, cs, 0.], - [0., 0., 1.]]) + return np.array([[cs, -sn, 0.0], [sn, cs, 0.0], [0.0, 0.0, 1.0]]) def test_not_implemented_transform_call_raises(): with pytest.raises(NotImplementedError): + class Dummy(gtr.Transform3D): pass @@ -87,36 +83,25 @@ def test_rotate_bad_type_raises(): def test_translate_point(): - t = gtr.Translation([100, -100, 100]) point = [1, 2, 3] assert t(point).tolist() == [101, -98, 103] def test_translate_points(): - t = gtr.Translation([100, -100, 100]) points = np.array([[1, 2, 3], [11, 22, 33], 
[111, 222, 333]]) - assert np.all(t(points) == np.array([[101, -98, 103], - [111, -78, 133], - [211, 122, 433]])) + assert np.all(t(points) == np.array([[101, -98, 103], [111, -78, 133], [211, 122, 433]])) -ROT_90 = np.array([[0, -1, 0], - [1, 0, 0], - [0, 0, 1]]) +ROT_90 = np.array([[0, -1, 0], [1, 0, 0], [0, 0, 1]]) -ROT_180 = np.array([[-1, 0, 0], - [0, -1, 0], - [0, 0, 1]]) +ROT_180 = np.array([[-1, 0, 0], [0, -1, 0], [0, 0, 1]]) -ROT_270 = np.array([[0, 1, 0], - [-1, 0, 0], - [0, 0, 1]]) +ROT_270 = np.array([[0, 1, 0], [-1, 0, 0], [0, 0, 1]]) def test_rotate_point(): - rot = gtr.Rotation(ROT_90) assert rot([2, 0, 0]).tolist() == [0, 2, 0] assert rot([0, 2, 0]).tolist() == [-2, 0, 0] @@ -134,37 +119,23 @@ def test_rotate_point(): def test_rotate_points(): - rot = gtr.Rotation(ROT_90) - points = np.array([[2, 0, 0], - [0, 2, 0], - [0, 0, 2], - [3, 0, 3]]) + points = np.array([[2, 0, 0], [0, 2, 0], [0, 0, 2], [3, 0, 3]]) - assert np.all(rot(points) == np.array([[0, 2, 0], - [-2, 0, 0], - [0, 0, 2], - [0, 3, 3]])) + assert np.all(rot(points) == np.array([[0, 2, 0], [-2, 0, 0], [0, 0, 2], [0, 3, 3]])) rot = gtr.Rotation(ROT_180) - assert np.all(rot(points) == np.array([[-2, 0, 0], - [0, -2, 0], - [0, 0, 2], - [-3, 0, 3]])) + assert np.all(rot(points) == np.array([[-2, 0, 0], [0, -2, 0], [0, 0, 2], [-3, 0, 3]])) rot = gtr.Rotation(ROT_270) - assert np.all(rot(points) == np.array([[0, -2, 0], - [2, 0, 0], - [0, 0, 2], - [0, -3, 3]])) + assert np.all(rot(points) == np.array([[0, -2, 0], [2, 0, 0], [0, 0, 2], [0, -3, 3]])) def test_pivot_rotate_point(): - point = [1, 2, 3] - new_orig = np.array([10., 45., 50.]) + new_orig = np.array([10.0, 45.0, 50.0]) t = gtr.Translation(new_orig) t_inv = gtr.Translation(new_orig * -1) @@ -183,13 +154,9 @@ def test_pivot_rotate_point(): def test_pivot_rotate_points(): + points = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]]) - points = np.array([[1, 2, 3], - [4, 5, 6], - [7, 8, 9], - [10, 11, 12]]) - - new_orig = 
np.array([10., 45., 50.]) + new_orig = np.array([10.0, 45.0, 50.0]) t = gtr.Translation(new_orig) t_inv = gtr.Translation(new_orig * -1) @@ -208,10 +175,8 @@ def test_pivot_rotate_points(): def _check_morphology_translate(m_a, m_b, t): - # soma points - assert np.allclose( - (m_b.soma.points[:, COLS.XYZ] - m_a.soma.points[:, COLS.XYZ]), t) + assert np.allclose((m_b.soma.points[:, COLS.XYZ] - m_a.soma.points[:, COLS.XYZ]), t) _check_neurite_translate(m_a.neurites, m_b.neurites, t) @@ -222,22 +187,21 @@ def _check_neurite_translate(nrts_a, nrts_b, t): def test_translate_morphology_swc(): - - t = np.array([100., 100., 100.]) + t = np.array([100.0, 100.0, 100.0]) m = load_morphology(SWC_NRN_PATH) tm = gtr.translate(m, t) _check_morphology_translate(m, tm, t) def test_transform_translate_morphology_swc(): - t = np.array([100., 100., 100.]) + t = np.array([100.0, 100.0, 100.0]) m = load_morphology(SWC_NRN_PATH) tm = m.transform(gtr.Translation(t)) _check_morphology_translate(m, tm, t) def test_translate_morphology_h5(): - t = np.array([100., 100., 100.]) + t = np.array([100.0, 100.0, 100.0]) m = load_morphology(H5_NRN_PATH) tm = gtr.translate(m, t) @@ -245,21 +209,43 @@ def test_translate_morphology_h5(): def test_transform_translate_morphology_h5(): - t = np.array([100., 100., 100.]) + t = np.array([100.0, 100.0, 100.0]) m = load_morphology(H5_NRN_PATH) tm = m.transform(gtr.Translation(t)) _check_morphology_translate(m, tm, t) +def test_transform__mut_immut(): + t = np.array([100.0, 100.0, 100.0]) + + morph = morphio.Morphology(H5_NRN_PATH) + + m1 = load_morphology(morph) + m2 = m1.transform(gtr.Translation(t)) + + assert isinstance(m2.to_morphio(), morphio.Morphology), type(m2.to_morphio()) + + _check_morphology_translate(m1, m2, t) + + morph = morphio.mut.Morphology(H5_NRN_PATH) + + m3 = load_morphology(morph) + m4 = m3.transform(gtr.Translation(t)) + + assert isinstance(m4.to_morphio(), morphio.mut.Morphology), type(m4.to_morphio()) + + 
_check_morphology_translate(m3, m4, t) + + def _apply_rot(points, rot_mat): return np.dot(rot_mat, np.array(points).T).T def _check_morphology_rotate(m_a, m_b, rot_mat): - # soma points - assert np.allclose(_apply_rot(m_a.soma.points[:, COLS.XYZ], rot_mat), - m_b.soma.points[:, COLS.XYZ]) + assert np.allclose( + _apply_rot(m_a.soma.points[:, COLS.XYZ], rot_mat), m_b.soma.points[:, COLS.XYZ] + ) # neurite sections _check_neurite_rotate(m_a.neurites, m_b.neurites, rot_mat) @@ -267,14 +253,13 @@ def _check_morphology_rotate(m_a, m_b, rot_mat): def _check_neurite_rotate(nrt_a, nrt_b, rot_mat): for sa, sb in zip(iter_sections(nrt_a), iter_sections(nrt_b)): - assert np.allclose(sb.points[:, COLS.XYZ], - _apply_rot(sa.points[:, COLS.XYZ], rot_mat)) + assert np.allclose(sb.points[:, COLS.XYZ], _apply_rot(sa.points[:, COLS.XYZ], rot_mat)) def test_rotate_morphology_swc(): m_a = load_morphology(SWC_NRN_PATH) - m_b = gtr.rotate(m_a, [0, 0, 1], math.pi/2.0) - rot = gtr._rodrigues_to_dcm([0, 0, 1], math.pi/2.0) + m_b = gtr.rotate(m_a, [0, 0, 1], math.pi / 2.0) + rot = gtr._rodrigues_to_dcm([0, 0, 1], math.pi / 2.0) _check_morphology_rotate(m_a, m_b, rot) @@ -287,8 +272,8 @@ def test_transform_rotate_morphology_swc(): def test_rotate_morphology_h5(): m_a = load_morphology(H5_NRN_PATH) - m_b = gtr.rotate(m_a, [0, 0, 1], math.pi/2.0) - rot = gtr._rodrigues_to_dcm([0, 0, 1], math.pi/2.0) + m_b = gtr.rotate(m_a, [0, 0, 1], math.pi / 2.0) + rot = gtr._rodrigues_to_dcm([0, 0, 1], math.pi / 2.0) _check_morphology_rotate(m_a, m_b, rot) @@ -300,17 +285,20 @@ def test_transform_rotate_morphology_h5(): def test_rodrigues_to_dcm(): - - RES = np.array([[0.50017235, -0.80049871, 0.33019604], - [0.80739289, 0.56894174, 0.15627544], - [-0.3129606, 0.18843328, 0.9308859]]) + RES = np.array( + [ + [0.50017235, -0.80049871, 0.33019604], + [0.80739289, 0.56894174, 0.15627544], + [-0.3129606, 0.18843328, 0.9308859], + ] + ) R = gtr._rodrigues_to_dcm(TEST_UVEC, TEST_ANGLE) # assess rotation matrix 
properties: # detR = +=1 - assert_almost_equal(np.linalg.det(R), 1.) + assert_almost_equal(np.linalg.det(R), 1.0) # R.T = R^-1 assert np.allclose(np.linalg.inv(R), R.transpose()) @@ -324,11 +312,10 @@ def test_rodrigues_to_dcm(): assert np.allclose(np.dot(Rinv, R), np.identity(3)) # check basic rotations with a range of angles - for angle in np.linspace(0., 2. * np.pi, 10): - - Rx = gtr._rodrigues_to_dcm(np.array([1., 0., 0.]), angle) - Ry = gtr._rodrigues_to_dcm(np.array([0., 1., 0.]), angle) - Rz = gtr._rodrigues_to_dcm(np.array([0., 0., 1.]), angle) + for angle in np.linspace(0.0, 2.0 * np.pi, 10): + Rx = gtr._rodrigues_to_dcm(np.array([1.0, 0.0, 0.0]), angle) + Ry = gtr._rodrigues_to_dcm(np.array([0.0, 1.0, 0.0]), angle) + Rz = gtr._rodrigues_to_dcm(np.array([0.0, 0.0, 1.0]), angle) assert np.allclose(Rx, _Rx(angle)) assert np.allclose(Ry, _Ry(angle)) diff --git a/tests/io/__init__.py b/tests/io/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/io/test_io_utils.py b/tests/io/test_io_utils.py index 0efc94377..1a2112233 100644 --- a/tests/io/test_io_utils.py +++ b/tests/io/test_io_utils.py @@ -28,12 +28,21 @@ """Test neurom.io.utils.""" import warnings +import os +from contextlib import contextmanager from io import StringIO from pathlib import Path import numpy as np -from morphio import MissingParentError, RawDataError, SomaError, UnknownFileType, MorphioError, \ - set_raise_warnings +import morphio +from morphio import ( + MissingParentError, + RawDataError, + SomaError, + UnknownFileType, + MorphioError, + set_raise_warnings, +) from neurom import COLS, get, load_morphology from neurom.core.morphology import Morphology from neurom.exceptions import NeuroMError @@ -44,26 +53,39 @@ SWC_PATH = DATA_PATH / 'swc' VALID_DATA_PATH = DATA_PATH / 'valid_set' NRN_NAMES = ('Neuron.swc', 'Neuron_h5v1.h5') -FILES = [SWC_PATH / f - for f in ['Neuron.swc', - 'Single_apical_no_soma.swc', - 'Single_apical.swc', - 'Single_basal.swc', - 
'Single_axon.swc', - 'sequential_trunk_off_0_16pt.swc', - 'sequential_trunk_off_1_16pt.swc', - 'sequential_trunk_off_42_16pt.swc', - 'Neuron_no_missing_ids_no_zero_segs.swc']] -FILENAMES = [VALID_DATA_PATH / f - for f in ['Neuron.swc', 'Neuron_h5v1.h5']] +FILES = [ + SWC_PATH / f + for f in [ + 'Neuron.swc', + 'Single_apical_no_soma.swc', + 'Single_apical.swc', + 'Single_basal.swc', + 'Single_axon.swc', + 'sequential_trunk_off_0_16pt.swc', + 'sequential_trunk_off_1_16pt.swc', + 'sequential_trunk_off_42_16pt.swc', + 'Neuron_no_missing_ids_no_zero_segs.swc', + ] +] +FILENAMES = [VALID_DATA_PATH / f for f in ['Neuron.swc', 'Neuron_h5v1.h5']] NRN = utils.load_morphology(VALID_DATA_PATH / 'Neuron.swc') NO_SOMA_FILE = SWC_PATH / 'Single_apical_no_soma.swc' DISCONNECTED_POINTS_FILE = SWC_PATH / 'Neuron_disconnected_components.swc' MISSING_PARENTS_FILE = SWC_PATH / 'Neuron_missing_parents.swc' -def _check_neurites_have_no_parent(m): +@contextmanager +def cwd(path): + """Context manager to temporarily change the working directory.""" + original_cwd = os.getcwd() + os.chdir(path) + try: + yield + finally: + os.chdir(original_cwd) + +def _check_neurites_have_no_parent(m): for n in m.neurites: assert n.root_node.parent is None @@ -82,7 +104,11 @@ def test_load_morphologies(): assert m.name == FILES[i].name with pytest.raises(NeuroMError): - list(utils.load_morphologies(MISSING_PARENTS_FILE, )) + list( + utils.load_morphologies( + MISSING_PARENTS_FILE, + ) + ) # Single string pop = utils.load_morphologies(str(FILES[0])) @@ -103,7 +129,9 @@ def test_load_morphologies(): assert m.name == file.name # string path to a directory - pop = utils.load_morphologies(str(SWC_PATH), ignored_exceptions=(MissingParentError, MorphioError)) + pop = utils.load_morphologies( + str(SWC_PATH), ignored_exceptions=(MissingParentError, MorphioError) + ) # is subset so that if new morpho are added to SWC_PATH, the test does not break assert {f.name for f in FILES}.issubset({m.name for m in pop}) @@ 
-113,9 +141,23 @@ def test_load_morphologies(): assert {f.name for f in FILES}.issubset({m.name for m in pop}) +def test_load_morphologies__resolve_paths(): + with cwd(DATA_PATH): + pop = utils.load_morphologies("swc/", ignored_exceptions=(MissingParentError, MorphioError)) + assert {f.name for f in FILES}.issubset({m.name for m in pop}) + + # move one up to break if the population is not using asbpaths + with cwd(DATA_PATH.parent): + assert {f.name for f in FILES}.issubset({m.name for m in pop}) + + def test_ignore_exceptions(): with pytest.raises(NeuroMError): - list(utils.load_morphologies(MISSING_PARENTS_FILE, )) + list( + utils.load_morphologies( + MISSING_PARENTS_FILE, + ) + ) count = 0 pop = utils.load_morphologies((MISSING_PARENTS_FILE,), ignored_exceptions=(RawDataError,)) for _ in pop: @@ -142,6 +184,64 @@ def test_load_morphology(): utils.load_morphology(StringIO(morphology_str), reader='swc') +def test_load_morphology__conversions(): + + morphology_str = u""" 1 1 0 0 0 1. -1 + 2 3 0 0 0 1. 1 + 3 3 0 5 0 1. 2 + 4 3 -5 5 0 0. 3 + 5 3 6 5 0 0. 3 + 6 2 0 0 0 1. 1 + 7 2 0 -4 0 1. 6 + 8 2 6 -4 0 0. 7 + 9 2 -5 -4 0 0. 
7 + """ + filepath = FILENAMES[0] + morphio_mut = morphio.mut.Morphology(filepath) + morphio_immut = morphio_mut.as_immutable() + + # default readonly + morph = utils.load_morphology(filepath) + assert isinstance(morph.to_morphio(), morphio.Morphology) + + # should be same with mutable=False + morph = utils.load_morphology(filepath, mutable=False) + assert isinstance(morph.to_morphio(), morphio.Morphology) + + morph = utils.load_morphology(filepath, mutable=True) + assert isinstance(morph.to_morphio(), morphio.mut.Morphology) + + # default mutable=None maintains mutability + morph = utils.load_morphology(morphio_mut) + assert isinstance(morph.to_morphio(), morphio.mut.Morphology) + + morph = utils.load_morphology(morphio_mut, mutable=False) + assert isinstance(morph.to_morphio(), morphio.Morphology) + + morph = utils.load_morphology(morphio_mut, mutable=True) + assert isinstance(morph.to_morphio(), morphio.mut.Morphology) + + # default mutable=None maintains mutability + morph = utils.load_morphology(morphio_immut) + assert isinstance(morph.to_morphio(), morphio.Morphology) + + morph = utils.load_morphology(morphio_immut, mutable=False) + assert isinstance(morph.to_morphio(), morphio.Morphology) + + morph = utils.load_morphology(morphio_immut, mutable=True) + assert isinstance(morph.to_morphio(), morphio.mut.Morphology) + + # default mutable=None is readaonly + morph = utils.load_morphology(morphology_str, reader="swc") + assert isinstance(morph.to_morphio(), morphio.Morphology) + + morph = utils.load_morphology(morphology_str, mutable=False, reader="swc") + assert isinstance(morph.to_morphio(), morphio.Morphology) + + morph = utils.load_morphology(morphology_str, mutable=True, reader="swc") + assert isinstance(morph.to_morphio(), morphio.mut.Morphology) + + def test_morphology_name(): for fn, nn in zip(FILENAMES, NRN_NAMES): m = utils.load_morphology(fn) @@ -163,7 +263,6 @@ def test_load_neuromorpho_3pt_soma(): def test_neurites_have_no_parent(): - 
_check_neurites_have_no_parent(NRN) @@ -176,12 +275,12 @@ def test_morphology_sections_are_connected(): # check traversal by counting number of sections un trees for nrt in NRN.neurites: root_node = nrt.root_node - assert (sum(1 for _ in root_node.ipreorder()) == - sum(1 for _ in NRN.sections[root_node.id].ipreorder())) + assert sum(1 for _ in root_node.ipreorder()) == sum( + 1 for _ in NRN.sections[root_node.id].ipreorder() + ) def test_load_morphology_soma_only(): - m = utils.load_morphology(Path(DATA_PATH, 'swc', 'Soma_origin.swc')) assert len(m.neurites) == 0 assert m.name == 'Soma_origin.swc' @@ -238,8 +337,10 @@ def test_load_morphology_mixed_tree_swc(): m_mix = utils.load_morphology(Path(SWC_ORD_PATH, 'sample_mixed_tree_sections.swc')) assert_items_equal(get('number_of_sections_per_neurite', m_mix), [5, 3]) - assert_items_equal(get('number_of_sections_per_neurite', m_mix), - get('number_of_sections_per_neurite', SWC_ORD_REF)) + assert_items_equal( + get('number_of_sections_per_neurite', m_mix), + get('number_of_sections_per_neurite', SWC_ORD_REF), + ) assert get('number_of_segments', m_mix) == get('number_of_segments', SWC_ORD_REF) assert get('total_length', m_mix) == get('total_length', SWC_ORD_REF) @@ -248,8 +349,10 @@ def test_load_morphology_section_order_break_swc(): m_mix = utils.load_morphology(Path(SWC_ORD_PATH, 'sample_disordered.swc')) assert_items_equal(get('number_of_sections_per_neurite', m_mix), [5, 3]) - assert_items_equal(get('number_of_sections_per_neurite', m_mix), - get('number_of_sections_per_neurite', SWC_ORD_REF)) + assert_items_equal( + get('number_of_sections_per_neurite', m_mix), + get('number_of_sections_per_neurite', SWC_ORD_REF), + ) assert get('number_of_segments', m_mix) == get('number_of_segments', SWC_ORD_REF) assert get('total_length', m_mix) == get('total_length', SWC_ORD_REF) @@ -261,8 +364,10 @@ def test_load_morphology_section_order_break_swc(): def test_load_morphology_mixed_tree_h5(): m_mix = 
utils.load_morphology(Path(H5_PATH, 'sample_mixed_tree_sections.h5')) assert_items_equal(get('number_of_sections_per_neurite', m_mix), [5, 3]) - assert_items_equal(get('number_of_sections_per_neurite', m_mix), - get('number_of_sections_per_neurite', H5_ORD_REF)) + assert_items_equal( + get('number_of_sections_per_neurite', m_mix), + get('number_of_sections_per_neurite', H5_ORD_REF), + ) def test_load_h5_trunk_points_regression(): @@ -271,17 +376,19 @@ def test_load_h5_trunk_points_regression(): # of files with non-standard soma structure. # See #480. m = utils.load_morphology(Path(DATA_PATH, 'h5', 'v1', 'Neuron.h5')) - assert np.allclose(m.neurites[0].root_node.points[1, COLS.XYZR], - [0., 0., 0.1, 0.31646374]) + assert np.allclose(m.neurites[0].root_node.points[1, COLS.XYZR], [0.0, 0.0, 0.1, 0.31646374]) - assert np.allclose(m.neurites[1].root_node.points[1, COLS.XYZR], - [0., 0., 0.1, 1.84130445e-01]) + assert np.allclose( + m.neurites[1].root_node.points[1, COLS.XYZR], [0.0, 0.0, 0.1, 1.84130445e-01] + ) - assert np.allclose(m.neurites[2].root_node.points[1, COLS.XYZR], - [0., 0., 0.1, 5.62225521e-01]) + assert np.allclose( + m.neurites[2].root_node.points[1, COLS.XYZR], [0.0, 0.0, 0.1, 5.62225521e-01] + ) - assert np.allclose(m.neurites[3].root_node.points[1, COLS.XYZR], - [0., 0., 0.1, 7.28555262e-01]) + assert np.allclose( + m.neurites[3].root_node.points[1, COLS.XYZR], [0.0, 0.0, 0.1, 7.28555262e-01] + ) def test_load_unknown_type(): @@ -317,6 +424,7 @@ def test_get_files_by_path(): with pytest.raises(IOError): utils.get_files_by_path(Path('this/is/a/fake/path')) + def test_h5v2_raises(): with pytest.raises(RawDataError): utils.load_morphology(DATA_PATH / 'h5/v2/Neuron.h5') diff --git a/tests/io/test_neurolucida.py b/tests/io/test_neurolucida.py index 6853f20ee..606f6f1a4 100644 --- a/tests/io/test_neurolucida.py +++ b/tests/io/test_neurolucida.py @@ -25,10 +25,7 @@ def test_soma(): """ n = nm.load_morphology(string_section, reader='asc') - 
assert_array_equal(n.soma.points, - [[1, 1, 0, 0.5], - [-1, 1, 0, 0.5], - [-1, -1, 0, 1]]) + assert_array_equal(n.soma.points, [[1, 1, 0, 0.5], [-1, 1, 0, 0.5], [-1, -1, 0, 1]]) assert len(n.neurites) == 0 @@ -104,9 +101,10 @@ def test_single_neurite_no_soma(): assert_array_equal(n.soma.points, np.empty((0, 4))) assert len(n.neurites) == 1 - assert_array_equal(n.neurites[0].points, - np.array([[1.2, 2.7, 1.0, 6.5], - [1.2, 3.7, 2.0, 6.5]], dtype=np.float32)) + assert_array_equal( + n.neurites[0].points, + np.array([[1.2, 2.7, 1.0, 6.5], [1.2, 3.7, 2.0, 6.5]], dtype=np.float32), + ) def test_skip_header(): @@ -127,9 +125,10 @@ def test_skip_header(): n = nm.load_morphology(str_morph, reader='asc') assert len(n.neurites) == 1 - assert_array_equal(n.neurites[0].points, - np.array([[1.2, 2.7, 1.0, 6.5], - [1.2, 3.7, 2.0, 6.5]], dtype=np.float32)) + assert_array_equal( + n.neurites[0].points, + np.array([[1.2, 2.7, 1.0, 6.5], [1.2, 3.7, 2.0, 6.5]], dtype=np.float32), + ) without_duplicate = """ @@ -169,55 +168,59 @@ def test_skip_header(): def test_read_with_duplicates(): """Section points are duplicated in the file""" -# what I think the -# https://developer.humanbrainproject.eu/docs/projects/morphology-documentation/0.0.2/h5v1.html -# would look like + # what I think the + # https://developer.humanbrainproject.eu/docs/projects/morphology-documentation/0.0.2/h5v1.html + # would look like n = load_morphology(StringIO(with_duplicate), reader='asc') assert len(n.neurites) == 1 - assert_array_equal(n.neurites[0].points, - # Duplicate points are not present - [[3, -4, 0, 1], - [3, -6, 0, 1], - [3, -8, 0, 1], - [3, -10, 0, 1], - [0, -10, 0, 1], - [-3, -10, 0, 1], - [6, -10, 0, 1], - [9, -10, 0, 1]]) - - assert_array_equal(n.neurites[0].root_node.points, - [[3, -4, 0, 1], - [3, -6, 0, 1], - [3, -8, 0, 1], - [3, -10, 0, 1]]) - - assert_array_equal(n.neurites[0].root_node.children[0].points, - [[3, -10, 0, 1], - [0, -10, 0, 1], - [-3, -10, 0, 1]]) - - 
assert_array_equal(n.neurites[0].root_node.children[1].points, - [[3, -10, 0, 1], - [6, -10, 0, 1], - [9, -10, 0, 1]]) + assert_array_equal( + n.neurites[0].points, + # Duplicate points are not present + [ + [3, -4, 0, 1], + [3, -6, 0, 1], + [3, -8, 0, 1], + [3, -10, 0, 1], + [0, -10, 0, 1], + [-3, -10, 0, 1], + [6, -10, 0, 1], + [9, -10, 0, 1], + ], + ) + + assert_array_equal( + n.neurites[0].root_node.points, + [[3, -4, 0, 1], [3, -6, 0, 1], [3, -8, 0, 1], [3, -10, 0, 1]], + ) + + assert_array_equal( + n.neurites[0].root_node.children[0].points, + [[3, -10, 0, 1], [0, -10, 0, 1], [-3, -10, 0, 1]], + ) + + assert_array_equal( + n.neurites[0].root_node.children[1].points, [[3, -10, 0, 1], [6, -10, 0, 1], [9, -10, 0, 1]] + ) def test_read_without_duplicates(): n_with_duplicate = load_morphology(with_duplicate, reader='asc') n_without_duplicate = load_morphology(without_duplicate, reader='asc') - assert_array_equal(n_with_duplicate.neurites[0].root_node.children[0].points, - n_without_duplicate.neurites[0].root_node.children[0].points) + assert_array_equal( + n_with_duplicate.neurites[0].root_node.children[0].points, + n_without_duplicate.neurites[0].root_node.children[0].points, + ) - assert_array_equal(n_with_duplicate.neurites[0].points, - n_without_duplicate.neurites[0].points) + assert_array_equal(n_with_duplicate.neurites[0].points, n_without_duplicate.neurites[0].points) def test_unfinished_file(): with pytest.raises(RawDataError) as obj: - load_morphology(""" + load_morphology( + """ ((Dendrite) (3 -4 0 2) (3 -6 0 2) @@ -228,13 +231,16 @@ def test_unfinished_file(): (0 -10 0 2) (-3 -10 0 2) | - """, reader='asc') + """, + reader='asc', + ) assert obj.match("Hit end of of file while consuming a neurite ") def test_empty_sibling(): - n = load_morphology(""" + n = load_morphology( + """ ((Dendrite) (3 -4 0 2) (3 -6 0 2) @@ -247,21 +253,30 @@ def test_empty_sibling(): | ) ) - """, reader='asc') - - assert_array_equal(n.neurites[0].points, - np.array([[3, -4, 0, 
1], - [3, -6, 0, 1], - [3, -8, 0, 1], - [3, -10, 0, 1], - [0, -10, 0, 1], - [-3, -10, 0, 1]], - dtype=np.float32)) + """, + reader='asc', + ) + + assert_array_equal( + n.neurites[0].points, + np.array( + [ + [3, -4, 0, 1], + [3, -6, 0, 1], + [3, -8, 0, 1], + [3, -10, 0, 1], + [0, -10, 0, 1], + [-3, -10, 0, 1], + ], + dtype=np.float32, + ), + ) def test_single_children(): - n = load_morphology(StringIO( - """ + n = load_morphology( + StringIO( + """ ((Dendrite) (3 -4 0 2) (3 -6 0 2) @@ -273,25 +288,26 @@ def test_single_children(): (-3 -10 0 2) ) ) - """), 'asc') + """ + ), + 'asc', + ) assert len(n.sections) == 2 - assert_array_equal(n.sections[0].points, - np.array([[3, -4, 0, 1], - [3, -6, 0, 1], - [3, -8, 0, 1], - [3, -10, 0, 1]], - dtype=np.float32)) - assert_array_equal(n.sections[1].points, - np.array([[3, -10, 0, 1], - [0, -10, 0, 1], - [-3, -10, 0, 1]], - dtype=np.float32)) + assert_array_equal( + n.sections[0].points, + np.array([[3, -4, 0, 1], [3, -6, 0, 1], [3, -8, 0, 1], [3, -10, 0, 1]], dtype=np.float32), + ) + assert_array_equal( + n.sections[1].points, + np.array([[3, -10, 0, 1], [0, -10, 0, 1], [-3, -10, 0, 1]], dtype=np.float32), + ) def test_markers(): """Test that markers do not prevent file from being read correctly""" - n = load_morphology(""" + n = load_morphology( + """ ( (Color White) ; [10,1] (Dendrite) ( -290.87 -113.09 -16.32 2.06) ; Root @@ -333,22 +349,27 @@ def test_markers(): ) ; End of split ) ; End of split ) -""", reader='asc') +""", + reader='asc', + ) assert len(n.neurites) == 1 - res = np.array([[-290.87, -113.09, -16.32, 1.03], - [-290.87, -113.09, -16.32, 1.03], - [-277.14, -119.13, -18.02, 0.345], - [-275.54, -119.99, -16.67, 0.345], - [-277.80, -120.28, -19.48, 0.46], - [-276.65, -121.14, -20.20, 0.46], - [-267.94, -128.61, -22.57, 0.345], - [-204.90, -157.63, -42.45, 0.345], - [-269.77, -129.47, -22.57, 0.46], - [-268.17, -130.62, -24.75, 0.46], - [-266.79, -131.77, -26.13, 0.46]], - dtype=np.float32) - - 
assert_array_equal(n.neurites[0].points, - res) + res = np.array( + [ + [-290.87, -113.09, -16.32, 1.03], + [-290.87, -113.09, -16.32, 1.03], + [-277.14, -119.13, -18.02, 0.345], + [-275.54, -119.99, -16.67, 0.345], + [-277.80, -120.28, -19.48, 0.46], + [-276.65, -121.14, -20.20, 0.46], + [-267.94, -128.61, -22.57, 0.345], + [-204.90, -157.63, -42.45, 0.345], + [-269.77, -129.47, -22.57, 0.46], + [-268.17, -130.62, -24.75, 0.46], + [-266.79, -131.77, -26.13, 0.46], + ], + dtype=np.float32, + ) + + assert_array_equal(n.neurites[0].points, res) diff --git a/tests/io/test_swc_reader.py b/tests/io/test_swc_reader.py index 9eed5ae1b..4a4b1bd4e 100644 --- a/tests/io/test_swc_reader.py +++ b/tests/io/test_swc_reader.py @@ -55,44 +55,35 @@ def test_read_single_neurite(): n = load_morphology(SWC_PATH / 'point_soma_single_neurite.swc') assert len(n.neurites) == 1 assert n.neurites[0].root_node.id == 0 - assert_array_equal(n.soma.points, - [[0, 0, 0, 3.0]]) + assert_array_equal(n.soma.points, [[0, 0, 0, 3.0]]) assert len(n.neurites) == 1 assert len(n.sections) == 1 - assert_array_equal(n.neurites[0].points, - np.array([[0, 0, 2, 0.5], - [0, 0, 3, 0.5], - [0, 0, 4, 0.5], - [0, 0, 5, 0.5]])) + assert_array_equal( + n.neurites[0].points, + np.array([[0, 0, 2, 0.5], [0, 0, 3, 0.5], [0, 0, 4, 0.5], [0, 0, 5, 0.5]]), + ) def test_read_split_soma(): n = load_morphology(SWC_PATH / 'split_soma_two_neurites.swc') - assert_array_equal(n.soma.points, - [[1, 0, 1, 4.0], - [2, 0, 0, 4.0], - [3, 0, 0, 4.0]]) + assert_array_equal(n.soma.points, [[1, 0, 1, 4.0], [2, 0, 0, 4.0], [3, 0, 0, 4.0]]) assert len(n.neurites) == 2 - assert_array_equal(n.neurites[0].points, - [[0, 0, 2, 0.5], - [0, 0, 3, 0.5], - [0, 0, 4, 0.5], - [0, 0, 5, 0.5]]) - - assert_array_equal(n.neurites[1].points, - [[0, 0, 6, 0.5], - [0, 0, 7, 0.5], - [0, 0, 8, 0.5], - [0, 0, 9, 0.5]]) + assert_array_equal( + n.neurites[0].points, [[0, 0, 2, 0.5], [0, 0, 3, 0.5], [0, 0, 4, 0.5], [0, 0, 5, 0.5]] + ) + + assert_array_equal( + 
n.neurites[1].points, [[0, 0, 6, 0.5], [0, 0, 7, 0.5], [0, 0, 8, 0.5], [0, 0, 9, 0.5]] + ) assert len(n.sections) == 2 def test_weird_indent(): - - n = load_morphology(""" + n = load_morphology( + """ # this is the same as simple.swc @@ -112,16 +103,18 @@ def test_weird_indent(): 8 2 6 -4 0 0. 7 9 2 -5 -4 0 0. 7 -""", reader='swc') +""", + reader='swc', + ) simple = load_morphology(SWC_PATH / 'simple.swc') - assert_array_equal(simple.points, - n.points) + assert_array_equal(simple.points, n.points) def test_cyclic(): with pytest.raises(RawDataError): - load_morphology(""" + load_morphology( + """ 1 1 0 0 0 1. -1 2 3 0 0 0 1. 1 3 3 0 5 0 1. 2 @@ -130,25 +123,22 @@ def test_cyclic(): 6 2 0 0 0 1. 6 # <-- cyclic point 7 2 0 -4 0 1. 6 8 2 6 -4 0 0. 7 - 9 2 -5 -4 0 0. 7""", reader='swc') + 9 2 -5 -4 0 0. 7""", + reader='swc', + ) def test_simple_reversed(): n = load_morphology(SWC_PATH / 'simple_reversed.swc') - assert_array_equal(n.soma.points, - [[0, 0, 0, 1]]) + assert_array_equal(n.soma.points, [[0, 0, 0, 1]]) assert len(n.neurites) == 2 assert len(n.neurites[0].points) == 4 - assert_array_equal(n.neurites[0].points, - [[0, 0, 0, 1], - [0, 5, 0, 1], - [-5, 5, 0, 0], - [6, 5, 0, 0]]) - assert_array_equal(n.neurites[1].points, - [[0, 0, 0, 1], - [0, -4, 0, 1], - [6, -4, 0, 0], - [-5, -4, 0, 0]]) + assert_array_equal( + n.neurites[0].points, [[0, 0, 0, 1], [0, 5, 0, 1], [-5, 5, 0, 0], [6, 5, 0, 0]] + ) + assert_array_equal( + n.neurites[1].points, [[0, 0, 0, 1], [0, -4, 0, 1], [6, -4, 0, 0], [-5, -4, 0, 0]] + ) def test_custom_type(): diff --git a/tests/test_examples.py b/tests/test_examples.py index 0b3c26933..0d39b1185 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -10,14 +10,13 @@ EXAMPLES_DIR = TESTS_DIR.parent / "examples" print(EXAMPLES_DIR) + @pytest.mark.parametrize("filepath", EXAMPLES_DIR.glob("*.py")) def test_example(filepath): - spec = importlib.util.spec_from_file_location(filepath.stem, filepath) module = spec.loader.load_module() 
with tempfile.TemporaryDirectory() as tempdir: - # change directory to avoid creating files in the root folder try: cwd = os.getcwd() diff --git a/tests/test_import.py b/tests/test_import.py index 1629a69a2..89d960188 100644 --- a/tests/test_import.py +++ b/tests/test_import.py @@ -26,5 +26,6 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + def test_import_neurom(): import neurom diff --git a/tests/test_mixed.py b/tests/test_mixed.py new file mode 100644 index 000000000..b3bed778b --- /dev/null +++ b/tests/test_mixed.py @@ -0,0 +1,1142 @@ +import re +import copy +import json +import pickle +import sys +import warnings +from copy import deepcopy +from pathlib import Path +import pytest +import numpy as np +import pandas as pd +import numpy.testing as npt +from enum import Enum +import collections.abc + +from morphio import SectionType + +import neurom +import neurom.apps.morph_stats +import neurom.core.morphology +import neurom.features.neurite +from neurom import NeuriteType +from neurom.core import Population +from neurom.core import types +from neurom.core.morphology import Section, iter_neurites, iter_sections +from neurom.core.types import _ALL_SUBTYPE +from neurom.core.types import _SOMA_SUBTYPE +from neurom.core.types import NeuriteType +from neurom.core.types import tree_type_checker as is_type +from neurom.exceptions import NeuroMError +from neurom.features import _POPULATION_FEATURES, _MORPHOLOGY_FEATURES, _NEURITE_FEATURES +from neurom.features import get + + +@pytest.mark.parametrize( + "value, expected", + [ + (0, ""), + (32, ""), + ((3, 2), ""), + ], +) +def test_neurite_type__repr(value, expected): + assert repr(NeuriteType(value)) == expected + + +@pytest.mark.parametrize( + "value, expected", + [ + (0, "NeuriteType.undefined"), + (32, "NeuriteType.all"), + ((3, 2), "NeuriteType.axon_carrying_dendrite"), + ], +) +def test_neurite_type__str(value, 
expected): + assert str(NeuriteType(value)) == expected + + +@pytest.mark.parametrize( + "values,expected", + [ + (2, 2), + (SectionType(2), 2), + (NeuriteType(2), 2), + ((3, 2), (3, 2)), + ([3, 2], (3, 2)), + ((NeuriteType.basal_dendrite, SectionType.axon), (3, 2)), + ], +) +def test_int_or_tuple(values, expected): + res = types._int_or_tuple(values) + assert res == expected + + +@pytest.mark.parametrize( + "values, expected", + [ + (NeuriteType.axon, NeuriteType.axon), + (SectionType.axon, NeuriteType.axon), + (2, NeuriteType.axon), + ((3, 2), NeuriteType.axon_carrying_dendrite), + ((SectionType.basal_dendrite, NeuriteType.axon), NeuriteType.axon_carrying_dendrite), + ], +) +def test_neurite_type__call(values, expected): + res = NeuriteType(values) + assert res == expected + + +def test_create_neurite_type(): + res = types._create_neurite_type(NeuriteType, 2, name="axon") + + assert res.name == "axon" + assert res.subtypes == (2,) + assert res.root_type == 2 + assert res.value == 2 + + +def test_create_neurite_type__mixed(): + res = types._create_neurite_type(NeuriteType, (3, 2), name="axon_carrying_dendrite") + + assert res.name == "axon_carrying_dendrite" + assert res.subtypes == (3, 2) + assert res.root_type == 3 + assert res.value == (3, 2) + + +@pytest.mark.parametrize( + "left, right, expected", + [ + (0, 0, True), + (0, "asdf", False), + (32, 32, True), + (3, 1, False), + (3, 3, True), + (3, 2, False), + (3, 4, False), + (3, (3, 2), True), + ((3, 2), (3, 2), True), + ((3, 2), (2, 3), False), + ((3, 2), 2, True), + ((3, 2), 3, True), + ((3, 2), 4, False), + ], +) +def test_neurite_type__eq(left, right, expected): + assert (NeuriteType(left) == right) is expected + + +@pytest.mark.parametrize("type_", [NeuriteType.axon, NeuriteType.axon_carrying_dendrite]) +def test_neurite_type__pickle(type_): + res = pickle.loads(pickle.dumps(type_)) + assert res == type_ + + +@pytest.mark.parametrize("value", [None, {"WRONG_TYPE": 999}, "UNKNOWN VALUE", (2, 3, 4)]) +def 
test_neurite_type__raises(value): + with pytest.raises(ValueError, match="is not a valid NeuriteType"): + NeuriteType(value) + + +@pytest.fixture +def reset_NeuriteType(): + current_value2member_map_ = copy.deepcopy(NeuriteType._value2member_map_) + current_member_map_ = copy.deepcopy(NeuriteType._member_map_) + current_member_names_ = copy.deepcopy(NeuriteType._member_names_) + yield + NeuriteType._value2member_map_ = current_value2member_map_ + NeuriteType._member_map_ = current_member_map_ + NeuriteType._member_names_ = current_member_names_ + + +DATA_DIR = Path(__file__).parent / "data/mixed" + + +@pytest.fixture +def mixed_morph(): + """ + (1, 4, 1) + | + S7:B | + | + (1, 4, -1)-----(1, 4, 0) (2, 4, 0) (3, 3, 1) + S8:B | | | + | S10:A | S12:A | + | | S11:A | + S6:B | (2, 3, 0)-----(3, 3, 0) + | / | + | S9:A / S13:A | + | / | + (1, 2, 0) (3, 3, -1) + / + S5:B / + / Axon on basal dendrite + (-3, 0, 1) (-2, 1, 0) (0, 1, 0) + | | + S2 | S4 | + | S1 | S0 + (-3, 0, 0)-----(-2, 0, 0)-----(-1, 0, 0) (0, 0, 0) Soma + | + S3 | Basal Dendrite + | + (-3, 0, -1) (0, -1, 0) + | + S14 | + | S18 + Apical Dendrite (0, -2, 0)-----(1, -2, 0) + | + S15 | + S17 | S16 + (0, -3, -1)-----(0, -3, 0)-----(0, -3, 1) + + basal_dendrite: homogeneous + section ids: [0, 1, 2, 3, 4] + + axon_on_basal_dendrite: heterogeneous + section_ids: + - basal: [5, 6, 7, 8] + - axon : [9, 10, 11, 12, 13] + + apical_dendrite: homogeneous: + section_ids: [14, 15, 16, 17, 18] + """ + return neurom.load_morphology( + """ + 1 1 0 0 0 0.5 -1 + 2 3 -1 0 0 0.1 1 + 3 3 -2 0 0 0.1 2 + 4 3 -3 0 0 0.1 3 + 5 3 -3 0 1 0.1 4 + 6 3 -3 0 -1 0.1 4 + 7 3 -2 1 0 0.1 3 + 8 3 0 1 0 0.1 1 + 9 3 1 2 0 0.1 8 + 10 3 1 4 0 0.1 9 + 11 3 1 4 1 0.1 10 + 12 3 1 4 -1 0.1 10 + 13 2 2 3 0 0.1 9 + 14 2 2 4 0 0.1 13 + 15 2 3 3 0 0.1 13 + 16 2 3 3 1 0.1 15 + 17 2 3 3 -1 0.1 15 + 18 4 0 -1 0 0.1 1 + 19 4 0 -2 0 0.1 18 + 20 4 0 -3 0 0.1 19 + 21 4 0 -3 1 0.1 20 + 22 4 0 -3 -1 0.1 20 + 23 4 1 -2 0 0.1 19 + """, + reader="swc", + ) + + 
+@pytest.fixture +def population(mixed_morph): + return Population([mixed_morph, mixed_morph]) + + +@pytest.fixture +def three_types_neurite_morph(): + return neurom.load_morphology( + """ + 1 1 0 0 0 0.5 -1 + 2 3 0 1 0 0.1 1 + 3 3 1 2 0 0.1 2 + 4 3 1 4 0 0.1 3 + 5 3 1 4 1 0.1 4 + 6 3 1 4 -1 0.1 4 + 7 2 2 3 0 0.1 3 + 8 2 2 4 0 0.1 7 + 9 2 3 3 0 0.1 7 + 10 2 3 3 1 0.1 9 + 11 4 3 3 -1 0.1 9 + """, + reader="swc", + ) + + +def test_heterogeneous_neurites(mixed_morph): + assert not mixed_morph.neurites[0].is_heterogeneous() + assert mixed_morph.neurites[1].is_heterogeneous() + assert not mixed_morph.neurites[2].is_heterogeneous() + + +def test_iter_sections(mixed_morph): + # Test homogenous trees + mixed_morph.process_subtrees = False + # # Iterate with ipreorder iterator + assert [i.id for i in iter_sections(mixed_morph)] == list(range(19)) + assert [ + i.id for i in iter_sections(mixed_morph, neurite_filter=is_type(NeuriteType.all)) + ] == list(range(19)) + assert [ + i.id for i in iter_sections(mixed_morph, neurite_filter=is_type(NeuriteType.axon)) + ] == [] + assert [ + i.id + for i in iter_sections( + mixed_morph, + neurite_filter=is_type(NeuriteType.axon), + section_filter=is_type(NeuriteType.all), + ) + ] == [] + + # # Iterate with ibifurcation_point iterator + assert [ + i.id for i in iter_sections(mixed_morph, iterator_type=Section.ibifurcation_point) + ] == [0, 1, 5, 6, 9, 11, 14, 15] # fmt: skip + assert [ + i.id + for i in iter_sections( + mixed_morph, + iterator_type=Section.ibifurcation_point, + neurite_filter=is_type(NeuriteType.all), + ) + ] == [0, 1, 5, 6, 9, 11, 14, 15] + assert [ + i.id + for i in iter_sections( + mixed_morph, + iterator_type=Section.ibifurcation_point, + neurite_filter=is_type(NeuriteType.axon), + ) + ] == [] + assert [ + i.id + for i in iter_sections( + mixed_morph, + iterator_type=Section.ibifurcation_point, + neurite_filter=is_type(NeuriteType.axon), + section_filter=is_type(NeuriteType.all), + ) + ] == [] + + # Test heterogenous 
trees + mixed_morph.process_subtrees = True + # # Iterate with ipreorder iterator + assert [i.id for i in iter_sections(mixed_morph)] == list(range(19)) + assert [ + i.id for i in iter_sections(mixed_morph, neurite_filter=is_type(NeuriteType.all)) + ] == list(range(19)) + assert [ + i.id + for i in iter_sections( + mixed_morph, + neurite_filter=is_type(NeuriteType.axon), + section_filter=is_type(NeuriteType.all), + ) + ] == [5, 6, 7, 8, 9, 10, 11, 12, 13] + assert [ + i.id + for i in iter_sections( + mixed_morph, + neurite_filter=is_type(NeuriteType.axon), + section_filter=is_type(NeuriteType.axon), + ) + ] == [9, 10, 11, 12, 13] + assert [ + i.id + for i in iter_sections( + mixed_morph, + neurite_filter=is_type(NeuriteType.axon), + section_filter=is_type(NeuriteType.basal_dendrite), + ) + ] == [5, 6, 7, 8] + + # # Iterate with ibifurcation_point iterator + assert [ + i.id + for i in iter_sections( + mixed_morph, + iterator_type=Section.ibifurcation_point, + ) + ] == [0, 1, 5, 6, 9, 11, 14, 15] + assert [ + i.id + for i in iter_sections( + mixed_morph, + iterator_type=Section.ibifurcation_point, + neurite_filter=is_type(NeuriteType.all), + ) + ] == [0, 1, 5, 6, 9, 11, 14, 15] + assert [ + i.id + for i in iter_sections( + mixed_morph, + iterator_type=Section.ibifurcation_point, + neurite_filter=is_type(NeuriteType.axon), + ) + ] == [5, 6, 9, 11] + assert [ + i.id + for i in iter_sections( + mixed_morph, + iterator_type=Section.ibifurcation_point, + neurite_filter=is_type(NeuriteType.axon), + section_filter=is_type(NeuriteType.all), + ) + ] == [5, 6, 9, 11] + assert [ + i.id + for i in iter_sections( + mixed_morph, + iterator_type=Section.ibifurcation_point, + neurite_filter=is_type(NeuriteType.axon), + section_filter=is_type(NeuriteType.axon), + ) + ] == [9, 11] + assert [ + i.id + for i in iter_sections( + mixed_morph, + iterator_type=Section.ibifurcation_point, + neurite_filter=is_type(NeuriteType.axon), + section_filter=is_type(NeuriteType.basal_dendrite), + ) + 
] == [5, 6] + + +def test_is_homogeneous_point(mixed_morph): + heterogeneous_neurite = mixed_morph.neurites[1] + + sections = heterogeneous_neurite.sections + + # first section has one axon and one basal children + assert not sections[0].is_homogeneous_point() + + # second section is pure basal + assert sections[1].is_homogeneous_point() + + +def test_subtypes(mixed_morph): + homogeneous_neurite = mixed_morph.neurites[0] + heterogeneous_neurite = mixed_morph.neurites[1] + + assert homogeneous_neurite.subtree_types == [NeuriteType.basal_dendrite, NeuriteType.axon] + assert homogeneous_neurite.type == NeuriteType.basal_dendrite + + assert heterogeneous_neurite.subtree_types == [NeuriteType.basal_dendrite, NeuriteType.axon] + assert heterogeneous_neurite.type == NeuriteType.axon_carrying_dendrite + + +def test_number_of_sections(mixed_morph, population): + # Count number of sections with process_subtrees == False + # # Population + # # In this case only the neurite_type argument is considered but the section_type argument is ignored. + assert get('number_of_sections', population) == [19, 19] + assert get('number_of_sections', population, neurite_type=NeuriteType.all) == [19, 19] + assert get('number_of_sections', population, neurite_type=NeuriteType.axon) == [0, 0] + assert get('number_of_sections', population, neurite_type=NeuriteType.apical_dendrite) == [5, 5] + assert get('number_of_sections', population, neurite_type=NeuriteType.basal_dendrite) == [ + 14, + 14, + ] + with pytest.raises(NeuroMError, match='Can not apply "section_type" arg to a Population'): + get('number_of_sections', population, section_type=NeuriteType.soma) + + # # Morphology + # # In this case only the neurite_type argument is considered but the section_type argument is ignored. 
+ assert get('number_of_sections', mixed_morph) == 19 + assert get('number_of_sections', mixed_morph, neurite_type=NeuriteType.all) == 19 + assert get('number_of_sections', mixed_morph, neurite_type=NeuriteType.axon) == 0 + assert get('number_of_sections', mixed_morph, neurite_type=NeuriteType.apical_dendrite) == 5 + assert get('number_of_sections', mixed_morph, neurite_type=NeuriteType.basal_dendrite) == 14 + assert get('number_of_sections', mixed_morph, neurite_type=NeuriteType.soma) == 0 + assert get('number_of_sections', mixed_morph, neurite_type=NeuriteType.undefined) == 0 + with pytest.raises(NeuroMError, match='Can not apply "section_type" arg to a Morphology'): + get('number_of_sections', mixed_morph, section_type=NeuriteType.soma) + + # # List of neurites + # # In this case the process_subtrees flag is ignored. So only the section with the proper + # # section type are considered but all bifurcation points are kept, even heterogeneous ones. + assert get('number_of_sections', mixed_morph.neurites) == [5, 9, 5] + assert get('number_of_sections', mixed_morph.neurites, section_type=NeuriteType.all) == [ + 5, + 9, + 5, + ] + assert get('number_of_sections', mixed_morph.neurites, section_type=NeuriteType.axon) == [ + 0, + 5, + 0, + ] + assert get( + 'number_of_sections', mixed_morph.neurites, section_type=NeuriteType.apical_dendrite + ) == [0, 0, 5] + assert get( + 'number_of_sections', mixed_morph.neurites, section_type=NeuriteType.basal_dendrite + ) == [5, 4, 0] + with pytest.raises( + NeuroMError, match='Can not apply "neurite_type" arg to a Neurite with a neurite feature' + ): + assert get('number_of_sections', mixed_morph.neurites, neurite_type=NeuriteType.all) + + # # One neurite (in this case the process_subtrees flag is ignored) + assert get('number_of_sections', mixed_morph.neurites[1]) == 9 + assert get('number_of_sections', mixed_morph.neurites[1], section_type=NeuriteType.all) == 9 + assert get('number_of_sections', mixed_morph.neurites[1], 
section_type=NeuriteType.axon) == 5 + assert ( + get('number_of_sections', mixed_morph.neurites[1], section_type=NeuriteType.apical_dendrite) + == 0 + ) + assert ( + get('number_of_sections', mixed_morph.neurites[1], section_type=NeuriteType.basal_dendrite) + == 4 + ) + with pytest.raises( + NeuroMError, match='Can not apply "neurite_type" arg to a Neurite with a neurite feature' + ): + assert get('number_of_sections', mixed_morph.neurites[1], neurite_type=NeuriteType.all) + + # Count number of sections with process_subtrees == True + population.process_subtrees = True + for i in population: + assert i.process_subtrees is True + mixed_morph.process_subtrees = True + assert mixed_morph.process_subtrees is True + + # # Population + # # In this case only the neurite_type argument is considered but the section_type argument is ignored. + assert get('number_of_sections', population) == [19, 19] + assert get('number_of_sections', population, neurite_type=NeuriteType.all) == [19, 19] + assert get('number_of_sections', population, neurite_type=NeuriteType.axon) == [5, 5] + assert get('number_of_sections', population, neurite_type=NeuriteType.apical_dendrite) == [5, 5] + assert get('number_of_sections', population, neurite_type=NeuriteType.basal_dendrite) == [ + 9, + 9, + ] # This is weird: we skip bifurcation points but we still count 2 sections around heterogeneous bifurcation points + with pytest.raises(NeuroMError, match='Can not apply "section_type" arg to a Population'): + get('number_of_sections', population, section_type=NeuriteType.soma) + + # # Morphology + # # In this case only the neurite_type argument is considered but the section_type argument is ignored. 
+ assert get('number_of_sections', mixed_morph) == 19 + assert get('number_of_sections', mixed_morph, neurite_type=NeuriteType.all) == 19 + assert get('number_of_sections', mixed_morph, neurite_type=NeuriteType.axon) == 5 + assert get('number_of_sections', mixed_morph, neurite_type=NeuriteType.apical_dendrite) == 5 + assert get('number_of_sections', mixed_morph, neurite_type=NeuriteType.basal_dendrite) == 9 + assert get('number_of_sections', mixed_morph, neurite_type=NeuriteType.soma) == 0 + assert get('number_of_sections', mixed_morph, neurite_type=NeuriteType.undefined) == 0 + with pytest.raises(NeuroMError, match='Can not apply "section_type" arg to a Morphology'): + get('number_of_sections', mixed_morph, section_type=NeuriteType.soma) + + # # List of neurites + # # In this case the process_subtrees flag is ignored. So only the section with the proper + # # section type are considered but all bifurcation points are kept, even heterogeneous ones. + assert get('number_of_sections', mixed_morph.neurites) == [5, 9, 5] + assert get('number_of_sections', mixed_morph.neurites, section_type=NeuriteType.all) == [ + 5, + 9, + 5, + ] + assert get('number_of_sections', mixed_morph.neurites, section_type=NeuriteType.axon) == [ + 0, + 5, + 0, + ] + assert get( + 'number_of_sections', mixed_morph.neurites, section_type=NeuriteType.apical_dendrite + ) == [0, 0, 5] + assert get( + 'number_of_sections', mixed_morph.neurites, section_type=NeuriteType.basal_dendrite + ) == [5, 4, 0] + with pytest.raises( + NeuroMError, match='Can not apply "neurite_type" arg to a Neurite with a neurite feature' + ): + assert get('number_of_sections', mixed_morph.neurites, neurite_type=NeuriteType.all) + + # # One neurite (in this case the process_subtrees flag is ignored) + assert get('number_of_sections', mixed_morph.neurites[1]) == 9 + assert get('number_of_sections', mixed_morph.neurites[1], section_type=NeuriteType.all) == 9 + assert get('number_of_sections', mixed_morph.neurites[1], 
section_type=NeuriteType.axon) == 5 + assert ( + get('number_of_sections', mixed_morph.neurites[1], section_type=NeuriteType.apical_dendrite) + == 0 + ) + assert ( + get('number_of_sections', mixed_morph.neurites[1], section_type=NeuriteType.basal_dendrite) + == 4 + ) + with pytest.raises( + NeuroMError, match='Can not apply "neurite_type" arg to a Neurite with a neurite feature' + ): + assert get('number_of_sections', mixed_morph.neurites[1], neurite_type=NeuriteType.all) + + +def test_multine_neurite_types(mixed_morph): + for process_subtrees in [False, True]: + mixed_morph.process_subtrees = process_subtrees + res = get( + "number_of_sections", + mixed_morph, + neurite_type=[NeuriteType.apical_dendrite, NeuriteType.basal_dendrite], + ) + res1 = get("number_of_sections", mixed_morph, neurite_type=NeuriteType.apical_dendrite) + res2 = get("number_of_sections", mixed_morph, neurite_type=NeuriteType.basal_dendrite) + + assert res == res1 + res2, (res, res1, res2) + + res = get( + "number_of_sections", + mixed_morph, + neurite_type=[NeuriteType.apical_dendrite, NeuriteType.axon_carrying_dendrite], + ) + res1 = get("number_of_sections", mixed_morph, neurite_type=NeuriteType.apical_dendrite) + res2 = get("number_of_sections", mixed_morph, neurite_type=NeuriteType.basal_dendrite) + res3 = get("number_of_sections", mixed_morph, neurite_type=NeuriteType.axon) + + assert res == res1 + res2 + res3, (res, res1, res2, res3) + + +def test_iter_neurites__heterogeneous(mixed_morph): + mixed_morph.process_subtrees = True + + neurites = list(iter_neurites(mixed_morph)) + + assert len(neurites) == 3 + assert neurites[0].type == NeuriteType.basal_dendrite + assert neurites[1].type == NeuriteType.basal_dendrite + assert neurites[2].type == NeuriteType.apical_dendrite + + +def test_iter_neurites__homogeneous(mixed_morph): + mixed_morph.process_subtrees = False + + neurites = list(iter_neurites(mixed_morph)) + + assert len(neurites) == 3 + assert neurites[0].type == 
NeuriteType.basal_dendrite + assert neurites[1].type == NeuriteType.axon_carrying_dendrite + assert neurites[2].type == NeuriteType.apical_dendrite + + +def test_core_iter_sections__heterogeneous(mixed_morph): + mixed_morph.process_subtrees = True + + def assert_sections(neurite, section_type, expected_section_ids): + it = neurom.core.morphology.iter_sections(neurite, section_filter=is_type(section_type)) + assert [s.id for s in it] == expected_section_ids + + basal, axon_on_basal, apical = mixed_morph.neurites + + assert_sections(basal, NeuriteType.all, [0, 1, 2, 3, 4]) + assert_sections(basal, NeuriteType.basal_dendrite, [0, 1, 2, 3, 4]) + assert_sections(basal, NeuriteType.axon, []) + + assert_sections(axon_on_basal, NeuriteType.all, [5, 6, 7, 8, 9, 10, 11, 12, 13]) + assert_sections(axon_on_basal, NeuriteType.basal_dendrite, [5, 6, 7, 8]) + assert_sections(axon_on_basal, NeuriteType.axon, [9, 10, 11, 12, 13]) + assert_sections( + axon_on_basal, + (NeuriteType.axon, NeuriteType.basal_dendrite), + [5, 6, 7, 8, 9, 10, 11, 12, 13], + ) + + assert_sections(apical, NeuriteType.all, [14, 15, 16, 17, 18]) + assert_sections(apical, NeuriteType.apical_dendrite, [14, 15, 16, 17, 18]) + + +def test_features_neurite_map_sections__heterogeneous(mixed_morph): + mixed_morph.process_subtrees = True + + def assert_sections(neurite, section_type, iterator_type, expected_section_ids): + function = lambda section: section.id + section_ids = neurom.features.neurite._map_sections( + function, neurite, iterator_type=iterator_type, section_type=section_type + ) + assert section_ids == expected_section_ids + + basal, axon_on_basal, apical = mixed_morph.neurites + + # homogeneous tree, no difference between all and basal_dendrite types. 
+ assert_sections( + basal, + NeuriteType.all, + neurom.core.morphology.Section.ibifurcation_point, + [0, 1], + ) + assert_sections( + basal, + NeuriteType.basal_dendrite, + neurom.core.morphology.Section.ibifurcation_point, + [0, 1], + ) + # heterogeneous tree, forks cannot be heterogeneous if a type other than all is specified + # Section with id 5 is the transition section, which has a basal and axon children sections + assert_sections( + axon_on_basal, + NeuriteType.all, + neurom.core.morphology.Section.ibifurcation_point, + [5, 6, 9, 11], + ) + assert_sections( + axon_on_basal, + NeuriteType.basal_dendrite, + neurom.core.morphology.Section.ibifurcation_point, + [6], + ) + assert_sections( + axon_on_basal, + NeuriteType.axon, + neurom.core.morphology.Section.ibifurcation_point, + [9, 11], + ) + # homogeneous tree, no difference between all and basal_dendrite types. + assert_sections( + apical, + NeuriteType.all, + neurom.core.morphology.Section.ibifurcation_point, + [14, 15], + ) + assert_sections( + apical, + NeuriteType.apical_dendrite, + neurom.core.morphology.Section.ibifurcation_point, + [14, 15], + ) + # with composite type the whole heterogeneous tree is kept + assert_sections( + axon_on_basal, + NeuriteType.axon_carrying_dendrite, + neurom.core.morphology.Section.ibifurcation_point, + [5, 6, 9, 11], + ) + + +@pytest.mark.parametrize( + "iterator_type, neurite_type, expected_count", + [ + (Section.ipreorder, NeuriteType.all, 9), + (Section.ipreorder, NeuriteType.axon, 5), + (Section.ipreorder, NeuriteType.basal_dendrite, 4), + (Section.ipreorder, NeuriteType.axon_carrying_dendrite, 9), + (Section.ibifurcation_point, NeuriteType.all, 4), + (Section.ibifurcation_point, NeuriteType.basal_dendrite, 1), + (Section.ibifurcation_point, NeuriteType.axon, 2), + (Section.ibifurcation_point, NeuriteType.axon_carrying_dendrite, 4), + ], +) +def test_features_neurite_map_sections(mixed_morph, iterator_type, neurite_type, expected_count): + 
mixed_morph.process_subtrees = False + acd = mixed_morph.neurites[1] + + res = sum( + neurom.features.neurite._map_sections( + fun=lambda s: 1, + neurite=acd, + iterator_type=iterator_type, + section_type=neurite_type, + ) + ) + + assert res == expected_count + + +def _assert_stats_equal(actual_dict, expected_dict): + assert actual_dict.keys() == expected_dict.keys() + for key, value in actual_dict.items(): + expected_value = expected_dict[key] + if value is None or expected_value is None: + assert expected_value is value + else: + npt.assert_almost_equal(value, expected_value, decimal=3, err_msg=f"\nKey: {key}") + + +@pytest.fixture +def stats_cfg(): + return { + 'neurite': { + 'max_radial_distance': ['mean'], + 'number_of_sections': ['min'], + 'number_of_bifurcations': ['max'], + 'number_of_leaves': ['median'], + 'total_length': ['min'], + 'total_area': ['max'], + 'total_volume': ['median'], + 'section_lengths': ['mean'], + 'section_term_lengths': ['mean'], + 'section_bif_lengths': ['mean'], + 'section_branch_orders': ['mean'], + 'section_bif_branch_orders': ['mean'], + 'section_term_branch_orders': ['mean'], + 'section_path_distances': ['mean'], + 'section_taper_rates': ['median'], + 'local_bifurcation_angles': ['mean'], + 'remote_bifurcation_angles': ['mean'], + 'partition_asymmetry': ['mean'], + 'partition_asymmetry_length': ['mean'], + 'sibling_ratios': ['mean'], + 'diameter_power_relations': ['median'], + 'section_radial_distances': ['mean'], + 'section_term_radial_distances': ['mean'], + 'section_bif_radial_distances': ['mean'], + 'terminal_path_lengths': ['mean'], + 'section_volumes': ['min'], + 'section_areas': ['mean'], + 'section_tortuosity': ['mean'], + 'section_strahler_orders': ['min'], + }, + 'morphology': { + 'soma_surface_area': ['mean'], + 'soma_radius': ['max'], + 'max_radial_distance': ['mean'], + 'number_of_sections_per_neurite': ['median'], + 'total_length_per_neurite': ['mean'], + 'total_area_per_neurite': ['mean'], + 
'total_volume_per_neurite': ['mean'], + 'number_of_neurites': ['median'], + }, + 'neurite_type': ['AXON', 'BASAL_DENDRITE', 'APICAL_DENDRITE'], + } + + +def test_mixed__extract_stats__homogeneous(stats_cfg, mixed_morph): + mixed_morph.process_subtrees = False + res = neurom.apps.morph_stats.extract_stats(mixed_morph, stats_cfg) + + expected = { + 'max_number_of_bifurcations': 0, + 'max_total_area': 0, + 'mean_local_bifurcation_angles': None, + 'mean_max_radial_distance': 0.0, + 'mean_partition_asymmetry': None, + 'mean_partition_asymmetry_length': None, + 'mean_remote_bifurcation_angles': None, + 'mean_section_areas': None, + 'mean_section_bif_branch_orders': None, + 'mean_section_bif_lengths': None, + 'mean_section_bif_radial_distances': None, + 'mean_section_branch_orders': None, + 'mean_section_lengths': None, + 'mean_section_path_distances': None, + 'mean_section_radial_distances': None, + 'mean_section_term_branch_orders': None, + 'mean_section_term_lengths': None, + 'mean_section_term_radial_distances': None, + 'mean_section_tortuosity': None, + 'mean_sibling_ratios': None, + 'mean_terminal_path_lengths': None, + 'median_diameter_power_relations': None, + 'median_number_of_leaves': 0, + 'median_section_taper_rates': None, + 'median_total_volume': 0, + 'min_number_of_sections': 0, + 'min_section_strahler_orders': None, + 'min_section_volumes': None, + 'min_total_length': 0, + } + + _assert_stats_equal(res["axon"], expected) + + res_df = neurom.apps.morph_stats.extract_dataframe(mixed_morph, stats_cfg) + + # get axon column and tranform it to look like the expected values above + values = res_df.loc[pd.IndexSlice[:, "axon"]].iloc[0, :].to_dict() + _assert_stats_equal(values, expected) + + +def test_mixed__extract_stats__heterogeneous(stats_cfg, mixed_morph): + mixed_morph.process_subtrees = True + res = neurom.apps.morph_stats.extract_stats(mixed_morph, stats_cfg) + + expected = { + 'max_number_of_bifurcations': 2, + 'max_total_area': 3.4018507611950346, + 
'mean_local_bifurcation_angles': 2.356194490192345, + 'mean_max_radial_distance': 4.472136, + 'mean_partition_asymmetry': 0.25, + 'mean_partition_asymmetry_length': 0.1846990320847273, + 'mean_remote_bifurcation_angles': 2.356194490192345, + 'mean_section_areas': 0.6803701522390069, + 'mean_section_bif_branch_orders': 1.5, + 'mean_section_bif_lengths': 1.2071068, + 'mean_section_bif_radial_distances': 3.9240959, + 'mean_section_branch_orders': 2.2, + 'mean_section_lengths': 1.0828427, + 'mean_section_path_distances': 4.028427076339722, + 'mean_section_radial_distances': 4.207625, + 'mean_section_term_branch_orders': 2.6666666666666665, + 'mean_section_term_lengths': 1.0, + 'mean_section_term_radial_distances': 4.396645, + 'mean_section_tortuosity': 1.0, + 'mean_sibling_ratios': 1.0, + 'mean_terminal_path_lengths': 4.495093743006389, + 'median_diameter_power_relations': 2.0, + 'median_number_of_leaves': 3, + 'median_section_taper_rates': 8.6268466e-17, + 'median_total_volume': 0.17009254152367845, + 'min_number_of_sections': 5, + 'min_section_strahler_orders': 1, + 'min_section_volumes': 0.03141592778425469, + 'min_total_length': 5.414213538169861, + } + + _assert_stats_equal(res["axon"], expected) + + res_df = neurom.apps.morph_stats.extract_dataframe(mixed_morph, stats_cfg) + + # get axon column and tranform it to look like the expected values above + values = res_df.loc[pd.IndexSlice[:, "axon"]].iloc[0, :].to_dict() + _assert_stats_equal(values, expected) + + +def _assert_feature_equal(values, expected_values, per_neurite=False): + def innermost_value(iterable): + while isinstance(iterable, collections.abc.Iterable): + try: + iterable = iterable[0] + except IndexError: + # empty list + return None + return iterable + + assert_equal = lambda a, b: npt.assert_equal( + a, b, err_msg=f"ACTUAL: {a}\nDESIRED: {b}", verbose=False + ) + + def check(values, expected_values): + # handle empty lists because allclose always passes in that case. 
+ # See: https://github.com/numpy/numpy/issues/11071 + if isinstance(values, collections.abc.Iterable): + if isinstance(expected_values, collections.abc.Iterable): + if isinstance(innermost_value(values), (float, np.floating)): + npt.assert_allclose(values, expected_values, atol=1e-5) + else: + assert_equal(values, expected_values) + else: + assert_equal(values, expected_values) + else: + if isinstance(expected_values, collections.abc.Iterable): + assert_equal(values, expected_values) + else: + if isinstance(values, (float, np.floating)): + npt.assert_allclose(values, expected_values, atol=1e-5) + else: + assert_equal(values, expected_values) + + if per_neurite: + assert len(values) == len(expected_values) + for neurite_values, expected_neurite_values in zip(values, expected_values): + check(neurite_values, expected_neurite_values) + else: + check(values, expected_values) + + +def _dispatch_features(features, mode=None): + for feature_name, configurations in features.items(): + for cfg in configurations: + kwargs = cfg["kwargs"] if "kwargs" in cfg else {} + + if mode == "with-subtrees": + expected = cfg["expected_with_subtrees"] + elif mode == "wout-subtrees": + expected = cfg["expected_wout_subtrees"] + else: + expected = cfg["expected"] + + yield feature_name, kwargs, expected + + +def _population_features(mode): + features = json.loads(Path(DATA_DIR / "expected_population_features.json").read_bytes()) + + features_not_tested = list(set(_POPULATION_FEATURES) - set(features.keys())) + + assert not features_not_tested, ( + "The following morphology tests need to be included in the tests:\n\n" + + "\n".join(sorted(features_not_tested)) + + "\n" + ) + + return _dispatch_features(features, mode) + + +def _cast_types(mapping): + mapping = deepcopy(mapping) + for name, value in mapping.items(): + if name in {"neurite_type", "source_neurite_type", "target_neurite_type"}: + mapping[name] = NeuriteType(value) + elif name in {"section_type"}: + mapping[name] = 
SectionType(value) + return mapping + + +@pytest.mark.parametrize( + "feature_name, kwargs, expected", _population_features(mode="wout-subtrees") +) +@pytest.mark.filterwarnings('ignore::UserWarning') +def test_population__population_features_wout_subtrees(feature_name, kwargs, expected, population): + population.process_subtrees = False + values = get(feature_name, population, **_cast_types(kwargs)) + _assert_feature_equal(values, expected) + + +@pytest.mark.parametrize( + "feature_name, kwargs, expected", _population_features(mode="with-subtrees") +) +@pytest.mark.filterwarnings('ignore::UserWarning') +def test_population__population_features_with_subtrees(feature_name, kwargs, expected, population): + population.process_subtrees = True + values = get(feature_name, population, **_cast_types(kwargs)) + _assert_feature_equal(values, expected) + + +def _morphology_features(mode): + features = json.loads(Path(DATA_DIR / "expected_morphology_features.json").read_bytes()) + + features_not_tested = (set(_MORPHOLOGY_FEATURES) | set(_NEURITE_FEATURES)) - set( + features.keys() + ) + + assert not features_not_tested, ( + "The following morphology tests need to be included in the mixed morphology tests:\n" + f"{features_not_tested}" + ) + + return _dispatch_features(features, mode) + + +@pytest.mark.parametrize( + "feature_name, kwargs, expected", _morphology_features(mode="wout-subtrees") +) +@pytest.mark.filterwarnings('ignore::UserWarning') +def test_morphology__morphology_features_wout_subtrees(feature_name, kwargs, expected, mixed_morph): + mixed_morph.process_subtrees = False + values = get(feature_name, mixed_morph, **_cast_types(kwargs)) + _assert_feature_equal(values, expected) + + +@pytest.mark.parametrize( + "feature_name, kwargs, expected", _morphology_features(mode="with-subtrees") +) +@pytest.mark.filterwarnings('ignore::UserWarning') +def test_morphology__morphology_features_with_subtrees(feature_name, kwargs, expected, mixed_morph): + 
mixed_morph.process_subtrees = True + values = get(feature_name, mixed_morph, **_cast_types(kwargs)) + _assert_feature_equal(values, expected) + + +def _neurite_features(): + features = json.loads(Path(DATA_DIR / "expected_neurite_features.json").read_bytes()) + + # features that exist in both the neurite and morphology level, which indicates a different + # implementation in each level + features_not_tested = list( + (set(_NEURITE_FEATURES) & set(_MORPHOLOGY_FEATURES)) - features.keys() + ) + + assert not features_not_tested, ( + "The following morphology tests need to be included in the mixed neurite tests:\n\n" + + "\n".join(sorted(features_not_tested)) + + "\n" + ) + + return _dispatch_features(features) + + +@pytest.mark.parametrize("feature_name, kwargs, expected", _neurite_features()) +def test_morphology__neurite_features(feature_name, kwargs, expected, mixed_morph): + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + values = get(feature_name, mixed_morph.neurites, **_cast_types(kwargs)) + _assert_feature_equal(values, expected, per_neurite=True) + + +def test_sholl_crossings(mixed_morph): + mixed_morph.process_subtrees = True + center = mixed_morph.soma.center + radii = [] + assert list(get("sholl_crossings", mixed_morph, center=center, radii=radii)) == [] + assert list(get("sholl_crossings", mixed_morph, radii=radii)) == [] + assert list(get("sholl_crossings", mixed_morph)) == [0] + + radii = [1.0] + assert list(get("sholl_crossings", mixed_morph, center=center, radii=radii)) == [3] + + radii = [1.0, 4.0] + assert list(get("sholl_crossings", mixed_morph, center=center, radii=radii)) == [3, 3] + + radii = [1.0, 4.0, 5.0] + assert list(get("sholl_crossings", mixed_morph, center=center, radii=radii)) == [3, 3, 0] + + radii = [1.0, 4.0, 5.0, 10] + assert list( + get( + "sholl_crossings", mixed_morph, neurite_type=NeuriteType.all, center=center, radii=radii + ) + ) == [3, 3, 0, 0] + assert list( + get( + "sholl_crossings", + mixed_morph, + 
neurite_type=NeuriteType.basal_dendrite, + center=center, + radii=radii, + ) + ) == [2, 1, 0, 0] + assert list( + get( + "sholl_crossings", + mixed_morph, + neurite_type=NeuriteType.apical_dendrite, + center=center, + radii=radii, + ) + ) == [1, 0, 0, 0] + assert list( + get( + "sholl_crossings", + mixed_morph, + neurite_type=NeuriteType.axon, + center=center, + radii=radii, + ) + ) == [0, 2, 0, 0] + + +def test_sholl_frequency(mixed_morph): + mixed_morph.process_subtrees = True + assert list(get("sholl_frequency", mixed_morph)) == [0] + assert list(get("sholl_frequency", mixed_morph, step_size=3)) == [0, 2] + assert list(get("sholl_frequency", mixed_morph, bins=[1, 3, 5])) == [3, 8, 0] + + assert list(get("sholl_frequency", mixed_morph, neurite_type=NeuriteType.basal_dendrite)) == [0] + assert list( + get("sholl_frequency", mixed_morph, neurite_type=NeuriteType.basal_dendrite, step_size=3) + ) == [0, 1] + assert list( + get("sholl_frequency", mixed_morph, neurite_type=NeuriteType.basal_dendrite, bins=[1, 3, 5]) + ) == [2, 4, 0] + + assert list(get("sholl_frequency", mixed_morph, neurite_type=NeuriteType.axon)) == [0] + assert list( + get("sholl_frequency", mixed_morph, neurite_type=NeuriteType.axon, step_size=3) + ) == [0, 1] + assert list( + get("sholl_frequency", mixed_morph, neurite_type=NeuriteType.axon, bins=[1, 3, 5]) + ) == [0, 1, 0] + + +def test_sholl_frequency_pop(mixed_morph): + pop = Population([mixed_morph, mixed_morph]) + pop.process_subtrees = True + assert list(get("sholl_frequency", pop)) == [0] + assert list(get("sholl_frequency", pop, step_size=3)) == [0, 4] + assert list(get("sholl_frequency", pop, bins=[1, 3, 5])) == [6, 16, 0] + + assert list(get("sholl_frequency", pop, neurite_type=NeuriteType.basal_dendrite)) == [0] + assert list( + get("sholl_frequency", pop, neurite_type=NeuriteType.basal_dendrite, step_size=3) + ) == [0, 2] + assert list( + get("sholl_frequency", pop, neurite_type=NeuriteType.basal_dendrite, bins=[1, 3, 5]) + ) == [4, 8, 
0] + + assert list(get("sholl_frequency", pop, neurite_type=NeuriteType.axon)) == [0] + assert list(get("sholl_frequency", pop, neurite_type=NeuriteType.axon, step_size=3)) == [0, 2] + assert list(get("sholl_frequency", pop, neurite_type=NeuriteType.axon, bins=[1, 3, 5])) == [ + 0, + 2, + 0, + ] diff --git a/tests/test_morphmath.py b/tests/test_morphmath.py index 43bb4dc8d..198b8aa01 100644 --- a/tests/test_morphmath.py +++ b/tests/test_morphmath.py @@ -47,163 +47,164 @@ def test_vector(): def test_linear_interpolate(): - p0 = np.array([-1., -1., -1.]) - p1 = np.array([1., 1., 1.]) + p0 = np.array([-1.0, -1.0, -1.0]) + p1 = np.array([1.0, 1.0, 1.0]) res = mm.linear_interpolate(p0, p1, 0.0) - assert np.allclose(res, (-1., -1., -1.)) + assert np.allclose(res, (-1.0, -1.0, -1.0)) res = mm.linear_interpolate(p0, p1, 0.25) assert np.allclose(res, (-0.5, -0.5, -0.5)) res = mm.linear_interpolate(p0, p1, 0.5) - assert np.allclose(res, (0., 0., 0.)) + assert np.allclose(res, (0.0, 0.0, 0.0)) res = mm.linear_interpolate(p0, p1, 0.75) assert np.allclose(res, (0.5, 0.5, 0.5)) res = mm.linear_interpolate(p0, p1, 1.0) - assert np.allclose(res, (1., 1., 1.)) + assert np.allclose(res, (1.0, 1.0, 1.0)) def test_interpolate_radius_r1_g_r2(): - res = mm.interpolate_radius(2., 1., 0.1) + res = mm.interpolate_radius(2.0, 1.0, 0.1) assert res == 1.9 def test_interpolate_radius_r2_g_r1(): - res = mm.interpolate_radius(1., 2., 0.2) + res = mm.interpolate_radius(1.0, 2.0, 0.2) assert res == 1.2 def test_interpolate_radius_extreme_cases(): - res = mm.interpolate_radius(1., 1., 0.2) - assert res == 1. - res = mm.interpolate_radius(0., 2., 0.3) - assert res == 2. * 0.3 - res = mm.interpolate_radius(3., 0., 0.15) - assert res == 3. * (1. 
- 0.15) + res = mm.interpolate_radius(1.0, 1.0, 0.2) + assert res == 1.0 + res = mm.interpolate_radius(0.0, 2.0, 0.3) + assert res == 2.0 * 0.3 + res = mm.interpolate_radius(3.0, 0.0, 0.15) + assert res == 3.0 * (1.0 - 0.15) def test_path_fraction_point_two_points(): - - points = [np.array([-1., -1., -1.]), np.array([1., 1., 1.])] + points = [np.array([-1.0, -1.0, -1.0]), np.array([1.0, 1.0, 1.0])] res = mm.path_fraction_point(points, 0.0) - assert np.allclose(res, (-1., -1., -1.)) + assert np.allclose(res, (-1.0, -1.0, -1.0)) res = mm.path_fraction_point(points, 0.25) assert np.allclose(res, (-0.5, -0.5, -0.5)) res = mm.path_fraction_point(points, 1.0) - assert np.allclose(res, (1., 1., 1.)) + assert np.allclose(res, (1.0, 1.0, 1.0)) def test_path_fraction_three_symmetric_points(): - - points = [np.array((1., 0., 0.)), - np.array((0., 0., 0.)), - np.array((0., 0., 1.))] + points = [np.array((1.0, 0.0, 0.0)), np.array((0.0, 0.0, 0.0)), np.array((0.0, 0.0, 1.0))] res = mm.path_fraction_point(points, 0.0) - assert np.allclose(res, (1., 0., 0.)) + assert np.allclose(res, (1.0, 0.0, 0.0)) res = mm.path_fraction_point(points, 0.25) - assert np.allclose(res, (0.5, 0., 0.)) + assert np.allclose(res, (0.5, 0.0, 0.0)) res = mm.path_fraction_point(points, 0.5) - assert np.allclose(res, (0., 0., 0.)) + assert np.allclose(res, (0.0, 0.0, 0.0)) res = mm.path_fraction_point(points, 0.75) - assert np.allclose(res, (0., 0., 0.5)) + assert np.allclose(res, (0.0, 0.0, 0.5)) res = mm.path_fraction_point(points, 1.0) - assert np.allclose(res, (0., 0., 1.)) + assert np.allclose(res, (0.0, 0.0, 1.0)) def test_path_fraction_many_points(): - def x(theta): return np.cos(theta) - def y(theta): return np.sin(theta) - points = [np.array((x(theta), y(theta), 2.)) - for theta in (0., np.pi/4., np.pi/2., 3.*np.pi/4., np.pi)] + def x(theta): + return np.cos(theta) + + def y(theta): + return np.sin(theta) + + points = [ + np.array((x(theta), y(theta), 2.0)) + for theta in (0.0, np.pi / 4.0, np.pi 
/ 2.0, 3.0 * np.pi / 4.0, np.pi) + ] - res = mm.path_fraction_point(points, 0.) - assert np.allclose(res, (x(0.), y(0.), 2.)) + res = mm.path_fraction_point(points, 0.0) + assert np.allclose(res, (x(0.0), y(0.0), 2.0)) res = mm.path_fraction_point(points, 0.25) - assert np.allclose(res, (x(np.pi / 4.), y(np.pi / 4.), 2.)) + assert np.allclose(res, (x(np.pi / 4.0), y(np.pi / 4.0), 2.0)) res = mm.path_fraction_point(points, 0.5) - assert np.allclose(res, (x(np.pi / 2.), y(np.pi / 2.), 2.)) + assert np.allclose(res, (x(np.pi / 2.0), y(np.pi / 2.0), 2.0)) res = mm.path_fraction_point(points, 0.75) - assert np.allclose(res, (x(3. * np.pi / 4.), y(3. * np.pi / 4.), 2.)) + assert np.allclose(res, (x(3.0 * np.pi / 4.0), y(3.0 * np.pi / 4.0), 2.0)) - res = mm.path_fraction_point(points, 1.) - assert np.allclose(res, (x(np.pi), y(np.pi), 2.)) + res = mm.path_fraction_point(points, 1.0) + assert np.allclose(res, (x(np.pi), y(np.pi), 2.0)) def test_scalar_projection(): - v1 = np.array([4., 1., 0.]) - v2 = np.array([2., 3., 0.]) + v1 = np.array([4.0, 1.0, 0.0]) + v2 = np.array([2.0, 3.0, 0.0]) res = mm.scalar_projection(v1, v2) assert np.isclose(res, 3.0508510792387602) def test_scalar_projection_collinear(): - v1 = np.array([1., 2., 0.]) - v2 = np.array([4., 8., 0.]) + v1 = np.array([1.0, 2.0, 0.0]) + v2 = np.array([4.0, 8.0, 0.0]) res = mm.scalar_projection(v1, v2) - assert np.allclose(res, 20. / np.linalg.norm(v2)) + assert np.allclose(res, 20.0 / np.linalg.norm(v2)) def test_scalar_projection_perpendicular(): - v1 = np.array([3., 0., 0.]) - v2 = np.array([0., 1.5, 0.]) + v1 = np.array([3.0, 0.0, 0.0]) + v2 = np.array([0.0, 1.5, 0.0]) res = mm.scalar_projection(v1, v2) - assert np.allclose(res, 0.) 
+ assert np.allclose(res, 0.0) def test_vector_projection(): - v1 = np.array([4., 1., 0.]) - v2 = np.array([2., 3., 0.]) + v1 = np.array([4.0, 1.0, 0.0]) + v2 = np.array([2.0, 3.0, 0.0]) res = mm.vector_projection(v1, v2) - assert np.allclose(res, (1.6923076923076923, 2.5384615384615383, 0.)) + assert np.allclose(res, (1.6923076923076923, 2.5384615384615383, 0.0)) def test_vector_projection_collinear(): - - v1 = np.array([1., 2., 3.]) - v2 = np.array([4., 8., 12.]) + v1 = np.array([1.0, 2.0, 3.0]) + v2 = np.array([4.0, 8.0, 12.0]) res = mm.vector_projection(v1, v2) assert np.allclose(res, v1) def test_vector_projection_perpendicular(): - - v1 = np.array([2., 0., 0.]) - v2 = np.array([0., 3., 0.]) + v1 = np.array([2.0, 0.0, 0.0]) + v2 = np.array([0.0, 3.0, 0.0]) res = mm.vector_projection(v1, v2) - assert np.allclose(res, (0., 0., 0.)) + assert np.allclose(res, (0.0, 0.0, 0.0)) def test_dist_point_line(): - # an easy one: - res = mm.dist_point_line(np.array([0., 0., 0.]), np.array([0., 1., 0.]), np.array([1., 0., 0.])) - assert np.isclose(res, np.sqrt(2) / 2.) + res = mm.dist_point_line( + np.array([0.0, 0.0, 0.0]), np.array([0.0, 1.0, 0.0]), np.array([1.0, 0.0, 0.0]) + ) + assert np.isclose(res, np.sqrt(2) / 2.0) # check the distance of the line 3x - 4y + 1 = 0 # with parametric form of (t, (4t - 1)/3) # two points that satisfy this equation: - l1 = np.array([0., 1. / 4., 0.]) - l2 = np.array([1., 1., 0.]) + l1 = np.array([0.0, 1.0 / 4.0, 0.0]) + l2 = np.array([1.0, 1.0, 0.0]) - p = np.array([2., 3., 0.]) + p = np.array([2.0, 3.0, 0.0]) res = mm.dist_point_line(p, l1, l2) - assert res == 1. 
+ assert res == 1.0 def test_point_dist2(): @@ -337,7 +338,7 @@ def test_angle_between_vectors(): def soma_points(radius=5, number_points=20): - phi = uniform(0, 2*pi, number_points) + phi = uniform(0, 2 * pi, number_points) costheta = uniform(-1, 1, number_points) theta = np.arccos(costheta) x = radius * np.sin(theta) * np.cos(phi) @@ -355,7 +356,7 @@ def test_polygon_diameter(): assert dia == 2.0 surfpoint = soma_points() dia1 = mm.polygon_diameter(surfpoint) - assert fabs(dia1-10.0) < 0.1 + assert fabs(dia1 - 10.0) < 0.1 def test_average_points_dist(): @@ -448,8 +449,9 @@ def test_segment_z_coordinate(): def test_segment_radial_dist(): seg = ((11, 11, 11), (33, 33, 33)) - assert_almost_equal(mm.segment_radial_dist(seg, (0, 0, 0)), - mm.point_dist((0, 0, 0), (22, 22, 22))) + assert_almost_equal( + mm.segment_radial_dist(seg, (0, 0, 0)), mm.point_dist((0, 0, 0), (22, 22, 22)) + ) def test_taper_rate(): @@ -473,31 +475,29 @@ def test_segment_taper_rate(): def test_pca(): + p = np.array( + [[4.0, 2.0, 0.6], [4.2, 2.1, 0.59], [3.9, 2.0, 0.58], [4.3, 2.1, 0.62], [4.1, 2.2, 0.63]] + ) - p = np.array([[4., 2., 0.6], - [4.2, 2.1, 0.59], - [3.9, 2.0, 0.58], - [4.3, 2.1, 0.62], - [4.1, 2.2, 0.63]]) - - RES_COV = np.array([[0.025, 0.0075, 0.00175], - [0.0075, 0.0070, 0.00135], - [0.00175, 0.00135, 0.00043]]) + RES_COV = np.array( + [[0.025, 0.0075, 0.00175], [0.0075, 0.0070, 0.00135], [0.00175, 0.00135, 0.00043]] + ) - RES_EIGV = np.array([[0.93676841, 0.34958469, -0.0159843], - [0.34148069, -0.92313136, -0.1766902], - [0.0765238, -0.16005947, 0.98413672]]) + RES_EIGV = np.array( + [ + [0.93676841, 0.34958469, -0.0159843], + [0.34148069, -0.92313136, -0.1766902], + [0.0765238, -0.16005947, 0.98413672], + ] + ) RES_EIGS = np.array([0.0278769, 0.00439387, 0.0001592]) eigs, eigv = mm.pca(p) assert np.allclose(eigs, RES_EIGS) - assert (np.allclose(eigv[:, 0], RES_EIGV[:, 0]) - or np.allclose(eigv[:, 0], -1. 
* RES_EIGV[:, 0])) - assert (np.allclose(eigv[:, 1], RES_EIGV[:, 1]) - or np.allclose(eigv[:, 1], -1. * RES_EIGV[:, 1])) - assert (np.allclose(eigv[:, 2], RES_EIGV[:, 2]) - or np.allclose(eigv[:, 2], -1. * RES_EIGV[:, 2])) + assert np.allclose(eigv[:, 0], RES_EIGV[:, 0]) or np.allclose(eigv[:, 0], -1.0 * RES_EIGV[:, 0]) + assert np.allclose(eigv[:, 1], RES_EIGV[:, 1]) or np.allclose(eigv[:, 1], -1.0 * RES_EIGV[:, 1]) + assert np.allclose(eigv[:, 2], RES_EIGV[:, 2]) or np.allclose(eigv[:, 2], -1.0 * RES_EIGV[:, 2]) def test_sphere_area(): @@ -506,12 +506,14 @@ def test_sphere_area(): def test_interval_lengths(): - assert_array_almost_equal(mm.interval_lengths([[0, 0, 0], [1, 1, 0], [2, 11, 0]]), - [1.414214, 10.049876]) + assert_array_almost_equal( + mm.interval_lengths([[0, 0, 0], [1, 1, 0], [2, 11, 0]]), [1.414214, 10.049876] + ) - assert_array_almost_equal(mm.interval_lengths([[0, 0, 0], [1, 1, 0], [2, 11, 0]], - prepend_zero=True), - [0, 1.414214, 10.049876]) + assert_array_almost_equal( + mm.interval_lengths([[0, 0, 0], [1, 1, 0], [2, 11, 0]], prepend_zero=True), + [0, 1.414214, 10.049876], + ) def test_spherical_coordinates(): @@ -537,254 +539,198 @@ def test_spherical_coordinates(): def test_principal_direction_extent(): - # test with points on a circle with radius 0.5, and center at 0.0 - circle_points = np.array([ - [ 5.0e-01, 0.0e+00, 0.0e+00], - [ 4.7e-01, 1.6e-01, 0.0e+00], - [ 3.9e-01, 3.1e-01, 0.0e+00], - [ 2.7e-01, 4.2e-01, 0.0e+00], - [ 1.2e-01, 4.8e-01, 0.0e+00], - [-4.1e-02, 5.0e-01, 0.0e+00], - [-2.0e-01, 4.6e-01, 0.0e+00], - [-3.4e-01, 3.7e-01, 0.0e+00], - [-4.4e-01, 2.4e-01, 0.0e+00], - [-5.0e-01, 8.2e-02, 0.0e+00], - [-5.0e-01, -8.2e-02, 0.0e+00], - [-4.4e-01, -2.4e-01, 0.0e+00], - [-3.4e-01, -3.7e-01, 0.0e+00], - [-2.0e-01, -4.6e-01, 0.0e+00], - [-4.1e-02, -5.0e-01, 0.0e+00], - [ 1.2e-01, -4.8e-01, 0.0e+00], - [ 2.7e-01, -4.2e-01, 0.0e+00], - [ 3.9e-01, -3.1e-01, 0.0e+00], - [ 4.7e-01, -1.6e-01, 0.0e+00], - [ 5.0e-01, -1.2e-16, 0.0e+00] - ]) + 
circle_points = np.array( + [ + [5.0e-01, 0.0e00, 0.0e00], + [4.7e-01, 1.6e-01, 0.0e00], + [3.9e-01, 3.1e-01, 0.0e00], + [2.7e-01, 4.2e-01, 0.0e00], + [1.2e-01, 4.8e-01, 0.0e00], + [-4.1e-02, 5.0e-01, 0.0e00], + [-2.0e-01, 4.6e-01, 0.0e00], + [-3.4e-01, 3.7e-01, 0.0e00], + [-4.4e-01, 2.4e-01, 0.0e00], + [-5.0e-01, 8.2e-02, 0.0e00], + [-5.0e-01, -8.2e-02, 0.0e00], + [-4.4e-01, -2.4e-01, 0.0e00], + [-3.4e-01, -3.7e-01, 0.0e00], + [-2.0e-01, -4.6e-01, 0.0e00], + [-4.1e-02, -5.0e-01, 0.0e00], + [1.2e-01, -4.8e-01, 0.0e00], + [2.7e-01, -4.2e-01, 0.0e00], + [3.9e-01, -3.1e-01, 0.0e00], + [4.7e-01, -1.6e-01, 0.0e00], + [5.0e-01, -1.2e-16, 0.0e00], + ] + ) npt.assert_allclose( mm.principal_direction_extent(circle_points), - [1., 1., 0.], atol=1e-6, + [1.0, 1.0, 0.0], + atol=1e-6, ) # extent should be invariant to translations npt.assert_allclose( - mm.principal_direction_extent(circle_points + 100.), - [1., 1., 0.], atol=1e-6, + mm.principal_direction_extent(circle_points + 100.0), + [1.0, 1.0, 0.0], + atol=1e-6, ) npt.assert_allclose( - mm.principal_direction_extent(circle_points - 100.), - [1., 1., 0.], atol=1e-6, + mm.principal_direction_extent(circle_points - 100.0), + [1.0, 1.0, 0.0], + atol=1e-6, ) - cross_3D_points = np.array([ - [-5.2, 0.0, 0.0], - [ 4.8, 0.0, 0.0], - [ 0.0,-1.3, 0.0], - [ 0.0, 4.7, 0.0], - [ 0.0, 0.0,-11.2], - [ 0.0, 0.0, 0.8], - ]) + cross_3D_points = np.array( + [ + [-5.2, 0.0, 0.0], + [4.8, 0.0, 0.0], + [0.0, -1.3, 0.0], + [0.0, 4.7, 0.0], + [0.0, 0.0, -11.2], + [0.0, 0.0, 0.8], + ] + ) npt.assert_allclose( mm.principal_direction_extent(cross_3D_points), - [12.0, 10.0, 6.0], atol=0.1, + [12.0, 10.0, 6.0], + atol=0.1, ) def test_convex_hull_invalid(): - assert mm.convex_hull([]) is None - assert mm.convex_hull([[1., 0., 0.], [1., 0., 0.]]) is None + assert mm.convex_hull([[1.0, 0.0, 0.0], [1.0, 0.0, 0.0]]) is None def _shape_datasets(): - return { - "cross-3D": np.array([ - [-5.2, 0.0, 0.0], - [ 4.8, 0.0, 0.0], - [ 0.0,-1.3, 0.0], - [ 0.0, 4.7, 
0.0], - [ 0.0, 0.0,-11.2], - [ 0.0, 0.0, 0.8], - ]), - "cross-2D": np.array([ - [ 0.0, 0.0], - [ 0.0, 0.0], - [-1.3, 0.0], - [ 4.7, 0.0], - [ 0.0,-11.2], - [ 0.0, 0.8], - ]), - "circle-2D": np.array([ - [ 5.0e-01, 0.0e+00], - [ 4.7e-01, 1.6e-01], - [ 3.9e-01, 3.1e-01], - [ 2.7e-01, 4.2e-01], - [ 1.2e-01, 4.8e-01], - [-4.1e-02, 5.0e-01], - [-2.0e-01, 4.6e-01], - [-3.4e-01, 3.7e-01], - [-4.4e-01, 2.4e-01], - [-5.0e-01, 8.2e-02], - [-5.0e-01, -8.2e-02], - [-4.4e-01, -2.4e-01], - [-3.4e-01, -3.7e-01], - [-2.0e-01, -4.6e-01], - [-4.1e-02, -5.0e-01], - [ 1.2e-01, -4.8e-01], - [ 2.7e-01, -4.2e-01], - [ 3.9e-01, -3.1e-01], - [ 4.7e-01, -1.6e-01], - [ 5.0e-01, -1.2e-16], - ]), - "square-2D": np.array([ - [ 0.0, 0.0 ], - [ 5.0, 0.0 ], - [10.0, 0.0 ], - [ 0.0, 5.0 ], - [ 0.0, 10.0], - [ 5.0, 10.0], - [10.0, 10.0], - [10.0, 5.0 ], - ]), - "rectangle-2D": np.array([ - [ 0.0, 0.0 ], - [ 5.0, 0.0 ], - [20.0, 0.0 ], - [ 0.0, 5.0 ], - [ 0.0, 10.0], - [ 5.0, 10.0], - [20.0, 10.0], - [20.0, 5.0 ], - ]), - "oval-2D": np.array([ - [ 5.00e-01, 0.00e+00], - [ 4.70e-01, 4.80e-01], - [ 3.90e-01, 9.30e-01], - [ 2.70e-01, 1.26e+00], - [ 1.20e-01, 1.44e+00], - [-4.10e-02, 1.50e+00], - [-2.00e-01, 1.38e+00], - [-3.40e-01, 1.11e+00], - [-4.40e-01, 7.20e-01], - [-5.00e-01, 2.46e-01], - [-5.00e-01, -2.46e-01], - [-4.40e-01, -7.20e-01], - [-3.40e-01, -1.11e+00], - [-2.00e-01, -1.38e+00], - [-4.10e-02, -1.50e+00], - [ 1.20e-01, -1.44e+00], - [ 2.70e-01, -1.26e+00], - [ 3.90e-01, -9.30e-01], - [ 4.70e-01, -4.80e-01], - [ 5.00e-01, -3.60e-16] - ]), + "cross-3D": np.array( + [ + [-5.2, 0.0, 0.0], + [4.8, 0.0, 0.0], + [0.0, -1.3, 0.0], + [0.0, 4.7, 0.0], + [0.0, 0.0, -11.2], + [0.0, 0.0, 0.8], + ] + ), + "cross-2D": np.array( + [ + [0.0, 0.0], + [0.0, 0.0], + [-1.3, 0.0], + [4.7, 0.0], + [0.0, -11.2], + [0.0, 0.8], + ] + ), + "circle-2D": np.array( + [ + [5.0e-01, 0.0e00], + [4.7e-01, 1.6e-01], + [3.9e-01, 3.1e-01], + [2.7e-01, 4.2e-01], + [1.2e-01, 4.8e-01], + [-4.1e-02, 5.0e-01], + [-2.0e-01, 
4.6e-01], + [-3.4e-01, 3.7e-01], + [-4.4e-01, 2.4e-01], + [-5.0e-01, 8.2e-02], + [-5.0e-01, -8.2e-02], + [-4.4e-01, -2.4e-01], + [-3.4e-01, -3.7e-01], + [-2.0e-01, -4.6e-01], + [-4.1e-02, -5.0e-01], + [1.2e-01, -4.8e-01], + [2.7e-01, -4.2e-01], + [3.9e-01, -3.1e-01], + [4.7e-01, -1.6e-01], + [5.0e-01, -1.2e-16], + ] + ), + "square-2D": np.array( + [ + [0.0, 0.0], + [5.0, 0.0], + [10.0, 0.0], + [0.0, 5.0], + [0.0, 10.0], + [5.0, 10.0], + [10.0, 10.0], + [10.0, 5.0], + ] + ), + "rectangle-2D": np.array( + [ + [0.0, 0.0], + [5.0, 0.0], + [20.0, 0.0], + [0.0, 5.0], + [0.0, 10.0], + [5.0, 10.0], + [20.0, 10.0], + [20.0, 5.0], + ] + ), + "oval-2D": np.array( + [ + [5.00e-01, 0.00e00], + [4.70e-01, 4.80e-01], + [3.90e-01, 9.30e-01], + [2.70e-01, 1.26e00], + [1.20e-01, 1.44e00], + [-4.10e-02, 1.50e00], + [-2.00e-01, 1.38e00], + [-3.40e-01, 1.11e00], + [-4.40e-01, 7.20e-01], + [-5.00e-01, 2.46e-01], + [-5.00e-01, -2.46e-01], + [-4.40e-01, -7.20e-01], + [-3.40e-01, -1.11e00], + [-2.00e-01, -1.38e00], + [-4.10e-02, -1.50e00], + [1.20e-01, -1.44e00], + [2.70e-01, -1.26e00], + [3.90e-01, -9.30e-01], + [4.70e-01, -4.80e-01], + [5.00e-01, -3.60e-16], + ] + ), } def test_aspect_ratio(): - shapes = _shape_datasets() - npt.assert_allclose( - mm.aspect_ratio(shapes["cross-3D"]), - 0.5, - atol=1e-5 - ) - npt.assert_allclose( - mm.aspect_ratio(shapes["cross-2D"]), - 0.5, - atol=1e-5 - ) - npt.assert_allclose( - mm.aspect_ratio(shapes["circle-2D"]), - 1.0, - atol=1e-5 - ) - npt.assert_allclose( - mm.aspect_ratio(shapes["square-2D"]), - 1.0, - atol=1e-5 - ) - npt.assert_allclose( - mm.aspect_ratio(shapes["rectangle-2D"]), - 0.5, - atol=1e-5 - ) - npt.assert_allclose( - mm.aspect_ratio(shapes["oval-2D"]), - 0.333333, - atol=1e-5 - ) + npt.assert_allclose(mm.aspect_ratio(shapes["cross-3D"]), 0.5, atol=1e-5) + npt.assert_allclose(mm.aspect_ratio(shapes["cross-2D"]), 0.5, atol=1e-5) + npt.assert_allclose(mm.aspect_ratio(shapes["circle-2D"]), 1.0, atol=1e-5) + 
npt.assert_allclose(mm.aspect_ratio(shapes["square-2D"]), 1.0, atol=1e-5) + npt.assert_allclose(mm.aspect_ratio(shapes["rectangle-2D"]), 0.5, atol=1e-5) + npt.assert_allclose(mm.aspect_ratio(shapes["oval-2D"]), 0.333333, atol=1e-5) def test_circularity(): - shapes = _shape_datasets() - npt.assert_allclose( - mm.circularity(shapes["cross-3D"]), - 0.051904, - atol=1e-5 - ) - npt.assert_allclose( - mm.circularity(shapes["cross-2D"]), - 0.512329, - atol=1e-5 - ) - npt.assert_allclose( - mm.circularity(shapes["circle-2D"]), - 0.99044, - atol=1e-5 - ) - npt.assert_allclose( - mm.circularity(shapes["square-2D"]), - 0.785398, - atol=1e-5 - ) - npt.assert_allclose( - mm.circularity(shapes["rectangle-2D"]), - 0.698132, - atol=1e-5 - ) - npt.assert_allclose( - mm.circularity(shapes["oval-2D"]), - 0.658071, - atol=1e-5 - ) + npt.assert_allclose(mm.circularity(shapes["cross-3D"]), 0.051904, atol=1e-5) + npt.assert_allclose(mm.circularity(shapes["cross-2D"]), 0.512329, atol=1e-5) + npt.assert_allclose(mm.circularity(shapes["circle-2D"]), 0.99044, atol=1e-5) + npt.assert_allclose(mm.circularity(shapes["square-2D"]), 0.785398, atol=1e-5) + npt.assert_allclose(mm.circularity(shapes["rectangle-2D"]), 0.698132, atol=1e-5) + npt.assert_allclose(mm.circularity(shapes["oval-2D"]), 0.658071, atol=1e-5) + def test_shape_factor(): shapes = _shape_datasets() - npt.assert_allclose( - mm.shape_factor(shapes["cross-3D"]), - 0.786988, - atol=1e-5 - ) - npt.assert_allclose( - mm.shape_factor(shapes["cross-2D"]), - 0.244018, - atol=1e-5 - ) - npt.assert_allclose( - mm.shape_factor(shapes["circle-2D"]), - 0.766784, - atol=1e-5 - ) - npt.assert_allclose( - mm.shape_factor(shapes["square-2D"]), - 0.5, - atol=1e-5 - ) - npt.assert_allclose( - mm.shape_factor(shapes["rectangle-2D"]), - 0.4, - atol=1e-5 - ) - npt.assert_allclose( - mm.shape_factor(shapes["oval-2D"]), - 0.257313, - atol=1e-5 - ) + npt.assert_allclose(mm.shape_factor(shapes["cross-3D"]), 0.786988, atol=1e-5) + 
npt.assert_allclose(mm.shape_factor(shapes["cross-2D"]), 0.244018, atol=1e-5) + npt.assert_allclose(mm.shape_factor(shapes["circle-2D"]), 0.766784, atol=1e-5) + npt.assert_allclose(mm.shape_factor(shapes["square-2D"]), 0.5, atol=1e-5) + npt.assert_allclose(mm.shape_factor(shapes["rectangle-2D"]), 0.4, atol=1e-5) + npt.assert_allclose(mm.shape_factor(shapes["oval-2D"]), 0.257313, atol=1e-5) diff --git a/tests/test_stats.py b/tests/test_stats.py index 3daf27e44..13977f693 100644 --- a/tests/test_stats.py +++ b/tests/test_stats.py @@ -39,13 +39,13 @@ np.random.seed(42) -NORMAL_MU = 10. +NORMAL_MU = 10.0 NORMAL_SIGMA = 1.0 NORMAL = np.random.normal(NORMAL_MU, NORMAL_SIGMA, 1000) -EXPON_LAMBDA = 10. +EXPON_LAMBDA = 10.0 EXPON = np.random.exponential(EXPON_LAMBDA, 1000) -UNIFORM_MIN = -1. -UNIFORM_MAX = 1. +UNIFORM_MIN = -1.0 +UNIFORM_MAX = 1.0 UNIFORM = np.random.uniform(UNIFORM_MIN, UNIFORM_MAX, 1000) @@ -54,6 +54,7 @@ def test_fit_normal_params(): assert_almost_equal(fit_.params[0], NORMAL_MU, 1) assert_almost_equal(fit_.params[1], NORMAL_SIGMA, 1) + def test_fit_normal_dict(): fit_ = st.fit(NORMAL, 'norm') d = st.fit_results_to_dict(fit_, min_bound=-123, max_bound=123) @@ -62,6 +63,7 @@ def test_fit_normal_dict(): assert_almost_equal(d['min'], -123, 1) assert_almost_equal(d['max'], 123, 1) + def test_fit_normal_regression(): fit_ = st.fit(NORMAL, 'norm') assert_almost_equal(fit_.params[0], 10.019332055822, 12) @@ -69,6 +71,7 @@ def test_fit_normal_regression(): assert_almost_equal(fit_.errs[0], 0.021479979161, 12) assert_almost_equal(fit_.errs[1], 0.7369569123250506, 12) + def test_fit_default_is_normal(): fit0_ = st.fit(NORMAL) fit1_ = st.fit(NORMAL, 'norm') @@ -92,7 +95,6 @@ def test_optimal_distribution_uniform(): def test_get_test(): - stat_test_enums = (st.StatTests.ks, st.StatTests.wilcoxon, st.StatTests.ttest) expected_stat_test_strings = ("ks_2samp", "wilcoxon", "ttest_ind") @@ -105,14 +107,15 @@ def test_get_test(): def test_fit_results_dict_uniform(): - a = 
st.FitResults(params=[1, 2], errs=[3,4], type='uniform') + a = st.FitResults(params=[1, 2], errs=[3, 4], type='uniform') d = st.fit_results_to_dict(a) assert d['min'] == 1 assert d['max'] == 3 assert d['type'] == 'uniform' + def test_fit_results_dict_uniform_min_max(): - a = st.FitResults(params=[1, 2], errs=[3,4], type='uniform') + a = st.FitResults(params=[1, 2], errs=[3, 4], type='uniform') d = st.fit_results_to_dict(a, min_bound=-100, max_bound=100) assert d['min'] == 1 assert d['max'] == 3 @@ -120,7 +123,7 @@ def test_fit_results_dict_uniform_min_max(): def test_fit_results_dict_normal(): - a = st.FitResults(params=[1, 2], errs=[3,4], type='norm') + a = st.FitResults(params=[1, 2], errs=[3, 4], type='norm') d = st.fit_results_to_dict(a) assert d['mu'] == 1 assert d['sigma'] == 2 @@ -128,7 +131,7 @@ def test_fit_results_dict_normal(): def test_fit_results_dict_normal_min_max(): - a = st.FitResults(params=[1, 2], errs=[3,4], type='norm') + a = st.FitResults(params=[1, 2], errs=[3, 4], type='norm') d = st.fit_results_to_dict(a, min_bound=-100, max_bound=100) assert d['mu'] == 1 assert d['sigma'] == 2 @@ -138,38 +141,36 @@ def test_fit_results_dict_normal_min_max(): def test_fit_results_dict_exponential(): - a = st.FitResults(params=[2, 2], errs=[3,4], type='expon') + a = st.FitResults(params=[2, 2], errs=[3, 4], type='expon') d = st.fit_results_to_dict(a) - assert d['lambda'] == 1./2 + assert d['lambda'] == 1.0 / 2 assert d['type'] == 'exponential' def test_fit_results_dict_exponential_min_max(): - a = st.FitResults(params=[2, 2], errs=[3,4], type='expon') + a = st.FitResults(params=[2, 2], errs=[3, 4], type='expon') d = st.fit_results_to_dict(a, min_bound=-100, max_bound=100) - assert d['lambda'] == 1./2 + assert d['lambda'] == 1.0 / 2 assert d['min'] == -100 assert d['max'] == 100 assert d['type'] == 'exponential' -def test_scalar_stats(): - data = np.array([1.,2.,3.,4.,5.]) +def test_scalar_stats(): + data = np.array([1.0, 2.0, 3.0, 4.0, 5.0]) result = 
st.scalar_stats(data) - RESULT = {'mean': 3., - 'max': 5., - 'min': 1., - 'std': 1.4142135623730951} + RESULT = {'mean': 3.0, 'max': 5.0, 'min': 1.0, 'std': 1.4142135623730951} assert RESULT == result + def test_compare_two(): - data = np.array([1., 1., 2., 2.]) + data = np.array([1.0, 1.0, 2.0, 2.0]) data_same = np.array([1.0, 1.0, 2.0, 2.0]) data_close = np.array([1.02, 1.01, 2.001, 2.0003]) - data_far = np.array([200., 100., 201]) + data_far = np.array([200.0, 100.0, 201]) results1 = st.compare_two(data, data_same, test=st.StatTests.ks) assert_almost_equal(results1.dist, 0.0) @@ -181,11 +182,12 @@ def test_compare_two(): results3 = st.compare_two(data, data_far, test=st.StatTests.ks) assert_almost_equal(results3.dist, 1.0) + distr1 = np.ones(100) -distr2 = 2*np.ones(100) +distr2 = 2 * np.ones(100) -def test_compare_two_ks(): +def test_compare_two_ks(): results1 = st.compare_two(distr1, distr1, test=st.StatTests.ks) assert_almost_equal(results1.dist, 0.0, decimal=5) assert_almost_equal(results1.pvalue, 1.0, decimal=5) @@ -194,8 +196,8 @@ def test_compare_two_ks(): assert_almost_equal(results2.dist, 1.0, decimal=5) assert_almost_equal(results2.pvalue, 0.0, decimal=5) -def test_compare_two_wilcoxon(): +def test_compare_two_wilcoxon(): results2 = st.compare_two(distr1, distr2, test=st.StatTests.wilcoxon) assert_almost_equal(results2.dist, 0.0, decimal=5) assert_almost_equal(results2.pvalue, 0.0, decimal=5) @@ -203,7 +205,6 @@ def test_compare_two_wilcoxon(): @pytest.mark.filterwarnings("ignore") # Ignore precision warnings def test_compare_two_ttest(): - results1 = st.compare_two(distr1, distr1, test=st.StatTests.ttest) assert np.isnan(results1.dist) @@ -216,26 +217,23 @@ def test_compare_two_ttest(): def test_compare_two_error(): with pytest.raises(TypeError): - data = np.array([1., 1., 2., 2.]) + data = np.array([1.0, 1.0, 2.0, 2.0]) data_same = np.array([1.0, 1.0, 2.0, 2.0]) results1 = st.compare_two(data, data_same, test='test') -def test_total_score(): - 
testList1 = (([1.,1., 1],[1.,1.,1.]), - ([2.,3.,4.,5.],[2.,3.,4.,5.])) +def test_total_score(): + testList1 = (([1.0, 1.0, 1], [1.0, 1.0, 1.0]), ([2.0, 3.0, 4.0, 5.0], [2.0, 3.0, 4.0, 5.0])) score = st.total_score(testList1) - assert_almost_equal(score, 0.) + assert_almost_equal(score, 0.0) - testList2 = (([1.,1., 1],[2.,2.,2.]), - ([2.,3.,4.,5.],[2.,3.,4.,5.])) + testList2 = (([1.0, 1.0, 1], [2.0, 2.0, 2.0]), ([2.0, 3.0, 4.0, 5.0], [2.0, 3.0, 4.0, 5.0])) score = st.total_score(testList2, p=1) - assert_almost_equal(score, 1.) + assert_almost_equal(score, 1.0) - testList3 = (([1.,1., 1],[2.,2.,2.]), - ([3.,3.,3.,3.],[4., 4., 4., 4.])) + testList3 = (([1.0, 1.0, 1], [2.0, 2.0, 2.0]), ([3.0, 3.0, 3.0, 3.0], [4.0, 4.0, 4.0, 4.0])) score = st.total_score(testList3, p=2) - assert_almost_equal(score, np.sqrt(2.)) + assert_almost_equal(score, np.sqrt(2.0)) diff --git a/tests/test_utils.py b/tests/test_utils.py index 2cdbd3cb7..386e18d97 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -39,7 +39,6 @@ def test_warn_deprecated(): - with pytest.warns(NeuroMDeprecationWarning, match="foo"): nu.warn_deprecated(msg="foo") @@ -62,19 +61,10 @@ def test_deprecated_module(): def test_NeuromJSON(): - ex = {'zero': 0, - 'one': np.int64(1), - 'two': np.float32(2.0), - 'three': np.array([1, 2, 3]) - } + ex = {'zero': 0, 'one': np.int64(1), 'two': np.float32(2.0), 'three': np.array([1, 2, 3])} output = json.dumps(ex, cls=nu.NeuromJSON) loaded = json.loads(output) - assert (loaded == - {'zero': 0, - 'one': 1, - 'two': 2.0, - 'three': [1, 2, 3] - }) + assert loaded == {'zero': 0, 'one': 1, 'two': 2.0, 'three': [1, 2, 3]} enc = nu.NeuromJSON() assert enc.default(ex['one']) == 1 @@ -109,7 +99,6 @@ class Grade(nu.OrderedEnum): def test_flatten(): - a = [[1, 2], [3, 4, 5], [6], [7, 8, 9, 10]] assert list(nu.flatten(a)) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] diff --git a/tests/test_viewer.py b/tests/test_viewer.py deleted file mode 100644 index 13ad6058d..000000000 --- 
a/tests/test_viewer.py +++ /dev/null @@ -1,139 +0,0 @@ -# Copyright (c) 2015, Ecole Polytechnique Federale de Lausanne, Blue Brain Project -# All rights reserved. -# -# This file is part of NeuroM -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# 3. Neither the name of the copyright holder nor the names of -# its contributors may be used to endorse or promote products -# derived from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY -# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -import os -import warnings -import tempfile -from pathlib import Path - -import matplotlib - -if 'DISPLAY' not in os.environ: # noqa - matplotlib.use('Agg') # noqa - -from neurom.exceptions import NeuroMDeprecationWarning - -with warnings.catch_warnings(): - # no need to warn about the deprecated module in this test - warnings.simplefilter("ignore", category=NeuroMDeprecationWarning) - from neurom import viewer - -from neurom import NeuriteType, load_morphology -from neurom.view import matplotlib_utils - -import pytest -from numpy.testing import assert_allclose - -DATA_PATH = Path(__file__).parent / 'data/swc' -MORPH_FILENAME = DATA_PATH / 'Neuron.swc' -m = load_morphology(MORPH_FILENAME) - - -def test_draw_morphology(): - viewer.draw(m) - matplotlib_utils.plt.close('all') - - -def test_draw_filter_neurite(): - for mode in ['2d', '3d']: - viewer.draw(m, mode=mode, neurite_type=NeuriteType.basal_dendrite) - assert_allclose(matplotlib_utils.plt.gca().get_ylim(), - [-30., 78], atol=5) - - matplotlib_utils.plt.close('all') - - -def test_draw_morphology3d(): - viewer.draw(m, mode='3d') - matplotlib_utils.plt.close('all') - - with pytest.raises(NotImplementedError): - viewer.draw(m, mode='3d', realistic_diameters=True) - - # for coverage - viewer.draw(m, mode='3d', realistic_diameters=False) - matplotlib_utils.plt.close('all') - - -def test_draw_tree(): - viewer.draw(m.neurites[0]) - matplotlib_utils.plt.close('all') - - -def test_draw_tree3d(): - viewer.draw(m.neurites[0], mode='3d') - matplotlib_utils.plt.close('all') - - -def test_draw_soma(): - viewer.draw(m.soma) - matplotlib_utils.plt.close('all') - - -def test_draw_soma3d(): - viewer.draw(m.soma, mode='3d') - matplotlib_utils.plt.close('all') - - -def test_draw_dendrogram(): - viewer.draw(m, mode='dendrogram') - matplotlib_utils.plt.close('all') - - viewer.draw(m.neurites[0], mode='dendrogram') - matplotlib_utils.plt.close('all') - -def test_draw_dendrogram_empty_segment(): - m = load_morphology(DATA_PATH / 
'empty_segments.swc') - viewer.draw(m, mode='dendrogram') - matplotlib_utils.plt.close('all') - - - -def test_invalid_draw_mode_raises(): - with pytest.raises(viewer.InvalidDrawModeError): - viewer.draw(m, mode='4d') - - -def test_invalid_object_raises(): - with pytest.raises(viewer.NotDrawableError): - class Dummy: - pass - viewer.draw(Dummy()) - - -def test_invalid_combo_raises(): - with pytest.raises(viewer.NotDrawableError): - viewer.draw(m.soma, mode='dendrogram') - - -def test_writing_output(): - with tempfile.TemporaryDirectory() as folder: - output_dir = Path(folder, 'subdir') - viewer.draw(m, mode='2d', output_path=output_dir) - assert (output_dir / 'Figure.png').is_file() - matplotlib_utils.plt.close('all') diff --git a/tests/view/__init__.py b/tests/view/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/view/conftest.py b/tests/view/conftest.py index 414ffa353..78fe99aab 100644 --- a/tests/view/conftest.py +++ b/tests/view/conftest.py @@ -1,10 +1,12 @@ import os import matplotlib + if 'DISPLAY' not in os.environ: # noqa matplotlib.use('Agg') # noqa from neurom.view import matplotlib_utils + matplotlib_utils._get_plt() from neurom.view.matplotlib_utils import plt diff --git a/tests/view/test_dendrogram.py b/tests/view/test_dendrogram.py index df7611495..f0b8ab770 100644 --- a/tests/view/test_dendrogram.py +++ b/tests/view/test_dendrogram.py @@ -20,18 +20,16 @@ def test_create_dendrogram_morphology(): assert soma_len == dendrogram.height assert soma_len == dendrogram.width assert_array_almost_equal( - [[-.5, 0], [-.5, soma_len], [.5, soma_len], [.5, 0]], - dendrogram.coords) + [[-0.5, 0], [-0.5, soma_len], [0.5, soma_len], [0.5, 0]], dendrogram.coords + ) assert len(m.neurites) == len(dendrogram.children) def test_dendrogram_get_coords(): segment_lengts = np.array([0, 1, 1]) - segment_radii = np.array([.5, 1, .25]) + segment_radii = np.array([0.5, 1, 0.25]) coords = dm.Dendrogram.get_coords(segment_lengts, segment_radii) - 
assert_array_almost_equal( - [[-.5, 0], [-1, 1], [-.25, 2], [.25, 2], [1, 1], [.5, 0]], - coords) + assert_array_almost_equal([[-0.5, 0], [-1, 1], [-0.25, 2], [0.25, 2], [1, 1], [0.5, 0]], coords) def test_create_dendrogram_neurite(): @@ -71,15 +69,13 @@ def test_layout_dendrogram(): def assert_layout(dendrogram): for i, child in enumerate(dendrogram.children): # child is higher than parent in Y coordinate - assert ( - positions[child][1] >= - positions[dendrogram][1] + dendrogram.height) + assert positions[child][1] >= positions[dendrogram][1] + dendrogram.height if i < len(dendrogram.children) - 1: next_child = dendrogram.children[i + 1] # X space between child is enough for their widths - assert ( - positions[next_child][0] - positions[child][0] > - .5 * (next_child.width + child.width)) + assert positions[next_child][0] - positions[child][0] > 0.5 * ( + next_child.width + child.width + ) assert_layout(child) m = load_morphology(NEURON_PATH) diff --git a/tests/view/test_matplotlib_impl.py b/tests/view/test_matplotlib_impl.py index c191a59e5..810a5c1a3 100644 --- a/tests/view/test_matplotlib_impl.py +++ b/tests/view/test_matplotlib_impl.py @@ -39,10 +39,10 @@ DATA_PATH = Path(__file__).parent.parent / 'data' SWC_PATH = DATA_PATH / 'swc' -tree_colors = {'black': np.array([[0., 0., 0., 1.] 
for _ in range(3)]), - None: [[1., 0., 0., 1.], - [1., 0., 0., 1.], - [0.501961, 0., 0.501961, 1.]]} +tree_colors = { + 'black': np.array([[0.0, 0.0, 0.0, 1.0] for _ in range(3)]), + None: [[1.0, 0.0, 0.0, 1.0], [1.0, 0.0, 0.0, 1.0], [0.501961, 0.0, 0.501961, 1.0]], +} def test_tree_diameter_scale(get_fig_2d): @@ -50,7 +50,9 @@ def test_tree_diameter_scale(get_fig_2d): fig, ax = get_fig_2d tree = m.neurites[0] for input_color, expected_colors in tree_colors.items(): - matplotlib_impl.plot_tree(tree, ax, color=input_color, diameter_scale=None, alpha=1., linewidth=1.2) + matplotlib_impl.plot_tree( + tree, ax, color=input_color, diameter_scale=None, alpha=1.0, linewidth=1.2 + ) collection = ax.collections[0] assert collection.get_linewidth()[0] == 1.2 assert_array_almost_equal(collection.get_colors(), expected_colors) @@ -62,7 +64,9 @@ def test_tree_diameter_real(get_fig_2d): fig, ax = get_fig_2d tree = m.neurites[0] for input_color, expected_colors in tree_colors.items(): - matplotlib_impl.plot_tree(tree, ax, color=input_color, alpha=1., linewidth=1.2, realistic_diameters=True) + matplotlib_impl.plot_tree( + tree, ax, color=input_color, alpha=1.0, linewidth=1.2, realistic_diameters=True + ) collection = ax.collections[0] assert collection.get_linewidth()[0] == 1.0 assert_array_almost_equal(collection.get_facecolors(), expected_colors) @@ -80,7 +84,7 @@ def test_tree_bounds(get_fig_2d): m = load_morphology(SWC_PATH / 'simple-different-section-types.swc') fig, ax = get_fig_2d matplotlib_impl.plot_tree(m.neurites[0], ax=ax) - np.testing.assert_allclose(ax.dataLim.bounds, (-5., 0., 11., 5.)) + np.testing.assert_allclose(ax.dataLim.bounds, (-5.0, 0.0, 11.0, 5.0)) def test_morph(get_fig_2d): @@ -88,9 +92,13 @@ def test_morph(get_fig_2d): fig, ax = get_fig_2d matplotlib_impl.plot_morph(m, ax=ax) assert ax.get_title() == m.name - assert_allclose(ax.dataLim.get_points(), - [[-40.32853516, -57.600172], - [64.74726272, 48.51626225], ]) + assert_allclose( + 
ax.dataLim.get_points(), + [ + [-40.32853516, -57.600172], + [64.74726272, 48.51626225], + ], + ) with pytest.raises(AssertionError): matplotlib_impl.plot_tree(m, ax, plane='wrong') @@ -102,9 +110,9 @@ def test_tree3d(get_fig_3d): tree = m.neurites[0] matplotlib_impl.plot_tree3d(tree, ax) xy_bounds = ax.xy_dataLim.bounds - np.testing.assert_allclose(xy_bounds, (-5., 0., 11., 5.)) + np.testing.assert_allclose(xy_bounds, (-5.0, 0.0, 11.0, 5.0)) zz_bounds = ax.zz_dataLim.bounds - np.testing.assert_allclose(zz_bounds, (0., 0., 1., 1.)) + np.testing.assert_allclose(zz_bounds, (0.0, 0.0, 1.0, 1.0)) def test_morph3d(get_fig_3d): @@ -112,11 +120,14 @@ def test_morph3d(get_fig_3d): fig, ax = get_fig_3d matplotlib_impl.plot_morph3d(m, ax) assert ax.get_title() == m.name - assert_allclose(ax.xy_dataLim.get_points(), - [[-40.32853516, -57.600172], - [64.74726272, 48.51626225], ]) - assert_allclose(ax.zz_dataLim.get_points().T[0], - (-00.09999862, 54.20408797)) + assert_allclose( + ax.xy_dataLim.get_points(), + [ + [-40.32853516, -57.600172], + [64.74726272, 48.51626225], + ], + ) + assert_allclose(ax.zz_dataLim.get_points().T[0], (-00.09999862, 54.20408797)) def test_morph_no_neurites(): @@ -133,10 +144,10 @@ def test_dendrogram(get_fig_2d): m = load_morphology(SWC_PATH / 'Neuron.swc') fig, ax = get_fig_2d matplotlib_impl.plot_dendrogram(m, ax) - assert_allclose(ax.get_xlim(), (-10., 180.), rtol=0.25) + assert_allclose(ax.get_xlim(), (-10.0, 180.0), rtol=0.25) matplotlib_impl.plot_dendrogram(m, ax, show_diameters=False) - assert_allclose(ax.get_xlim(), (-10., 180.), rtol=0.25) + assert_allclose(ax.get_xlim(), (-10.0, 180.0), rtol=0.25) matplotlib_impl.plot_dendrogram(m.neurites[0], ax, show_diameters=False) @@ -146,31 +157,43 @@ def test_dendrogram(get_fig_2d): with warnings.catch_warnings(record=True): # upright, uniform radius, multiple cylinders - soma_3pt_normal = load_morphology(StringIO(u"""1 1 0 -10 0 10 -1 + soma_3pt_normal = load_morphology( + StringIO( + u"""1 1 0 
-10 0 10 -1 2 1 0 0 0 10 1 - 3 1 0 10 0 10 2"""), reader='swc').soma + 3 1 0 10 0 10 2""" + ), + reader='swc', + ).soma # increasing radius, multiple cylinders - soma_4pt_normal_cylinder = load_morphology(StringIO(u"""1 1 0 0 0 1 -1 + soma_4pt_normal_cylinder = load_morphology( + StringIO( + u"""1 1 0 0 0 1 -1 2 1 0 -10 0 2 1 3 1 0 -10 10 4 2 - 4 1 -10 -10 -10 4 3"""), reader='swc').soma - - soma_4pt_normal_contour = load_morphology(StringIO(u"""((CellBody) + 4 1 -10 -10 -10 4 3""" + ), + reader='swc', + ).soma + + soma_4pt_normal_contour = load_morphology( + StringIO( + u"""((CellBody) (0 0 0 1) (0 -10 0 2) (0 -10 10 4) - (-10 -10 -10 4))"""), reader='asc').soma + (-10 -10 -10 4))""" + ), + reader='asc', + ).soma def test_soma(get_fig_2d): m = load_morphology(SWC_PATH / 'Neuron.swc') soma0 = m.soma fig, ax = get_fig_2d - for s in (soma0, - soma_3pt_normal, - soma_4pt_normal_cylinder, - soma_4pt_normal_contour): + for s in (soma0, soma_3pt_normal, soma_4pt_normal_cylinder, soma_4pt_normal_contour): matplotlib_impl.plot_soma(s, ax) matplotlib_utils.plt.close(fig) @@ -181,9 +204,9 @@ def test_soma(get_fig_2d): def test_soma3d(get_fig_3d): _, ax = get_fig_3d matplotlib_impl.plot_soma3d(soma_3pt_normal, ax) - assert_allclose(ax.get_xlim(), (-11., 11.), atol=2) - assert_allclose(ax.get_ylim(), (-11., 11.), atol=2) - assert_allclose(ax.get_zlim(), (-10., 10.), atol=2) + assert_allclose(ax.get_xlim(), (-11.0, 11.0), atol=2) + assert_allclose(ax.get_ylim(), (-11.0, 11.0), atol=2) + assert_allclose(ax.get_zlim(), (-10.0, 10.0), atol=2) def test_get_color(): @@ -202,11 +225,11 @@ def test_filter_neurite(): fig, ax = matplotlib_utils.get_figure(params={'projection': '3d'}) matplotlib_impl.plot_morph3d(m, ax, neurite_type=NeuriteType.basal_dendrite) matplotlib_utils.plot_style(fig=fig, ax=ax) - assert_allclose(matplotlib_utils.plt.gca().get_ylim(), [-30., 78], atol=5) + assert_allclose(matplotlib_utils.plt.gca().get_ylim(), [-30.0, 78], atol=5) 
matplotlib_utils.plt.close('all') fig, ax = matplotlib_utils.get_figure() matplotlib_impl.plot_morph(m, ax, neurite_type=NeuriteType.basal_dendrite) matplotlib_utils.plot_style(fig=fig, ax=ax) - assert_allclose(matplotlib_utils.plt.gca().get_ylim(), [-30., 78], atol=5) + assert_allclose(matplotlib_utils.plt.gca().get_ylim(), [-30.0, 78], atol=5) matplotlib_utils.plt.close('all') diff --git a/tests/view/test_matplotlib_utils.py b/tests/view/test_matplotlib_utils.py index 8afa57ede..2bbb4277d 100644 --- a/tests/view/test_matplotlib_utils.py +++ b/tests/view/test_matplotlib_utils.py @@ -29,9 +29,20 @@ import tempfile import numpy as np -from neurom.view.matplotlib_utils import (plt, figure_naming, get_figure, save_plot, plot_style, - plot_title, plot_labels, plot_legend, update_plot_limits, plot_ticks, - plot_sphere, plot_cylinder) +from neurom.view.matplotlib_utils import ( + plt, + figure_naming, + get_figure, + save_plot, + plot_style, + plot_title, + plot_labels, + plot_legend, + update_plot_limits, + plot_ticks, + plot_sphere, + plot_cylinder, +) import pytest @@ -42,7 +53,9 @@ def test_figure_naming(): assert prefile == "" assert postfile == "_3" - pretitle, posttitle, prefile, postfile = figure_naming(pretitle='', posttitle="Test", prefile="test", postfile="") + pretitle, posttitle, prefile, postfile = figure_naming( + pretitle='', posttitle="Test", prefile="test", postfile="" + ) assert pretitle == "" assert posttitle == " -- Test" assert prefile == "test_" @@ -203,13 +216,13 @@ def test_plot_style(get_fig_2d): def test_plot_cylinder(): fig0, ax0 = get_figure(params={'projection': '3d'}) start, end = np.array([0, 0, 0]), np.array([1, 0, 0]) - plot_cylinder(ax0, start=start, end=end, - start_radius=0, end_radius=10., - color='black', alpha=1.) 
+ plot_cylinder( + ax0, start=start, end=end, start_radius=0, end_radius=10.0, color='black', alpha=1.0 + ) assert ax0.has_data() def test_plot_sphere(): fig0, ax0 = get_figure(params={'projection': '3d'}) - plot_sphere(ax0, [0, 0, 0], 10., color='black', alpha=1.) + plot_sphere(ax0, [0, 0, 0], 10.0, color='black', alpha=1.0) assert ax0.has_data() diff --git a/tests/view/test_plotly_impl.py b/tests/view/test_plotly_impl.py index 4523c9f7c..5872a9daa 100644 --- a/tests/view/test_plotly_impl.py +++ b/tests/view/test_plotly_impl.py @@ -16,6 +16,7 @@ def _reload_module(module): """Force module reload.""" import importlib + importlib.reload(module) @@ -25,17 +26,19 @@ def test_plotly_extra_not_installed(): _reload_module(neurom.view.plotly_impl) assert False, "ImportError not triggered" except ImportError as e: - assert (str(e) == - 'neurom[plotly] is not installed. ' - 'Please install it by doing: pip install neurom[plotly]') + assert ( + str(e) == 'neurom[plotly] is not installed. ' + 'Please install it by doing: pip install neurom[plotly]' + ) def test_plotly_draw_morph3d(): plotly_impl.plot_morph3d(m, auto_open=False) plotly_impl.plot_morph3d(m.neurites[0], auto_open=False) - fig = plotly_impl.plot_morph3d(load_morphology(SWC_PATH / 'simple-different-soma.swc'), - auto_open=False) + fig = plotly_impl.plot_morph3d( + load_morphology(SWC_PATH / 'simple-different-soma.swc'), auto_open=False + ) x, y, z = [fig['data'][2][key] for key in str('xyz')] assert_allclose(x[0, 0], 2) assert_allclose(x[33, 33], -1.8971143170299758) diff --git a/tox.ini b/tox.ini index 829d8e1e3..ca60be8e4 100644 --- a/tox.ini +++ b/tox.ini @@ -16,9 +16,10 @@ envlist = deps = {[base]testdeps} pytest-cov + coverage[toml]>=6.3 extras = plotly -commands = pytest \ - --cov={envsitepackagesdir}/{[base]name} \ +commands = python -m pytest \ + --cov={[base]name} \ --cov-report term-missing \ --cov-fail-under=100 \ --cov-report=xml \ @@ -26,28 +27,45 @@ commands = pytest \ {posargs} [testenv:lint] 
-basepython=python3.8 deps = + isort + black pycodestyle pydocstyle astroid pylint commands = + isort --check-only --diff {[base]name} + black --check . pycodestyle --exclude=tests neurom pydocstyle --match-dir='(?!test).*' {toxinidir}/neurom pylint --rcfile=pylintrc --extension-pkg-whitelist=numpy --ignore=tests neurom +[testenv:format] +skip_install = true +deps = + isort + black +commands = + isort {[base]name} + black . + [testenv:docs] -basepython=python3.9 -changedir = doc extras = docs commands = - # remove autosummary output - rm -rf {toxinidir}/doc/source/_neurom_build - make clean - make html SPHINXOPTS=-W + # remove autosummary output and cleanup + rm -rf {toxinidir}/doc/source/_neurom_build {toxinidir}/doc/build/* + sphinx-build -b doctest \ + {toxinidir}/doc/source \ + {toxinidir}/doc/build/doctest \ + -d {toxinidir}/doc/build/doctrees \ + -W + sphinx-build -b html \ + {toxinidir}/doc/source \ + {toxinidir}/doc/build/html \ + -d {toxinidir}/doc/build/doctrees \ + -W allowlist_externals = - make rm [testenv:tutorial] @@ -71,6 +89,8 @@ commands = [pycodestyle] max-line-length=100 +# E203,W503 needed for black +ignore = E203,W503 [pydocstyle] convention = google