From 8649c064f811b73fee2f4c59012f5bba2fda5eb9 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 4 Nov 2024 22:16:32 +0000
Subject: [PATCH 1/2] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/pre-commit/pre-commit-hooks: v4.4.0 → v5.0.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.4.0...v5.0.0)
- [github.com/psf/black: 23.3.0 → 24.10.0](https://github.com/psf/black/compare/23.3.0...24.10.0)
- [github.com/PyCQA/isort: 5.12.0 → 5.13.2](https://github.com/PyCQA/isort/compare/5.12.0...5.13.2)
- https://github.com/charliermarsh/ruff-pre-commit → https://github.com/astral-sh/ruff-pre-commit
- [github.com/astral-sh/ruff-pre-commit: v0.0.267 → v0.7.2](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.267...v0.7.2)
- [github.com/executablebooks/mdformat: 0.7.16 → 0.7.18](https://github.com/executablebooks/mdformat/compare/0.7.16...0.7.18)
---
 .pre-commit-config.yaml | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 36a517b8..15c41768 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,7 +2,7 @@ default_language_version:
   python: python3
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v5.0.0
     hooks:
       - id: check-added-large-files
       - id: check-case-conflict
@@ -20,11 +20,11 @@ repos:
       - id: sort-simple-yaml
       - id: trailing-whitespace
   - repo: https://github.com/psf/black
-    rev: 23.3.0
+    rev: 24.10.0
     hooks:
       - id: black
   - repo: https://github.com/PyCQA/isort
-    rev: 5.12.0
+    rev: 5.13.2
     hooks:
       - id: isort
 #  - repo: https://github.com/codespell-project/codespell
@@ -32,14 +32,14 @@
 #    hooks:
 #      - id: codespell
 #        additional_dependencies: ["tomli"]
-  - repo: https://github.com/charliermarsh/ruff-pre-commit
+  - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: 'v0.0.267'
+    rev: 'v0.7.2'
     hooks:
       - id: ruff
         args: [--fix, --exit-non-zero-on-fix]
   - repo: https://github.com/executablebooks/mdformat
-    rev: 0.7.16
+    rev: 0.7.18
     hooks:
       - id: mdformat
         args: ["--wrap=80"]

From 9ecc5188cee443fefc106f7effad66816314bd1e Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 4 Nov 2024 22:16:51 +0000
Subject: [PATCH 2/2] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 CI/functional_tests/test_molten_salts.py | 3 +++
 CI/functional_tests/test_water_study.py | 3 +++
 .../calculators/__test_structure_factor.py | 1 +
 .../_test_green_kubo_thermal_conductivity.py | 1 +
 .../calculators/_test_green_kubo_viscosity.py | 1 +
 ...test_nernst_einstein_ionic_conductivity.py | 1 +
 .../test_angular_distribution_function.py | 1 +
 .../calculators/test_coordination_numbers.py | 1 +
 .../test_einstein_diffusion_coefficients.py | 1 +
 ...instein_distinct_diffusion_coefficients.py | 1 +
 ...est_einstein_helfand_ionic_conductivity.py | 2 ++
 ...en_kubo_distinct_diffusion_coefficients.py | 1 +
 .../test_green_kubo_ionic_conductivity.py | 1 +
 ..._green_kubo_self_diffusion_coefficients.py | 5 ++---
 .../test_kirkwood_buff_integrals.py | 1 +
 .../test_potential_of_mean_force.py | 1 +
 .../test_radial_distribution_function.py | 1 +
 .../test_molecular_mapping_results.py | 4 ++++
 .../test_transformation_run_options.py | 4 ++++
 .../visualizer/znvis_visualizer.py | 2 ++
 .../database/test_experiment_database.py | 1 +
 CI/unit_tests/database/test_file_read.py | 1 +
 .../database/test_simulation_database.py | 6 ++++++
 CI/unit_tests/experiment/test_Experiment.py | 1 +
 .../experiment/test_run_computation.py | 1 +
 .../graph_modules/test_molecular_graph.py | 4 ++++
 .../memory_manager/test_memory_manager.py | 8 +++++++
 .../project/test_project_add_experiment.py | 1 +
 .../project/test_project_database.py | 1 +
 .../project/test_project_instantiation.py | 1 +
 .../project/test_project_load_experiments.py | 1 +
 .../test_transformator_parent.py | 1 +
 .../utils/test_calculator_helper_methods.py | 4 ++++
 CI/unit_tests/utils/test_constants.py | 1 +
 CI/unit_tests/utils/test_meta_functions.py | 14 +++++++++++++
 CI/unit_tests/utils/test_molecule_class.py | 2 ++
 CI/unit_tests/utils/test_scaling_functions.py | 5 +++++
 CI/unit_tests/utils/test_testing.py | 2 ++
 CI/unit_tests/utils/test_units.py | 1 +
 docs/source/conf.py | 1 +
 examples/notebooks/Mapping_Molecules.ipynb | 6 +-----
 .../notebooks/Molten_Salt_Comparison.ipynb | 5 +----
 mdsuite/__init__.py | 1 +
 mdsuite/calculators/__init__.py | 1 +
 .../angular_distribution_function.py | 10 +++++++++
 mdsuite/calculators/calculator.py | 6 ++++++
 .../coordination_number_calculation.py | 8 +++++++
 .../einstein_diffusion_coefficients.py | 5 +++++
 ...instein_distinct_diffusion_coefficients.py | 8 +++++++
 .../einstein_helfand_ionic_conductivity.py | 5 +++++
 .../einstein_helfand_thermal_conductivity.py | 5 +++++
 .../einstein_helfand_thermal_kinaci.py | 4 ++++
 ...en_kubo_distinct_diffusion_coefficients.py | 10 +++++++++
 .../green_kubo_ionic_conductivity.py | 5 +++++
 .../green_kubo_self_diffusion_coefficients.py | 7 +++++++
 .../green_kubo_thermal_conductivity.py | 5 +++++
 mdsuite/calculators/green_kubo_viscosity.py | 5 +++++
 .../calculators/green_kubo_viscosity_flux.py | 5 +++++
 .../calculators/kirkwood_buff_integrals.py | 5 +++++
 .../nernst_einstein_ionic_conductivity.py | 8 +++++++
 .../calculators/potential_of_mean_force.py | 9 ++++++++
.../radial_distribution_function.py | 21 +++++++++++++++++++ .../spatial_distribution_function.py | 5 +++++ mdsuite/calculators/structure_factor.py | 9 ++++++++ mdsuite/calculators/trajectory_calculator.py | 9 ++++++++ .../calculators/transformations_reference.py | 1 + mdsuite/database/calculator_database.py | 4 ++++ mdsuite/database/data_manager.py | 11 ++++++++++ mdsuite/database/database_base.py | 2 ++ mdsuite/database/experiment_database.py | 7 +++++++ mdsuite/database/mdsuite_properties.py | 1 + mdsuite/database/project_database.py | 2 ++ mdsuite/database/scheme.py | 5 +++++ mdsuite/database/simulation_database.py | 18 ++++++++++++++++ mdsuite/experiment/experiment.py | 15 +++++++++++++ mdsuite/experiment/run.py | 3 +++ mdsuite/experiment/run_module.py | 5 +++++ mdsuite/file_io/chemfiles_read.py | 3 +++ mdsuite/file_io/extxyz_files.py | 5 +++++ mdsuite/file_io/file_read.py | 2 ++ mdsuite/file_io/lammps_flux_files.py | 3 +++ mdsuite/file_io/lammps_trajectory_files.py | 2 ++ mdsuite/file_io/script_input.py | 3 +++ mdsuite/file_io/tabular_text_files.py | 10 +++++++++ mdsuite/graph_modules/molecular_graph.py | 11 ++++++++++ mdsuite/memory_management/__init__.py | 1 + mdsuite/memory_management/memory_manager.py | 11 ++++++++++ mdsuite/project/project.py | 9 ++++++++ mdsuite/time_series/__init__.py | 1 + mdsuite/time_series/base.py | 2 ++ mdsuite/time_series/energies.py | 1 + mdsuite/transformations/__init__.py | 1 + .../integrated_heat_current.py | 1 + mdsuite/transformations/ionic_current.py | 1 + .../kinaci_integrated_heat_current.py | 1 + mdsuite/transformations/map_molecules.py | 9 ++++++++ mdsuite/transformations/momentum_flux.py | 1 + mdsuite/transformations/scale_coordinates.py | 1 + mdsuite/transformations/test_trafos.py | 1 + mdsuite/transformations/thermal_flux.py | 1 + .../transformations/transformation_dict.py | 1 + mdsuite/transformations/transformations.py | 10 +++++++++ .../translational_dipole_moment.py | 1 + mdsuite/transformations/unwrap_coordinates.py | 1 + mdsuite/transformations/unwrap_via_indices.py | 1 + .../velocity_from_positions.py | 1 + mdsuite/transformations/wrap_coordinates.py | 1 + mdsuite/utils/calculator_helper_methods.py | 5 +++++ mdsuite/utils/colours.py | 1 + mdsuite/utils/config.py | 2 ++ mdsuite/utils/constants.py | 1 + mdsuite/utils/helpers.py | 4 ++++ mdsuite/utils/linalg.py | 4 ++++ mdsuite/utils/meta_functions.py | 14 +++++++++++++ mdsuite/utils/molecule.py | 2 ++ mdsuite/utils/neighbour_list.py | 1 + .../utils/report_computer_characteristics.py | 1 + mdsuite/utils/scale_functions.py | 6 ++++++ mdsuite/utils/tensor_flow/layers.py | 2 ++ mdsuite/utils/testing.py | 1 + mdsuite/utils/units.py | 1 + mdsuite/visualizer/d2_data_visualization.py | 3 +++ mdsuite/visualizer/d3_data_visualizer.py | 6 ++++++ mdsuite/visualizer/znvis_visualizer.py | 5 +++++ setup.py | 1 + 125 files changed, 479 insertions(+), 12 deletions(-) diff --git a/CI/functional_tests/test_molten_salts.py b/CI/functional_tests/test_molten_salts.py index 6d30a73c..9d27e630 100644 --- a/CI/functional_tests/test_molten_salts.py +++ b/CI/functional_tests/test_molten_salts.py @@ -25,6 +25,7 @@ ------- Perform a functional test on two molten salts. """ + from typing import Tuple import pytest @@ -73,6 +74,7 @@ def mdsuite_project(traj_files, tmp_path) -> mds.Project: ------- project: mdsuite.Project An MDSuite project to be tested. 
+ """ project = mds.Project(storage_path=tmp_path.as_posix()) @@ -114,6 +116,7 @@ def test_analysis(mdsuite_project): ----- See the link below for similar data for CNs for molten salts. https://link.springer.com/article/10.1007/s10800-018-1197-z + """ NaCl_experiment = mdsuite_project.experiments.NaCl KCl_experiment = mdsuite_project.experiments.KCl diff --git a/CI/functional_tests/test_water_study.py b/CI/functional_tests/test_water_study.py index f289581f..60b1f8b4 100644 --- a/CI/functional_tests/test_water_study.py +++ b/CI/functional_tests/test_water_study.py @@ -25,6 +25,7 @@ ------- Functional test for the analysis of a GROMACS water simulation. """ + from typing import List import pytest @@ -65,6 +66,7 @@ def mdsuite_project(traj_files, tmp_path) -> mdsuite.Project: ------- project: mdsuite.Project An MDSuite project to be tested. + """ gmx_units = Units( time=1e-12, @@ -102,6 +104,7 @@ def test_water_analysis(mdsuite_project): ----- The diffusion, angle, and eventually coordination data tested here are comparable with values taken from experiment and published studies. + """ water = mdsuite_project.experiments["water_sim"] diff --git a/CI/integration_tests/calculators/__test_structure_factor.py b/CI/integration_tests/calculators/__test_structure_factor.py index 80edfbf3..e705bc91 100644 --- a/CI/integration_tests/calculators/__test_structure_factor.py +++ b/CI/integration_tests/calculators/__test_structure_factor.py @@ -24,6 +24,7 @@ Summary ------- """ + import json import os from pathlib import Path diff --git a/CI/integration_tests/calculators/_test_green_kubo_thermal_conductivity.py b/CI/integration_tests/calculators/_test_green_kubo_thermal_conductivity.py index 75cc696e..792ca3c0 100644 --- a/CI/integration_tests/calculators/_test_green_kubo_thermal_conductivity.py +++ b/CI/integration_tests/calculators/_test_green_kubo_thermal_conductivity.py @@ -24,6 +24,7 @@ Summary ------- """ + import json import os from pathlib import Path diff --git a/CI/integration_tests/calculators/_test_green_kubo_viscosity.py b/CI/integration_tests/calculators/_test_green_kubo_viscosity.py index 058dcfb8..2bc74440 100644 --- a/CI/integration_tests/calculators/_test_green_kubo_viscosity.py +++ b/CI/integration_tests/calculators/_test_green_kubo_viscosity.py @@ -24,6 +24,7 @@ Summary ------- """ + import json import os from pathlib import Path diff --git a/CI/integration_tests/calculators/_test_nernst_einstein_ionic_conductivity.py b/CI/integration_tests/calculators/_test_nernst_einstein_ionic_conductivity.py index 4dca532f..36877c73 100644 --- a/CI/integration_tests/calculators/_test_nernst_einstein_ionic_conductivity.py +++ b/CI/integration_tests/calculators/_test_nernst_einstein_ionic_conductivity.py @@ -24,6 +24,7 @@ Summary ------- """ + import json import os from pathlib import Path diff --git a/CI/integration_tests/calculators/test_angular_distribution_function.py b/CI/integration_tests/calculators/test_angular_distribution_function.py index 85903312..47f48551 100644 --- a/CI/integration_tests/calculators/test_angular_distribution_function.py +++ b/CI/integration_tests/calculators/test_angular_distribution_function.py @@ -24,6 +24,7 @@ Summary ------- """ + import os import pytest diff --git a/CI/integration_tests/calculators/test_coordination_numbers.py b/CI/integration_tests/calculators/test_coordination_numbers.py index 231cb4c7..d904a9a3 100644 --- a/CI/integration_tests/calculators/test_coordination_numbers.py +++ b/CI/integration_tests/calculators/test_coordination_numbers.py @@ -24,6 
+24,7 @@ Summary ------- """ + import os import pytest diff --git a/CI/integration_tests/calculators/test_einstein_diffusion_coefficients.py b/CI/integration_tests/calculators/test_einstein_diffusion_coefficients.py index 24d67c08..f40d82b1 100644 --- a/CI/integration_tests/calculators/test_einstein_diffusion_coefficients.py +++ b/CI/integration_tests/calculators/test_einstein_diffusion_coefficients.py @@ -24,6 +24,7 @@ Summary ------- """ + import dataclasses import os diff --git a/CI/integration_tests/calculators/test_einstein_distinct_diffusion_coefficients.py b/CI/integration_tests/calculators/test_einstein_distinct_diffusion_coefficients.py index ac2c80f2..e8c24397 100644 --- a/CI/integration_tests/calculators/test_einstein_distinct_diffusion_coefficients.py +++ b/CI/integration_tests/calculators/test_einstein_distinct_diffusion_coefficients.py @@ -31,6 +31,7 @@ values. """ + import os import pytest diff --git a/CI/integration_tests/calculators/test_einstein_helfand_ionic_conductivity.py b/CI/integration_tests/calculators/test_einstein_helfand_ionic_conductivity.py index 29b6689b..df32c116 100644 --- a/CI/integration_tests/calculators/test_einstein_helfand_ionic_conductivity.py +++ b/CI/integration_tests/calculators/test_einstein_helfand_ionic_conductivity.py @@ -24,6 +24,7 @@ Summary ------- """ + import os import pytest @@ -61,6 +62,7 @@ def test_project(traj_file, true_values, tmp_path, desired_memory): Notes ----- Test uncertainty is very high! + """ with mds.utils.helpers.change_memory_fraction(desired_memory=desired_memory): os.chdir(tmp_path) diff --git a/CI/integration_tests/calculators/test_green_kubo_distinct_diffusion_coefficients.py b/CI/integration_tests/calculators/test_green_kubo_distinct_diffusion_coefficients.py index 6bff4151..5a2aae30 100644 --- a/CI/integration_tests/calculators/test_green_kubo_distinct_diffusion_coefficients.py +++ b/CI/integration_tests/calculators/test_green_kubo_distinct_diffusion_coefficients.py @@ -24,6 +24,7 @@ Summary ------- """ + import os import pytest diff --git a/CI/integration_tests/calculators/test_green_kubo_ionic_conductivity.py b/CI/integration_tests/calculators/test_green_kubo_ionic_conductivity.py index e7b86513..4bb4ca29 100644 --- a/CI/integration_tests/calculators/test_green_kubo_ionic_conductivity.py +++ b/CI/integration_tests/calculators/test_green_kubo_ionic_conductivity.py @@ -24,6 +24,7 @@ Summary ------- """ + import os import numpy as np diff --git a/CI/integration_tests/calculators/test_green_kubo_self_diffusion_coefficients.py b/CI/integration_tests/calculators/test_green_kubo_self_diffusion_coefficients.py index c9b6c1a2..25b857d4 100644 --- a/CI/integration_tests/calculators/test_green_kubo_self_diffusion_coefficients.py +++ b/CI/integration_tests/calculators/test_green_kubo_self_diffusion_coefficients.py @@ -24,6 +24,7 @@ Summary ------- """ + import dataclasses import os @@ -103,9 +104,7 @@ def test_calculator(tmp_path, desired_memory): time_should_be = time_step * np.arange(0, vacf_range) * units.time thermal_vel_SI = np.sqrt(3 * kT / mass) * units.length / units.time relaxation_time_SI = relaxation_time * units.time - vacf_should_be = thermal_vel_SI**2 * np.exp( - -time_should_be / relaxation_time_SI - ) + vacf_should_be = thermal_vel_SI**2 * np.exp(-time_should_be / relaxation_time_SI) diff_coeff_should_be = diff_coeff * units.length**2 / units.time np.testing.assert_allclose(res["time"], time_should_be, atol=1e-6) diff --git a/CI/integration_tests/calculators/test_kirkwood_buff_integrals.py 
b/CI/integration_tests/calculators/test_kirkwood_buff_integrals.py index 6aa275e3..7225b7ca 100644 --- a/CI/integration_tests/calculators/test_kirkwood_buff_integrals.py +++ b/CI/integration_tests/calculators/test_kirkwood_buff_integrals.py @@ -24,6 +24,7 @@ Summary ------- """ + import os import pytest diff --git a/CI/integration_tests/calculators/test_potential_of_mean_force.py b/CI/integration_tests/calculators/test_potential_of_mean_force.py index f1799656..014e0d00 100644 --- a/CI/integration_tests/calculators/test_potential_of_mean_force.py +++ b/CI/integration_tests/calculators/test_potential_of_mean_force.py @@ -24,6 +24,7 @@ Summary ------- """ + import os import pytest diff --git a/CI/integration_tests/calculators/test_radial_distribution_function.py b/CI/integration_tests/calculators/test_radial_distribution_function.py index 787c3c3a..db1e8c23 100644 --- a/CI/integration_tests/calculators/test_radial_distribution_function.py +++ b/CI/integration_tests/calculators/test_radial_distribution_function.py @@ -24,6 +24,7 @@ Summary ------- """ + import os import pytest diff --git a/CI/integration_tests/transformations/test_molecular_mapping_results.py b/CI/integration_tests/transformations/test_molecular_mapping_results.py index 8e76ee68..d93f4fb3 100644 --- a/CI/integration_tests/transformations/test_molecular_mapping_results.py +++ b/CI/integration_tests/transformations/test_molecular_mapping_results.py @@ -25,6 +25,7 @@ ------- Test the outcome of molecular mapping. """ + from typing import List, Tuple import pytest @@ -73,6 +74,7 @@ def mdsuite_project(traj_files, tmp_path) -> mdsuite.Project: ------- project: mdsuite.Project An MDSuite project to be tested. + """ water_files = traj_files[0] bmim_file = traj_files[1] @@ -138,6 +140,7 @@ def test_water_molecule_smiles(self, mdsuite_project): ------- Tests that the molecule groups detected are done so correctly and that the constructed trajectory is also correct. + """ reference_molecules = { "water": MoleculeInfo( @@ -187,6 +190,7 @@ def test_water_molecule_reference_dict(self, mdsuite_project): ------- Tests that the molecule groups detected are done so correctly and that the constructed trajectory is also correct. + """ mdsuite_project.experiments["ligand_water"].species["OW"].mass = [15.999] mdsuite_project.experiments["ligand_water"].species["HW1"].mass = [1.00784] diff --git a/CI/integration_tests/transformations/test_transformation_run_options.py b/CI/integration_tests/transformations/test_transformation_run_options.py index 55654ee2..cb08adf3 100644 --- a/CI/integration_tests/transformations/test_transformation_run_options.py +++ b/CI/integration_tests/transformations/test_transformation_run_options.py @@ -45,6 +45,7 @@ def test_from_project(mdsuite_project): Notes ----- Does not check actual values just runs the transformation. + """ mdsuite_project.run.CoordinateUnwrapper() @@ -57,6 +58,7 @@ def test_from_project_twice(mdsuite_project): Notes ----- Does not check actual values just runs the transformation. + """ mdsuite_project.run.CoordinateUnwrapper() mdsuite_project.run.CoordinateUnwrapper() @@ -69,6 +71,7 @@ def test_from_experiment(mdsuite_project): Notes ----- Does not check actual values just runs the transformation. + """ mdsuite_project.experiments.NaCl.run.CoordinateUnwrapper() @@ -81,6 +84,7 @@ def test_from_experiment_twice(mdsuite_project): Notes ----- Does not check actual values just runs the transformation. 
+ """ mdsuite_project.experiments.NaCl.run.CoordinateUnwrapper() mdsuite_project.experiments.NaCl.run.CoordinateUnwrapper() diff --git a/CI/integration_tests/visualizer/znvis_visualizer.py b/CI/integration_tests/visualizer/znvis_visualizer.py index 16c1f808..a2acec83 100644 --- a/CI/integration_tests/visualizer/znvis_visualizer.py +++ b/CI/integration_tests/visualizer/znvis_visualizer.py @@ -25,6 +25,7 @@ ------- Test that the visualizer runs. """ + import os import tempfile import time @@ -54,6 +55,7 @@ def test_run(self): test isn't prematurely closed and passes when it should have failed. For now 120 seconds is more than enough time for this test but we can think of some improvements later. + """ process = MDSuiteProcess(target=self._run_app) process.start() diff --git a/CI/unit_tests/database/test_experiment_database.py b/CI/unit_tests/database/test_experiment_database.py index a830ce62..f6f8516e 100644 --- a/CI/unit_tests/database/test_experiment_database.py +++ b/CI/unit_tests/database/test_experiment_database.py @@ -24,6 +24,7 @@ Summary ------- """ + import dataclasses import os diff --git a/CI/unit_tests/database/test_file_read.py b/CI/unit_tests/database/test_file_read.py index 53afd57d..eeba9173 100644 --- a/CI/unit_tests/database/test_file_read.py +++ b/CI/unit_tests/database/test_file_read.py @@ -1,4 +1,5 @@ """Test MDSuite file reading.""" + import numpy as np import mdsuite diff --git a/CI/unit_tests/database/test_simulation_database.py b/CI/unit_tests/database/test_simulation_database.py index 8c0d7043..c827e682 100644 --- a/CI/unit_tests/database/test_simulation_database.py +++ b/CI/unit_tests/database/test_simulation_database.py @@ -23,6 +23,7 @@ ------- Test for module for the simulation database. """ + import os import tempfile import unittest @@ -43,6 +44,7 @@ def test_build_path_input(self): Returns ------- Asserts that the correct path is generated for a given input. + """ temp_dir = tempfile.TemporaryDirectory() os.chdir(temp_dir.name) @@ -70,6 +72,7 @@ def test_add_dataset(self): Returns ------- Assert that a dataset of the correct size is built. + """ temp_dir = tempfile.TemporaryDirectory() os.chdir(temp_dir.name) @@ -93,6 +96,7 @@ def test_resize_array(self): Returns ------- Resizes a built dataset and checks that the size is now correct. + """ temp_dir = tempfile.TemporaryDirectory() os.chdir(temp_dir.name) @@ -118,6 +122,7 @@ def test_database_exists(self): Returns ------- Checks for a False and then True result. + """ temp_dir = tempfile.TemporaryDirectory() os.chdir(temp_dir.name) @@ -135,6 +140,7 @@ def test_check_existence(self): Returns ------- Checks for a True and False result. 
+ """ temp_dir = tempfile.TemporaryDirectory() os.chdir(temp_dir.name) diff --git a/CI/unit_tests/experiment/test_Experiment.py b/CI/unit_tests/experiment/test_Experiment.py index 991247b1..e484ddfb 100644 --- a/CI/unit_tests/experiment/test_Experiment.py +++ b/CI/unit_tests/experiment/test_Experiment.py @@ -1,4 +1,5 @@ """Test MDSuite Experiment class.""" + import pytest from mdsuite.experiment.experiment import Experiment diff --git a/CI/unit_tests/experiment/test_run_computation.py b/CI/unit_tests/experiment/test_run_computation.py index 7d648498..d68c8124 100644 --- a/CI/unit_tests/experiment/test_run_computation.py +++ b/CI/unit_tests/experiment/test_run_computation.py @@ -9,6 +9,7 @@ Description: """ + from unittest.mock import Mock from mdsuite.experiment.run import RunComputation diff --git a/CI/unit_tests/graph_modules/test_molecular_graph.py b/CI/unit_tests/graph_modules/test_molecular_graph.py index 2c4fefa6..dbbc9e25 100644 --- a/CI/unit_tests/graph_modules/test_molecular_graph.py +++ b/CI/unit_tests/graph_modules/test_molecular_graph.py @@ -25,6 +25,7 @@ ------- Module to test the molecular graph module. """ + from dataclasses import dataclass from pathlib import Path @@ -54,6 +55,7 @@ class SmilesTestData: species : dict A dictionary of species information for the test stating how many of each particle species is in the group e.g. {'C': 6, 'H': 14} + """ name: str @@ -78,6 +80,7 @@ def test_apply_system_cutoff(self): Returns ------- Checks whether or not the cutoff has been enforced. + """ zeros = np.array([0, 0, 0, 0, 0]) cutoff_data = [ @@ -123,6 +126,7 @@ def test_build_smiles_graph(self): ------- This test checks that the SMILES graphs built by the module return the correct molecule information for several scenarios. + """ emim = SmilesTestData( name="emim", diff --git a/CI/unit_tests/memory_manager/test_memory_manager.py b/CI/unit_tests/memory_manager/test_memory_manager.py index b89ad00a..22f6450f 100644 --- a/CI/unit_tests/memory_manager/test_memory_manager.py +++ b/CI/unit_tests/memory_manager/test_memory_manager.py @@ -19,6 +19,7 @@ ------- Test for the memory manager module. """ + import unittest import numpy as np @@ -68,6 +69,7 @@ def test_linear_scale_function(self): ----- Each test will check that the function parameters are return correctly and that the function is called correctly and returns proper values. + """ # Test linear function scale_function = {"linear": {"scale_factor": 2}} @@ -83,6 +85,7 @@ def test_log_linear_scale_function(self): ----- Each test will check that the function parameters are return correctly and that the function is called correctly and returns proper values. + """ # Test log-linear function scale_function = {"log-linear": {"scale_factor": 2}} @@ -98,6 +101,7 @@ def test_quadratic_scale_function(self): ----- Each test will check that the function parameters are return correctly and that the function is called correctly and returns proper values. + """ # Test quadratic function scale_function = {"quadratic": {"inner_scale_factor": 2, "outer_scale_factor": 2}} @@ -114,6 +118,7 @@ def test_polynomial_scale_function(self): ----- Each test will check that the function parameters are return correctly and that the function is called correctly and returns proper values. + """ # Test polynomial function scale_function = { @@ -164,6 +169,7 @@ def test_hdf5_load_time(self): Returns ------- Tests that the method returns the correct load time. 
+ """ data = self.memory_manager.hdf5_load_time(10) self.assertEqual(data, np.log(10)) @@ -176,6 +182,7 @@ def test_get_optimal_batch_size(self): ------- Test that this method returns the expected value. Currently this is just the same value that is passed to it. + """ data = self.memory_manager._get_optimal_batch_size(10) self.assertEqual(data, data) # Todo: no shit, sherlock @@ -188,6 +195,7 @@ def test_compute_atomwise_minibatch(self): ------- Test the atom wise minibatch method. The test ensures for only a single case that the correct numbers are returned. + """ self.memory_manager.database = TestDatabase() self.memory_manager.data_path = ["Test/Path"] diff --git a/CI/unit_tests/project/test_project_add_experiment.py b/CI/unit_tests/project/test_project_add_experiment.py index 795a1d1d..e9d5b67b 100644 --- a/CI/unit_tests/project/test_project_add_experiment.py +++ b/CI/unit_tests/project/test_project_add_experiment.py @@ -24,6 +24,7 @@ Summary ------- """ + import os import pathlib diff --git a/CI/unit_tests/project/test_project_database.py b/CI/unit_tests/project/test_project_database.py index 950a1a12..ff2870c9 100644 --- a/CI/unit_tests/project/test_project_database.py +++ b/CI/unit_tests/project/test_project_database.py @@ -24,6 +24,7 @@ Summary ------- """ + import os from pathlib import Path from tempfile import TemporaryDirectory diff --git a/CI/unit_tests/project/test_project_instantiation.py b/CI/unit_tests/project/test_project_instantiation.py index cb3c8462..dfc9af82 100644 --- a/CI/unit_tests/project/test_project_instantiation.py +++ b/CI/unit_tests/project/test_project_instantiation.py @@ -9,6 +9,7 @@ Description: """ + import pathlib import mdsuite diff --git a/CI/unit_tests/project/test_project_load_experiments.py b/CI/unit_tests/project/test_project_load_experiments.py index 7b857645..c90f113c 100644 --- a/CI/unit_tests/project/test_project_load_experiments.py +++ b/CI/unit_tests/project/test_project_load_experiments.py @@ -24,6 +24,7 @@ Summary ------- """ + import os from tempfile import TemporaryDirectory diff --git a/CI/unit_tests/transformations/test_transformator_parent.py b/CI/unit_tests/transformations/test_transformator_parent.py index 4c901eda..bbd59cbf 100644 --- a/CI/unit_tests/transformations/test_transformator_parent.py +++ b/CI/unit_tests/transformations/test_transformator_parent.py @@ -21,6 +21,7 @@ -------- If you use this module please cite us with: """ + import os import numpy as np diff --git a/CI/unit_tests/utils/test_calculator_helper_methods.py b/CI/unit_tests/utils/test_calculator_helper_methods.py index 07cb6f44..05b2a319 100644 --- a/CI/unit_tests/utils/test_calculator_helper_methods.py +++ b/CI/unit_tests/utils/test_calculator_helper_methods.py @@ -25,6 +25,7 @@ ------- Module for testing the calculator helper methods. """ + import numpy as np import pytest from numpy.testing import assert_array_equal, assert_raises @@ -49,6 +50,7 @@ def test_fit_einstein_curve(self): * Returns correct gradient on a straight line * Returns correct gradient on a multi-regime line + """ x_data = np.linspace(0, 1000, 1000) @@ -79,6 +81,7 @@ def test_correlate(self): The first signal is auto-correlated, the second is perfectly anti-correlated. Therefore, when summed, they should cancel to zero. + """ # generate 10 points t = np.arange(10) @@ -111,6 +114,7 @@ def test_msd_operation(self): The first signal is auto-correlated, the second is perfectly anti-correlated. Therefore, when summed, they should cancel to zero. 
+ """ # generate 10 points t = np.arange(10) diff --git a/CI/unit_tests/utils/test_constants.py b/CI/unit_tests/utils/test_constants.py index e74bcbb7..b6a18fd1 100644 --- a/CI/unit_tests/utils/test_constants.py +++ b/CI/unit_tests/utils/test_constants.py @@ -1,4 +1,5 @@ """Test for MDSuite utils.constants.""" + import dataclasses import pytest diff --git a/CI/unit_tests/utils/test_meta_functions.py b/CI/unit_tests/utils/test_meta_functions.py index 226d3bbe..f7470afc 100644 --- a/CI/unit_tests/utils/test_meta_functions.py +++ b/CI/unit_tests/utils/test_meta_functions.py @@ -25,6 +25,7 @@ ------- Test the meta functions module. """ + import os import numpy as np @@ -57,6 +58,7 @@ def test_join_path(self): Returns ------- assert that join_path('a', 'b') is 'a/b' + """ assert join_path("a", "b") == "a/b" @@ -67,6 +69,7 @@ def test_get_dimensionality(self): Returns ------- assert that for all choices of dimension array the correct dimension comes out. + """ one_d = [1, 0, 0] two_d = [1, 1, 0] @@ -82,6 +85,7 @@ def test_get_machine_properties(self): Returns ------- This test will just run the method and check for a failure. + """ get_machine_properties() @@ -92,6 +96,7 @@ def test_line_counter(self): Returns ------- Check that the correct number of lines is return for a test file. + """ data = [["ayy"], ["bee"], ["cee"], ["dee"]] name = "line_counter_test.txt" @@ -108,6 +113,7 @@ def test_optimize_batch_size(self): Returns ------- assert the correct batch size is returned for several inputs. + """ # Assert that the batch number is the full trajectory. number_of_configurations = 10 @@ -134,6 +140,7 @@ def test_linear_fitting_function(self): Returns ------- Assert the correction function values come out. + """ a = 5 b = 3 @@ -148,6 +155,7 @@ def test_simple_file_read(self): Returns ------- Assert that the arrays read in are as expected. + """ data = [["ayy"], ["bee"], ["cee"], ["dee"]] name = "line_counter_test.txt" @@ -164,6 +172,7 @@ def test_golden_section_search(self): Returns ------- Asserts that the correct minimum is found. + """ def func(x: np.ndarray): @@ -177,6 +186,7 @@ def func(x: np.ndarray): Returns ------- x**2 + """ return x**2 @@ -193,6 +203,7 @@ def test_round_down(self): Returns ------- Assert the correct rounding occurs. + """ b = 10 a = 9 @@ -206,6 +217,7 @@ def test_split_arrays(self): Returns ------- assert that array splitting has been performed correctly. + """ a = np.array([1, 2, 3, 10, 20, 30]) assert np.array_equal( @@ -219,6 +231,7 @@ def test_find_item(self): Returns ------- assert that a deep item is retrieved from a dictionary. + """ test_1 = {"a": 4} # test the first if statement test_2 = { @@ -246,6 +259,7 @@ def test_check_a_in_b(self): * If a is in b a True is returned * If a is not in b a False is returned. + """ b = tf.constant([[1, 2, 3], [4, 5, 6], [7, 8, 9]]) a = tf.constant([4, 5, 6]) diff --git a/CI/unit_tests/utils/test_molecule_class.py b/CI/unit_tests/utils/test_molecule_class.py index 79430ce3..a0d48be2 100644 --- a/CI/unit_tests/utils/test_molecule_class.py +++ b/CI/unit_tests/utils/test_molecule_class.py @@ -25,6 +25,7 @@ ------- Unit tests for the molecule data class. """ + import pytest import mdsuite @@ -42,6 +43,7 @@ def test_instantiation(): * Fail when essential data is not provided. * Store the correct information when provided. * Use the correct defaults. 
+ """ with pytest.raises(TypeError): Molecule() diff --git a/CI/unit_tests/utils/test_scaling_functions.py b/CI/unit_tests/utils/test_scaling_functions.py index 6d00fb03..ce9ec221 100644 --- a/CI/unit_tests/utils/test_scaling_functions.py +++ b/CI/unit_tests/utils/test_scaling_functions.py @@ -20,6 +20,7 @@ ------- Unit tests for the scaling functions module. """ + import unittest import numpy as np @@ -42,6 +43,7 @@ def test_linear_scaling_function(self): Returns ------- assert that the output is correct for several cases. + """ # Assert simple multiplication data = linear_scale_function(10, 1) @@ -62,6 +64,7 @@ def test_linearithmic_scaling_function(self): Returns ------- assert that the output is correct for several cases. + """ # Assert simple multiplication data = linearithmic_scale_function(10, 1) @@ -82,6 +85,7 @@ def test_quadratic_scaling_function(self): Returns ------- assert that the output is correct for several cases. + """ # Assert simple multiplication data = quadratic_scale_function(10, 1, 1) @@ -106,6 +110,7 @@ def test_polynomial_scaling_function(self): Returns ------- assert that the output is correct for several cases. + """ # Repeat quadratic test with poly api data = polynomial_scale_function(10, 1, 1, 2) diff --git a/CI/unit_tests/utils/test_testing.py b/CI/unit_tests/utils/test_testing.py index b96b3e75..0aa6e093 100644 --- a/CI/unit_tests/utils/test_testing.py +++ b/CI/unit_tests/utils/test_testing.py @@ -25,6 +25,7 @@ ------- Test the mdsuite testing modules. """ + import time import unittest @@ -101,6 +102,7 @@ def test_almost_equal(self): Notes ----- Taken from the module __main__ + """ dict_2a = {"a": {"b": np.array([1, 2, 3, 4])}} dict_2b = {"a": {"b": [1, 2, 3, 4]}} diff --git a/CI/unit_tests/utils/test_units.py b/CI/unit_tests/utils/test_units.py index 688131a5..d02f7192 100644 --- a/CI/unit_tests/utils/test_units.py +++ b/CI/unit_tests/utils/test_units.py @@ -1,4 +1,5 @@ """Test MDSuite units.""" + import mdsuite as mds diff --git a/docs/source/conf.py b/docs/source/conf.py index 26714572..a25843b7 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -24,6 +24,7 @@ Summary ------- """ + import sys import sphinx_rtd_theme diff --git a/examples/notebooks/Mapping_Molecules.ipynb b/examples/notebooks/Mapping_Molecules.ipynb index a9fa7b74..926d9a84 100644 --- a/examples/notebooks/Mapping_Molecules.ipynb +++ b/examples/notebooks/Mapping_Molecules.ipynb @@ -29,11 +29,7 @@ "import mdsuite.file_io.chemfiles_read\n", "from mdsuite.utils import Units\n", "\n", - "from zinchub import DataHub\n", - "import shutil\n", - "\n", - "import h5py as hf\n", - "import numpy as np" + "from zinchub import DataHub\n" ] }, { diff --git a/examples/notebooks/Molten_Salt_Comparison.ipynb b/examples/notebooks/Molten_Salt_Comparison.ipynb index fd62436d..e06e6c5c 100644 --- a/examples/notebooks/Molten_Salt_Comparison.ipynb +++ b/examples/notebooks/Molten_Salt_Comparison.ipynb @@ -21,10 +21,7 @@ "source": [ "import mdsuite as mds\n", "import matplotlib.pyplot as plt\n", - "from zinchub import DataHub\n", - "import tensorflow as tf\n", - "import numpy as np\n", - "from scipy.integrate import cumtrapz" + "from zinchub import DataHub" ] }, { diff --git a/mdsuite/__init__.py b/mdsuite/__init__.py index 54b42ce0..60089182 100644 --- a/mdsuite/__init__.py +++ b/mdsuite/__init__.py @@ -24,6 +24,7 @@ Summary ------- """ + try: from importlib import metadata except ImportError: # for Python<3.8 diff --git a/mdsuite/calculators/__init__.py b/mdsuite/calculators/__init__.py index 
f9987948..09c3cc4d 100644 --- a/mdsuite/calculators/__init__.py +++ b/mdsuite/calculators/__init__.py @@ -24,6 +24,7 @@ Summary ------- """ + from __future__ import annotations from mdsuite.calculators.angular_distribution_function import ( diff --git a/mdsuite/calculators/angular_distribution_function.py b/mdsuite/calculators/angular_distribution_function.py index 4b6c91a3..73914bd9 100644 --- a/mdsuite/calculators/angular_distribution_function.py +++ b/mdsuite/calculators/angular_distribution_function.py @@ -27,6 +27,7 @@ describes the average distribution of angles between three particles of species a, b, and c. Note that a, b, and c may all be the same species, e.g. Na-Na-Na. """ + import itertools import logging from abc import ABC @@ -108,6 +109,7 @@ class AngularDistributionFunction(TrajectoryCalculator, ABC): stop = 200, bins = 100, use_tf_function = False) + """ def __init__(self, **kwargs): @@ -118,6 +120,7 @@ def __init__(self, **kwargs): ---------- experiment : object Experiment object from which to take attributes. + """ super().__init__(**kwargs) self.scale_function = {"quadratic": {"outer_scale_factor": 10}} @@ -191,6 +194,7 @@ def __call__( if true, perform the analysis on molecules. plot : bool If true, plot the result of the analysis. + """ # set args that will affect the computation result self.args = Args( @@ -224,6 +228,7 @@ def check_input(self): Returns ------- Updates the class attributes. + """ self._run_dependency_check() if self.args.stop is None: @@ -253,6 +258,7 @@ def _compute_number_of_atoms(self, reference: dict): Returns ------- Updates the number of atoms attribute. + """ # TODO return to dotdict form when molecules is a dotdict number_of_atoms = 0 @@ -376,6 +382,7 @@ def _compute_angles(species, r_ijk_indices): ------- condition name + """ (i_name, i_min, i_max), (j_name, j_min, j_max), (k_name, k_min, k_max) = species name = f"{i_name}-{j_name}-{k_name}" @@ -404,6 +411,7 @@ def _build_histograms(self, positions, species_indices, angles): ------- angles : dict A dictionary of the triples references and their histogram values. + """ tmp = tf.transpose(tf.concat(positions, axis=0), (1, 0, 2)) @@ -449,6 +457,7 @@ def _compute_adfs(self, angles, species_indices): Returns ------- Updates the class, the SQL database, and plots values if required. + """ for species in itertools.combinations_with_replacement(species_indices, 3): name = f"{species[0][0]}-{species[1][0]}-{species[2][0]}" @@ -526,6 +535,7 @@ def _correct_batch_properties(self): Returns ------- Updates the parent class. + """ if self.batch_size > self.args.number_of_configurations: self.batch_size = self.args.number_of_configurations diff --git a/mdsuite/calculators/calculator.py b/mdsuite/calculators/calculator.py index 77aba53a..76d786c6 100644 --- a/mdsuite/calculators/calculator.py +++ b/mdsuite/calculators/calculator.py @@ -25,6 +25,7 @@ ------- Parent class for the calculators. """ + from __future__ import annotations import functools @@ -104,6 +105,7 @@ def inner(self, *args, **kwargs) -> Union[db.Computation, Dict[str, db.Computati data: A dictionary of shape {name: data} when called from the project class A list of [data] when called directly from the experiment class + """ # This is only true, when called via project.experiments..run, # otherwise the experiment will be None @@ -191,6 +193,7 @@ class Calculator(CalculatorDatabase): plot_array : list A list of plot objects to be show together at the end of the species loop. 
+ """ def __init__( @@ -205,6 +208,7 @@ def __init__( Experiment for which the calculator will be run. experiments : List[Experiment] List of experiments on which to run the calculator. + """ # Set upon instantiation of parent class super().__init__(experiment) @@ -266,6 +270,7 @@ def run_visualization( Returns ------- Updates the plot array with a Bokeh plot object. + """ self.plot_array.append( self.plotter.construct_plot( @@ -296,6 +301,7 @@ def plot_data(self, data): ---------- data: db.Compution.data_dict associated with the current project + """ for selected_species, val in data.items(): self.run_visualization( diff --git a/mdsuite/calculators/coordination_number_calculation.py b/mdsuite/calculators/coordination_number_calculation.py index 07c84125..2be2b559 100644 --- a/mdsuite/calculators/coordination_number_calculation.py +++ b/mdsuite/calculators/coordination_number_calculation.py @@ -25,6 +25,7 @@ ------- Module to compute coodination numbers. """ + import logging from dataclasses import dataclass @@ -73,6 +74,7 @@ def _integrate_rdf(radii_data: np.array, rdf_data: np.array, density: float) -> ------- integral_data : np.array Cumulative integral of the RDF scaled by the radius and denisty. + """ integral_data = cumtrapz(y=radii_data[1:] ** 2 * rdf_data[1:], x=radii_data[1:]) @@ -134,6 +136,7 @@ def __init__(self, **kwargs): ---------- experiment : class object Class object of the experiment. + """ super().__init__(**kwargs) self.file_to_study = None @@ -176,6 +179,7 @@ def __call__( Window length of the savgol filter. number_of_shells : int Number of shells to look for. + """ if isinstance(rdf_data, Computation): self.rdf_data = rdf_data @@ -210,6 +214,7 @@ def _compute_nm_volume(self): Returns ------- Updates the volume attribute of the class. + """ volume_si = self.experiment.volume * self.experiment.units.volume @@ -241,6 +246,7 @@ def _get_rdf_peaks(self, rdf: np.ndarray) -> np.ndarray: ------ ValueError Raised if the number of peaks required for the analysis are not met. + """ filtered_data = apply_savgol_filter( rdf, @@ -277,6 +283,7 @@ def _find_minima(self, radii: np.ndarray, rdf: np.ndarray) -> dict: ------- coordination_shells : dict A dictionary of coordination shell radial ranges. + """ peaks = self._get_rdf_peaks(rdf) # get the max value indices @@ -311,6 +318,7 @@ def _get_coordination_numbers( ------- coordination_numbers : dict A dictionary of coordination numbers. + """ coordination_shells = self._find_minima(radii, rdf) # get the minimums diff --git a/mdsuite/calculators/einstein_diffusion_coefficients.py b/mdsuite/calculators/einstein_diffusion_coefficients.py index c51efae7..811930de 100644 --- a/mdsuite/calculators/einstein_diffusion_coefficients.py +++ b/mdsuite/calculators/einstein_diffusion_coefficients.py @@ -25,6 +25,7 @@ ------- Module for the computation of self-diffusion coefficients using the Einstein method. 
""" + from __future__ import annotations import logging @@ -79,6 +80,7 @@ class EinsteinDiffusionCoefficients(TrajectoryCalculator, ABC): project.experiment.run.EinsteinDiffusionCoefficients(data_range=500, plot=True, correlation_time=10) + """ def __init__(self, **kwargs): @@ -89,6 +91,7 @@ def __init__(self, **kwargs): Experiment class to call from experiments : Experiment Experiment classes to call from + """ super().__init__(**kwargs) self.scale_function = {"linear": {"scale_factor": 150}} @@ -143,6 +146,7 @@ def __call__( Returns ------- None + """ if species is None: if molecules: @@ -177,6 +181,7 @@ def ensemble_operation(self, ensemble): Returns ------- MSD of the tensor_values. + """ msd = tf.math.squared_difference( tf.gather(ensemble, self.args.tau_values, axis=1), ensemble[:, None, 0] diff --git a/mdsuite/calculators/einstein_distinct_diffusion_coefficients.py b/mdsuite/calculators/einstein_distinct_diffusion_coefficients.py index bbb8bdb9..5ce871af 100644 --- a/mdsuite/calculators/einstein_distinct_diffusion_coefficients.py +++ b/mdsuite/calculators/einstein_distinct_diffusion_coefficients.py @@ -25,6 +25,7 @@ ------- Module for computing distinct diffusion coefficients using the Einstein method. """ + import itertools import warnings from dataclasses import dataclass @@ -85,6 +86,7 @@ class EinsteinDistinctDiffusionCoefficients(TrajectoryCalculator): experiment.run_computation.EinsteinDistinctDiffusionCoefficients(data_range=500, plot=True, correlation_time=10) + """ def __init__(self, **kwargs): @@ -95,6 +97,7 @@ def __init__(self, **kwargs): ---------- experiment : object Experiment class to call from + """ super().__init__(**kwargs) @@ -199,6 +202,7 @@ def ref_conf_map(ref_dataset, full_ds): Returns ------- + """ def test_conf_map(test_dataset): @@ -210,6 +214,7 @@ def test_conf_map(test_dataset): test_dataset Returns -------. + """ return msd_operation(ref_dataset, test_dataset) @@ -230,6 +235,7 @@ def _compute_self_correlation(self, ds_a, ds_b): Returns ------- + """ atomwise_vmap = jax.vmap(msd_operation, in_axes=0) @@ -251,6 +257,7 @@ def _compute_msd(self, data: dict, data_path: list, combination: tuple): Returns ------- updates the class state + """ msd_array = self._map_over_particles( data[data_path[0]].numpy(), data[data_path[1]].numpy() @@ -271,6 +278,7 @@ def _apply_averaging_factor(self): Returns ------- averaged copy of the tensor_values. + """ self.msd_array /= int(self.n_batches) * self.ensemble_loop diff --git a/mdsuite/calculators/einstein_helfand_ionic_conductivity.py b/mdsuite/calculators/einstein_helfand_ionic_conductivity.py index 0d303f9c..35f0ec89 100644 --- a/mdsuite/calculators/einstein_helfand_ionic_conductivity.py +++ b/mdsuite/calculators/einstein_helfand_ionic_conductivity.py @@ -25,6 +25,7 @@ ------- MDSuite module for the computation of ionic conductivity using the Einstein method. """ + from abc import ABC from dataclasses import dataclass @@ -64,6 +65,7 @@ class EinsteinHelfandIonicConductivity(TrajectoryCalculator, ABC): experiment.run_computation.EinsteinHelfandTIonicConductivity(data_range=500, plot=True, correlation_time=10) + """ def __init__(self, **kwargs): @@ -74,6 +76,7 @@ def __init__(self, **kwargs): ---------- experiment : object Experiment class to call from + """ # parse to the experiment class super().__init__(**kwargs) @@ -112,6 +115,7 @@ def __call__( Number of configurations to use in each ensemble correlation_time : int Correlation time to use in the analysis. 
+ """ if fit_range == -1: fit_range = int(data_range - 1) @@ -173,6 +177,7 @@ def ensemble_operation(self, ensemble: tf.Tensor): Returns ------- MSD of the tensor_values. + """ msd = tf.math.squared_difference( tf.gather(ensemble, self.args.tau_values, axis=1), ensemble[:, 0, :] diff --git a/mdsuite/calculators/einstein_helfand_thermal_conductivity.py b/mdsuite/calculators/einstein_helfand_thermal_conductivity.py index ba24b9cd..8f15bf1b 100644 --- a/mdsuite/calculators/einstein_helfand_thermal_conductivity.py +++ b/mdsuite/calculators/einstein_helfand_thermal_conductivity.py @@ -25,6 +25,7 @@ ------- MDSuite module for the computation of thermal conductivity using the Einstein method. """ + from abc import ABC from dataclasses import dataclass @@ -76,6 +77,7 @@ class EinsteinHelfandThermalConductivity(TrajectoryCalculator, ABC): experiment.run.EinsteinHelfandTThermalConductivity(data_range=500, plot=True, correlation_time=10) + """ def __init__(self, **kwargs): @@ -86,6 +88,7 @@ def __init__(self, **kwargs): ---------- experiment : object Experiment class to call from + """ # parse to the experiment class super().__init__(**kwargs) @@ -122,6 +125,7 @@ def __call__( Data range to use in the analysis. correlation_time : int Correlation time to use in the window sampling. + """ if fit_range == -1: fit_range = int(data_range - 1) @@ -193,6 +197,7 @@ def ensemble_operation(self, ensemble): Returns ------- MSD of the tensor_values. + """ msd = tf.math.squared_difference(ensemble, ensemble[None, 0]) diff --git a/mdsuite/calculators/einstein_helfand_thermal_kinaci.py b/mdsuite/calculators/einstein_helfand_thermal_kinaci.py index e63e0ebd..fa38d336 100644 --- a/mdsuite/calculators/einstein_helfand_thermal_kinaci.py +++ b/mdsuite/calculators/einstein_helfand_thermal_kinaci.py @@ -26,6 +26,7 @@ MDSuite module for the computation of the thermal conductivity in solids using the Einstein method as applied to the Kinaci integrated thermal flux. """ + from abc import ABC from dataclasses import dataclass @@ -88,6 +89,7 @@ def __init__(self, **kwargs): ---------- experiment : object Experiment class to call from + """ # parse to the experiment class super().__init__(**kwargs) @@ -126,6 +128,7 @@ def __call__( correlation_time : int Correlation time to use in the window sampling. + """ if fit_range == -1: fit_range = int(data_range - 1) @@ -198,6 +201,7 @@ def ensemble_operation(self, ensemble): Returns ------- MSD of the tensor_values. + """ msd = tf.math.squared_difference(ensemble, ensemble[None, 0]) diff --git a/mdsuite/calculators/green_kubo_distinct_diffusion_coefficients.py b/mdsuite/calculators/green_kubo_distinct_diffusion_coefficients.py index 31f56d22..76476eee 100644 --- a/mdsuite/calculators/green_kubo_distinct_diffusion_coefficients.py +++ b/mdsuite/calculators/green_kubo_distinct_diffusion_coefficients.py @@ -25,6 +25,7 @@ ------- Module for computing distinct diffusion coefficients using the Green-Kubo method. 
""" + import itertools from abc import ABC from dataclasses import dataclass @@ -58,6 +59,7 @@ class Args: class GreenKuboDistinctDiffusionCoefficients(TrajectoryCalculator, ABC): """ Class for the Green-Kubo diffusion coefficient implementation + Attributes ---------- experiment : object @@ -79,6 +81,7 @@ class GreenKuboDistinctDiffusionCoefficients(TrajectoryCalculator, ABC): -------- experiment.run_computation.GreenKuboDistinctDiffusionCoefficients(data_range=500, plot=True, correlation_time=10) + """ def __init__(self, **kwargs): @@ -89,6 +92,7 @@ def __init__(self, **kwargs): ---------- experiment : object Experiment class to call from + """ super().__init__(**kwargs) @@ -142,6 +146,7 @@ def __call__( If true, export the data directly into a csv file. integration_range : int Range over which to perform the integration. + """ if integration_range is None: integration_range = data_range @@ -184,6 +189,7 @@ def _compute_self_correlation(self, ds_a, ds_b): Returns ------- -------. + """ atomwise_vmap = jax.vmap(correlate, in_axes=0) @@ -212,11 +218,13 @@ def _map_over_particles(self, ds_a: np.ndarray, ds_b: np.ndarray) -> np.ndarray: def ref_conf_map(ref_dataset, full_ds): """ Maps over the atoms axis in dataset + Parameters ---------- dataset Returns -------. + """ def test_conf_map(test_dataset): @@ -228,6 +236,7 @@ def test_conf_map(test_dataset): test_dataset Returns -------. + """ return correlate(ref_dataset, test_dataset) @@ -254,6 +263,7 @@ def ensemble_operation(self, data: dict, dict_ref: list, same_species: bool = Fa Returns ------- updates the class state + """ vacf = self._map_over_particles( data[dict_ref[0]].numpy(), data[dict_ref[1]].numpy() diff --git a/mdsuite/calculators/green_kubo_ionic_conductivity.py b/mdsuite/calculators/green_kubo_ionic_conductivity.py index 7447971e..e318dbcc 100644 --- a/mdsuite/calculators/green_kubo_ionic_conductivity.py +++ b/mdsuite/calculators/green_kubo_ionic_conductivity.py @@ -28,6 +28,7 @@ electrical charge due to the mobility of the ions contained within it. This differs from electronic conductivity which is transferred by electrons. """ + from abc import ABC from dataclasses import dataclass @@ -83,6 +84,7 @@ class GreenKuboIonicConductivity(TrajectoryCalculator, ABC): -------- experiment.run_computation.GreenKuboIonicConductivity(data_range=500, plot=True, correlation_time=10) + """ def __init__(self, **kwargs): @@ -92,6 +94,7 @@ def __init__(self, **kwargs): ---------- experiment : object Experiment class to call from + """ # update experiment class super().__init__(**kwargs) @@ -131,6 +134,7 @@ def __call__( Correlation time to use in the window sampling. integration_range : int Range over which integration should be performed. + """ self.plot = plot self.jacf: np.ndarray @@ -197,6 +201,7 @@ def ensemble_operation(self, ensemble: tf.Tensor): Returns ------- ACF of the tensor_values. + """ ensemble = tf.gather(ensemble, self.args.tau_values, axis=1) jacf = tfp.stats.auto_correlation(ensemble, normalize=False, axis=1, center=False) diff --git a/mdsuite/calculators/green_kubo_self_diffusion_coefficients.py b/mdsuite/calculators/green_kubo_self_diffusion_coefficients.py index 6e72a629..433ee9fd 100644 --- a/mdsuite/calculators/green_kubo_self_diffusion_coefficients.py +++ b/mdsuite/calculators/green_kubo_self_diffusion_coefficients.py @@ -25,6 +25,7 @@ ------- Module for the computation of diffusion coefficients using the Green-Kubo approach. 
""" + from abc import ABC from dataclasses import dataclass from typing import Any, List, Union @@ -60,6 +61,7 @@ class Args: class GreenKuboDiffusionCoefficients(TrajectoryCalculator, ABC): """ Class for the Green-Kubo diffusion coefficient implementation + Attributes ---------- experiment : object @@ -84,6 +86,7 @@ class GreenKuboDiffusionCoefficients(TrajectoryCalculator, ABC): experiment.run_computation.GreenKuboSelfDiffusionCoefficients(data_range=500, plot=True, correlation_time=10) + """ def __init__(self, **kwargs): @@ -94,6 +97,7 @@ def __init__(self, **kwargs): ---------- experiment : object Experiment class to call from + """ super().__init__(**kwargs) @@ -140,6 +144,7 @@ def __call__( integration_range : int Range over which to integrate. Default is to integrate over the full data range. + """ if species is None: if molecules: @@ -187,6 +192,7 @@ def ensemble_operation(self, ensemble): Returns ------- MSD of the tensor_values. + """ vacf = ( self.experiment.units.length**2 @@ -211,6 +217,7 @@ def plot_data(self, data: dict): ---------- data : dict Data loaded from the sql database to be plotted. + """ for selected_species, val in data.items(): fig = figure(x_axis_label=self.x_label, y_axis_label=self.y_label) diff --git a/mdsuite/calculators/green_kubo_thermal_conductivity.py b/mdsuite/calculators/green_kubo_thermal_conductivity.py index 0c7f0f60..9b8223f6 100644 --- a/mdsuite/calculators/green_kubo_thermal_conductivity.py +++ b/mdsuite/calculators/green_kubo_thermal_conductivity.py @@ -26,6 +26,7 @@ MDSuite module for the computation of the thermal conductivity using the Green-Kubo relation. """ + from abc import ABC from dataclasses import dataclass @@ -75,6 +76,7 @@ class GreenKuboThermalConductivity(TrajectoryCalculator, ABC): -------- experiment.run_computation.GreenKuboThermalConductivity(data_range=500, plot=True, correlation_time=10) + """ def __init__(self, **kwargs): @@ -85,6 +87,7 @@ def __init__(self, **kwargs): ---------- experiment : object Experiment class to call from + """ super().__init__(**kwargs) self.scale_function = {"linear": {"scale_factor": 5}} @@ -119,6 +122,7 @@ def __call__( Correlation time to use in the window sampling. integration_range : int Range over which the integration should be performed. + """ self.plot = plot self.jacf: np.ndarray @@ -199,6 +203,7 @@ def ensemble_operation(self, ensemble: tf.Tensor): Returns ------- MSD of the tensor_values. + """ jacf = self.args.data_range * tf.reduce_sum( tfp.stats.auto_correlation(ensemble, normalize=False, axis=0, center=False), diff --git a/mdsuite/calculators/green_kubo_viscosity.py b/mdsuite/calculators/green_kubo_viscosity.py index 20aa38d8..22a247e3 100644 --- a/mdsuite/calculators/green_kubo_viscosity.py +++ b/mdsuite/calculators/green_kubo_viscosity.py @@ -26,6 +26,7 @@ MDSuite module for the computation of the viscosity in a system using the Green-Kubo relation as applied to the momentum flux measured during a simulation. 
""" + from abc import ABC from dataclasses import dataclass @@ -76,6 +77,7 @@ class GreenKuboViscosity(TrajectoryCalculator, ABC): -------- experiment.run_computation.GreenKuboViscosity(data_range=500, plot=True, correlation_time=10) + """ def __init__(self, **kwargs): @@ -85,6 +87,7 @@ def __init__(self, **kwargs): ---------- experiment : object Experiment class to call from + """ super().__init__(**kwargs) self.scale_function = {"linear": {"scale_factor": 5}} @@ -115,6 +118,7 @@ def __call__( if true, plot the tensor_values data_range : Number of configurations to use in each ensemble + """ self.plot = plot self.sigma = [] @@ -194,6 +198,7 @@ def ensemble_operation(self, ensemble: tf.Tensor): Returns ------- MSD of the tensor_values. + """ jacf = self.args.data_range * tf.reduce_sum( tfp.stats.auto_correlation(ensemble, normalize=False, axis=0, center=False), diff --git a/mdsuite/calculators/green_kubo_viscosity_flux.py b/mdsuite/calculators/green_kubo_viscosity_flux.py index 0966b21e..815e8c59 100644 --- a/mdsuite/calculators/green_kubo_viscosity_flux.py +++ b/mdsuite/calculators/green_kubo_viscosity_flux.py @@ -26,6 +26,7 @@ MDSuite module for the computation of viscosity using the Green-Kubo relation as applied to the stress on a system. """ + from abc import ABC from dataclasses import dataclass @@ -70,6 +71,7 @@ class GreenKuboViscosityFlux(TrajectoryCalculator, ABC): experiment.run_computation.GreenKuboViscosityFlux(data_range=500, plot=True, correlation_time=10) + """ def __init__(self, **kwargs): @@ -80,6 +82,7 @@ def __init__(self, **kwargs): ---------- experiment : object Experiment class to read and write to + """ super().__init__(**kwargs) self.scale_function = {"linear": {"scale_factor": 5}} @@ -114,6 +117,7 @@ def __call__( If true, a plot of the analysis is saved. data_range : int Number of configurations to include in each ensemble + """ self.plot = plot self.sigma = [] @@ -191,6 +195,7 @@ def ensemble_operation(self, ensemble): Returns ------- updates class vacf with the tensor_values. + """ jacf = self.args.data_range * tf.reduce_sum( tfp.stats.auto_correlation(ensemble, normalize=False, axis=0, center=False), diff --git a/mdsuite/calculators/kirkwood_buff_integrals.py b/mdsuite/calculators/kirkwood_buff_integrals.py index e11bbb74..7df47b10 100644 --- a/mdsuite/calculators/kirkwood_buff_integrals.py +++ b/mdsuite/calculators/kirkwood_buff_integrals.py @@ -25,6 +25,7 @@ ------- Module for the computation of kirkwood buff integrals. """ + import logging from dataclasses import dataclass @@ -86,6 +87,7 @@ class KirkwoodBuffIntegral(Calculator): Examples -------- experiment.run.KirkwoodBuffIntegral() + """ def __init__(self, **kwargs): @@ -96,6 +98,7 @@ def __init__(self, **kwargs): ---------- experiment : class object Class object of the experiment. + """ super().__init__(**kwargs) self.file_to_study = None @@ -135,6 +138,7 @@ def __call__( Order of the savgol polynomial filter savgol_window_length : int Window length of the savgol filter. + """ if isinstance(rdf_data, Computation): self.rdf_data = rdf_data @@ -168,6 +172,7 @@ def _calculate_kb_integral(self, radii_data: np.ndarray, rdf_data: np.ndarray): ------- kb_integral : np.ndarray KB integral to be saved. 
+ """ filtered_data = apply_savgol_filter( rdf_data, diff --git a/mdsuite/calculators/nernst_einstein_ionic_conductivity.py b/mdsuite/calculators/nernst_einstein_ionic_conductivity.py index 26a41804..46a7f72f 100644 --- a/mdsuite/calculators/nernst_einstein_ionic_conductivity.py +++ b/mdsuite/calculators/nernst_einstein_ionic_conductivity.py @@ -24,6 +24,7 @@ Summary ------- """ + import logging import operator @@ -55,6 +56,7 @@ def __init__(self, **kwargs): ---------- experiment : Experiment Experiment class from which to read + """ super().__init__(**kwargs) self.post_generation = True @@ -93,6 +95,7 @@ def __call__( Data range to use in the analysis. save : bool if true, save the output. + """ self.update_user_args(plot=plot, save=False, data_range=data_range, export=export) self.corrected = corrected @@ -111,6 +114,7 @@ def _load_data(self): ------- tensor_values: dict A dictionary of tensor_values stored in the yaml file + """ test = self.experiment.export_property_data( {"property": "Diffusion_Coefficients"} @@ -128,6 +132,7 @@ def truth_table(self): truth_table : list A truth table communication which tensor_values is available for the analysis. + """ if self._truth_table is None: log.warning( @@ -177,6 +182,7 @@ def _nernst_einstein(self, diffusion_information: list): Returns ------- Nernst-Einstein Ionic conductivity of the experiment in units of S/cm + """ # evaluate the prefactor numerator = self.experiment.number_of_atoms * (elementary_charge**2) @@ -221,6 +227,7 @@ def _corrected_nernst_einstein( Returns ------- Corrected Nernst-Einstein ionic conductivity in units of S/cm + """ # evaluate the prefactor numerator = self.experiment.number_of_atoms * (elementary_charge**2) @@ -328,6 +335,7 @@ def _run_corrected_nernst_einstein(self): Returns ------- Updates the experiment database_path + """ cne_table = [self.truth_table[0][1], self.truth_table[1][1]] diff --git a/mdsuite/calculators/potential_of_mean_force.py b/mdsuite/calculators/potential_of_mean_force.py index c51ed4b5..0e21f382 100644 --- a/mdsuite/calculators/potential_of_mean_force.py +++ b/mdsuite/calculators/potential_of_mean_force.py @@ -26,6 +26,7 @@ Module for the computation of the potential of mean force (PMF). The PMF can be used to better understand effective bond strength between species of a system. """ + import logging from dataclasses import dataclass @@ -97,6 +98,7 @@ class PotentialOfMeanForce(Calculator): -------- experiment.run_computation.PotentialOfMeanForce(savgol_order = 2, savgol_window_length = 17) + """ def __init__(self, **kwargs): @@ -110,6 +112,7 @@ def __init__(self, **kwargs): experiments : class object Class object of the experiment. load_data : bool + """ super().__init__(**kwargs) self.file_to_study = None @@ -151,6 +154,7 @@ def __call__( Window length of the savgol filter. number_of_shells : int Number of shells to integrate through. + """ if isinstance(rdf_data, Computation): self.rdf_data = rdf_data @@ -195,6 +199,7 @@ def _calculate_potential_of_mean_force(self, rdf: np.ndarray) -> np.ndarray: ----- Units here are always eV as the data stored in the RDF is constant independent of what was in the simulation. + """ pomf = -1 * boltzmann_constant * self.experiment.temperature * np.log(rdf) @@ -210,6 +215,7 @@ def _populate_args(self) -> tuple: The data range used in the RDF calculation. 
cutoff : float The cutoff (in nm) used in the RDF calculation + """ raw_data = self.rdf_data.data_dict keys = list(raw_data) @@ -236,6 +242,7 @@ def get_pomf_peaks(self, pomf_data: np.ndarray) -> np.ndarray: ------ ValueError Raised if the number of peaks required for the analysis are not met. + """ filtered_data = apply_savgol_filter( pomf_data, @@ -280,6 +287,7 @@ def _find_minimum(self, pomf_data: np.ndarray, radii_data: np.ndarray) -> dict: Dict of all shells detected based on user arguments, e.g: {'1': [0.1, 0.2]} indicates that the first pomf peak is betwee 0.1 and 0.2 angstrom. + """ # get the peaks of the tensor_values post-filtering peaks = self.get_pomf_peaks(pomf_data) @@ -311,6 +319,7 @@ def _get_pomf_values(self, pomf: np.ndarray, radii: np.ndarray) -> dict: pomf_data : dict A dictionary of the pomf values and their uncertainty. e,g: {"POMF_1": 5.6, "POMF_1_error": 0.01} + """ pomf_shells = self._find_minimum(pomf, radii) diff --git a/mdsuite/calculators/radial_distribution_function.py b/mdsuite/calculators/radial_distribution_function.py index 5b576bf1..d66f52ea 100644 --- a/mdsuite/calculators/radial_distribution_function.py +++ b/mdsuite/calculators/radial_distribution_function.py @@ -27,6 +27,7 @@ describes the probability of finding a particle of species b at a distance r of species a. """ + from __future__ import annotations import itertools @@ -113,6 +114,7 @@ def __init__(self, **kwargs): Attributes ---------- kwargs: see RunComputation class for all the passed arguments + """ super().__init__(**kwargs) @@ -188,6 +190,7 @@ def __call__( override the automatic batch size calculation use_tf_function : bool If true, tf.function is used in the calculation. + """ # set args that will affect the computation result self.args = Args( @@ -219,6 +222,7 @@ def check_input(self): Returns ------- Updates class attributes if required. + """ if self.args.stop is None: self.args.stop = self.experiment.number_of_configurations - 1 @@ -256,6 +260,7 @@ def _initialize_rdf_parameters(self): Returns ------- Updates class attributes. + """ self.bin_range = [0, self.args.cutoff] self.index_list = list(range(len(self.args.species))) @@ -291,6 +296,7 @@ def _get_species_names(self, species_tuple: tuple) -> str: ------- names : str Prefix for the saved file + """ arg_1 = self.args.species[species_tuple[0]] arg_2 = self.args.species[species_tuple[1]] @@ -304,6 +310,7 @@ def _calculate_prefactor(self, species: Union[str, tuple] = None): ---------- species : str The species tuple of the RDF being studied, e.g. Na_Na + """ species_scale_factor = 1 species_split = species.split("_") @@ -352,6 +359,7 @@ def _calculate_radial_distribution_functions(self): Returns ------- Updates the class state with the full RDF for each desired species pair. + """ # Compute the true RDF for each species combination. self.rdf.update( @@ -389,6 +397,7 @@ def _ang_to_nm(self, data_in: np.ndarray) -> np.ndarray: ------- data_out : np.ndarray data_in converted to nm + """ return (self.experiment.units.length / 1e-9) * data_in @@ -399,6 +408,7 @@ def _correct_batch_properties(self): Returns ------- Updates the parent class. 
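The relation used in _calculate_potential_of_mean_force above is w(r) = -k_B * T * ln g(r), with k_B taken in eV/K so the PMF is reported in eV. A minimal sketch (the constant is the CODATA Boltzmann constant in eV/K; names are illustrative):

    import numpy as np

    BOLTZMANN_EV = 8.617333262e-5  # Boltzmann constant in eV / K

    def potential_of_mean_force(rdf: np.ndarray, temperature: float) -> np.ndarray:
        safe_rdf = np.where(rdf > 0.0, rdf, np.nan)  # avoid log(0) where the RDF is empty
        return -BOLTZMANN_EV * temperature * np.log(safe_rdf)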
+ """ if self.batch_size > self.args.number_of_configurations: self.batch_size = self.args.number_of_configurations @@ -491,6 +501,7 @@ def compute_species_values( rdf : dict Dict of rdf values for each combination of species, e.g.: {'H-O': tf.Tensor(...), 'H-H': ..., 'O-O': ...} + """ rdf = { name: tf.zeros(self.args.number_of_bins, dtype=tf.int32) @@ -576,6 +587,7 @@ def prepare_computation(self): batch_tqdm : bool If true, the main tqdm loop over batches is disabled and only the mini-batch loop will be displayed. + """ path_list = [ join_path(item, self.loaded_property.name) for item in self.args.species @@ -607,6 +619,7 @@ def combine_dictionaries(dict_a: dict, dict_b: dict): ---------- dict_a : dict dict_b : dict + """ out = {} for key in dict_a: @@ -631,6 +644,7 @@ def bin_minibatch( number_of_bins : int cutoff : float Cutoff to enforce on the distance tensor. + """ # select the indices that are within the boundaries of the current species / # molecule @@ -728,6 +742,7 @@ def ideal_correction(self) -> float: ------- correction : float Correct ideal gas term for the RDF prefactor + """ def _spherical_symmetry(data: np.array) -> np.array: # TODO make it a property @@ -738,10 +753,12 @@ def _spherical_symmetry(data: np.array) -> np.array: # TODO make it a property ---------- data : np.array tensor_values on which to operate + Returns ------- function_values : np.array result of the operation + """ return 4 * np.pi * (data**2) @@ -751,6 +768,7 @@ def _correction_1(data: np.array) -> np.array: tensor_values : np.array tensor_values on which to operate + Returns ------- function_values : np.array @@ -765,6 +783,7 @@ def _correction_2(data: np.array) -> np.array: tensor_values : np.array tensor_values on which to operate + Returns ------- function_values : np.array @@ -786,6 +805,7 @@ def _correction_2(data: np.array) -> np.array: def _piecewise(data: np.array) -> np.array: """ Return a piecewise operation on a set of tensor_values + Parameters ---------- data : np.array @@ -795,6 +815,7 @@ def _piecewise(data: np.array) -> np.array: ------- scaled_data : np.array tensor_values that has been operated on. + """ # Boundaries on the ideal gsa correction. These go to 73% over half the box # size, the most for a cubic box. diff --git a/mdsuite/calculators/spatial_distribution_function.py b/mdsuite/calculators/spatial_distribution_function.py index 521537b0..029ef386 100644 --- a/mdsuite/calculators/spatial_distribution_function.py +++ b/mdsuite/calculators/spatial_distribution_function.py @@ -25,6 +25,7 @@ ------- Module for the spatial distribution function calculator. 
""" + from __future__ import annotations import logging @@ -131,6 +132,7 @@ def __call__( List of species to use, for computing the SDF, if None a single SDF of all available species will be computed kwargs + """ if species is None: if molecules: @@ -168,10 +170,12 @@ def _load_positions(self, indices: list, species: str) -> tf.Tensor: List of indices to take from the database_path species: str The species to load the positions from + Returns ------- loaded_data : tf.Tensor tf.Tensor of tensor_values loaded from the hdf5 database_path + """ path_list = [join_path(species, self.loaded_property.name)] @@ -259,6 +263,7 @@ def _get_unit_sphere(self) -> tf.Tensor: tf.Tensor: A Tensor with shape (n_bins, n_bins, 3) where 3 represents (x,y,z) for the coordinates of a unit sphere + """ theta_range = [0, math.pi] phi_range = [-math.pi, math.pi] diff --git a/mdsuite/calculators/structure_factor.py b/mdsuite/calculators/structure_factor.py index fc97ee58..3179139a 100644 --- a/mdsuite/calculators/structure_factor.py +++ b/mdsuite/calculators/structure_factor.py @@ -23,6 +23,7 @@ ------- Code for the computation of the structure factor. """ + import logging from dataclasses import dataclass @@ -104,6 +105,7 @@ class StructureFactor(Calculator): In order to use the structure factor calculator both the masses and the charges of each species must be present. If they are not correct, the structure factor will not work. + """ volume: float @@ -119,6 +121,7 @@ def __init__(self, **kwargs): ---------- experiment : class object Class object of the experiment. + """ super().__init__(**kwargs) @@ -155,6 +158,7 @@ def __call__( Method use to compute the weight factors. resolution : int (default=700) Resolution of the structure factor. + """ self.plot = plot @@ -202,6 +206,7 @@ def _compute_angstsrom_volume(self): Returns ------- Updates the volume attribute of the class. + """ volume_si = self.experiment.volume * self.experiment.units.volume @@ -219,6 +224,7 @@ def _compute_form_factors(self): Notes ----- aff -> atomic form factor + """ for name, species_data in self.species_dict.items(): # aff -> atomic form factor @@ -250,6 +256,7 @@ def _compute_partial_structure_factors(self) -> dict: This expands a tensor by use of an outer produce and therefore could theoretically result in memory issue for very large radii values over very fine fourier grids. In this case, batching can be performed over Q values. + """ partial_structure_factors = {} for pair, pair_data in self.rdf_data.data_dict.items(): @@ -273,6 +280,7 @@ def _compute_weight_factors(self) -> dict: weight_factors : dict A dict of weight factors to be used in the SF computation. There is one weight factor for each pair. + """ weight_factors = {} for pair, pair_data in self.rdf_data.data_dict.items(): @@ -306,6 +314,7 @@ def _compute_total_structure_factor( ------- total_structure_factor : np.ndarray Total structure factor of the system. + """ structure_factor = np.zeros(self.args.resolution) for pair, pair_data in partial_sf.items(): diff --git a/mdsuite/calculators/trajectory_calculator.py b/mdsuite/calculators/trajectory_calculator.py index 49311da3..60f5abf5 100644 --- a/mdsuite/calculators/trajectory_calculator.py +++ b/mdsuite/calculators/trajectory_calculator.py @@ -25,6 +25,7 @@ ------- A parent class for calculators that operate on the trajectory. """ + from __future__ import annotations from abc import ABC @@ -78,6 +79,7 @@ class TrajectoryCalculator(Calculator, ABC): Data manager parent to handle preparation of data generators. 
_database : Database Simulation database from which data should be loaded. + """ def __init__(self, experiment: Experiment = None, experiments: List = None): @@ -90,6 +92,7 @@ def __init__(self, experiment: Experiment = None, experiments: List = None): Experiment for which the calculator will be run. experiments : List[Experiment] List of experiments on which to run the calculator. + """ super(TrajectoryCalculator, self).__init__( experiment=experiment, experiments=experiments @@ -121,6 +124,7 @@ def _run_dependency_check(self): Returns ------- Will call transformations if required. + """ if self.loaded_property is None: return @@ -162,6 +166,7 @@ def _string_to_function(argument): Returns ------- transformation call. + """ switcher_unwrapping = {"Unwrapped_Positions": self._unwrap_choice()} @@ -202,6 +207,7 @@ def _handle_tau_values(self) -> np.array: ------- times : np.array The time values corresponding to the selected tau values + """ if isinstance(self.args.tau_values, int): self.data_resolution = self.args.tau_values @@ -237,6 +243,7 @@ def _check_remainder(self): Returns ------- Updates the remainder attribute if required. + """ return self.remainder - (self.remainder % self.args.data_range) @@ -255,6 +262,7 @@ def _prepare_managers(self, data_path: list, correct: bool = False): Returns ------- Updates the calculator class + """ self.memory_manager = MemoryManager( data_path=data_path, @@ -303,6 +311,7 @@ def _correct_batch_properties(self): Notes ----- This method is called by some calculator + """ raise NotImplementedError diff --git a/mdsuite/calculators/transformations_reference.py b/mdsuite/calculators/transformations_reference.py index 880ce715..ccaadc3a 100644 --- a/mdsuite/calculators/transformations_reference.py +++ b/mdsuite/calculators/transformations_reference.py @@ -24,6 +24,7 @@ Summary ------- """ + switcher_transformations = { "Translational_Dipole_Moment": "TranslationalDipoleMoment", "Ionic_Current": "IonicCurrent", diff --git a/mdsuite/database/calculator_database.py b/mdsuite/database/calculator_database.py index f6a98701..d8bc73f2 100644 --- a/mdsuite/database/calculator_database.py +++ b/mdsuite/database/calculator_database.py @@ -24,6 +24,7 @@ Summary ------- """ + from __future__ import annotations import logging @@ -112,6 +113,7 @@ def get_computation_data(self) -> db.Computation: db.Computation Returns the computation object from the database if available, otherwise returns None + """ log.debug(f"Getting data for {self.experiment.name} with args {self.args}") with self.experiment.project.session as ses: @@ -244,6 +246,7 @@ def queue_data(self, data, subjects): subjects: list A list of strings / subject names that are associated with the data, e.g. the pairs of the RDF + """ self._queued_data.append(ComputationResults(data=data, subjects=subjects)) @@ -263,6 +266,7 @@ def update_database(self, parameters, delete_duplicate: bool = True): Returns ------- Updates the sql database + """ raise DeprecationWarning("This function has been replaced by `queue_data`") diff --git a/mdsuite/database/data_manager.py b/mdsuite/database/data_manager.py index 8b514fd2..47123522 100644 --- a/mdsuite/database/data_manager.py +++ b/mdsuite/database/data_manager.py @@ -27,6 +27,7 @@ generators. These generators allow for the full use of the TF data pipelines but can required special formatting rules. """ + import logging import numpy as np @@ -98,6 +99,7 @@ def __init__( Selection of atoms in the calculation. 
offset : int Offset in the data loading if it should not be loaded from the start. + """ self.database = database self.data_path = data_path @@ -144,6 +146,7 @@ def batch_generator( # noqa: C901 Returns ------- Returns a generator function and its arguments + """ args = ( self.n_batches, @@ -175,8 +178,10 @@ def generator( Path to the tensor_values in the database_path dictionary : bool If true, tensor_values is returned in a dictionary + Returns ------- + """ database = Database(database) @@ -242,8 +247,10 @@ def atom_generator( Path to the tensor_values in the database_path dictionary : bool If true, tensor_values is returned in a dictionary + Returns ------- + """ # Atom selection not currently available for mini-batched calculations if type(self.atom_selection) is dict: @@ -303,12 +310,14 @@ def ensemble_generator(self, system: bool = False, glob_data: dict = None) -> tu Returns ------- Ensemble loop generator + """ args = (self.ensemble_loop, self.correlation_time, self.data_range) def dictionary_generator(ensemble_loop, correlation_time, data_range): """ Generator for the ensemble loop + Parameters ---------- ensemble_loop : int @@ -317,9 +326,11 @@ def dictionary_generator(ensemble_loop, correlation_time, data_range): Distance between ensembles data_range : int Size of each ensemble + Returns ------- None. + """ ensemble_loop = int( np.clip( diff --git a/mdsuite/database/database_base.py b/mdsuite/database/database_base.py index 4c34b97f..30915e56 100644 --- a/mdsuite/database/database_base.py +++ b/mdsuite/database/database_base.py @@ -24,6 +24,7 @@ Summary ------- """ + import logging from pathlib import Path @@ -47,6 +48,7 @@ def __init__(self, database_name: str): ---------- database_name: str name of the database + """ self.name = "" # Name of the Project self.database_name = database_name diff --git a/mdsuite/database/experiment_database.py b/mdsuite/database/experiment_database.py index 0cbe7045..796d8c99 100644 --- a/mdsuite/database/experiment_database.py +++ b/mdsuite/database/experiment_database.py @@ -24,6 +24,7 @@ Summary ------- """ + from __future__ import annotations import dataclasses @@ -49,6 +50,7 @@ class LazyProperty: References ---------- https://realpython.com/python-descriptors/ + """ def __set_name__(self, owner, name): @@ -110,6 +112,7 @@ def export_property_data(self, parameters: dict) -> List[db.Computation]: ------- output : list A list of rows represented as dictionaries. + """ raise DeprecationWarning( "This function has been removed and replaced by queue_database" @@ -124,6 +127,7 @@ def set_db(self, name: str, value): Name of the database entry value: Any serializeable data type that can be written to the database + """ with self.project.session as ses: experiment = get_or_create(ses, db.Experiment, name=self.name) @@ -155,6 +159,7 @@ def get_db(self, name: str, default=None): ----- Internally the values will be converted to dict, so e.g. 
tuples or sets might be converted to lists + """ with self.project.session as ses: experiment = get_or_create(ses, db.Experiment, name=self.name) @@ -200,6 +205,7 @@ def species(self) -> Dict[str, SpeciesInfo]: ------- dict[str, SpeciesInfo]: A dictionary of species such as {Li: SpeciesInfo} + """ if self._species is None: with self.project.session as ses: @@ -223,6 +229,7 @@ def species(self, value: dict): ---------- value: dict A dictionary of {element: SpeciesInfo} + """ if value is None: return diff --git a/mdsuite/database/mdsuite_properties.py b/mdsuite/database/mdsuite_properties.py index bc012bbe..c9fb35b4 100644 --- a/mdsuite/database/mdsuite_properties.py +++ b/mdsuite/database/mdsuite_properties.py @@ -24,6 +24,7 @@ Summary ------- """ + from dataclasses import dataclass from mdsuite.database.simulation_database import PropertyInfo diff --git a/mdsuite/database/project_database.py b/mdsuite/database/project_database.py index d8097971..9b96af65 100644 --- a/mdsuite/database/project_database.py +++ b/mdsuite/database/project_database.py @@ -24,6 +24,7 @@ Summary ------- """ + import logging from pathlib import Path @@ -76,6 +77,7 @@ def description(self, value: str): Description of the project. If the string ends in .txt, the contents of the txt file will be read. If it ends in .md, same outcome. Anything else will be read as is. + """ if value is None: return diff --git a/mdsuite/database/scheme.py b/mdsuite/database/scheme.py index ee91f256..65fd122d 100644 --- a/mdsuite/database/scheme.py +++ b/mdsuite/database/scheme.py @@ -24,6 +24,7 @@ Summary ------- """ + import logging from sqlalchemy import Boolean, Column, ForeignKey, Integer, String @@ -102,6 +103,7 @@ def __repr__(self): ------- information : str Experiment number and name as an fstring + """ return f"{self.id}: {self.name}" @@ -144,6 +146,7 @@ class ExperimentAttribute(Base): numeric value of the property. str_value : str String value of the property. + """ __tablename__ = "experiment_attributes" @@ -219,6 +222,7 @@ def __repr__(self): ------- information : str Experiment number and name as an fstring + """ return f"Exp{self.experiment_id}_{self.name}_{self.id}" @@ -319,6 +323,7 @@ def computation_parameter(self) -> dict: "molecules": false, "version": 1 } + """ computation_parameter = {} for comp_attr in self.computation_attributes: diff --git a/mdsuite/database/simulation_database.py b/mdsuite/database/simulation_database.py index 60668fdb..4468c4cb 100644 --- a/mdsuite/database/simulation_database.py +++ b/mdsuite/database/simulation_database.py @@ -24,6 +24,7 @@ Summary ------- """ + import dataclasses import logging import pathlib @@ -56,6 +57,7 @@ class PropertyInfo: The name of the property n_dims: The dimensionality of the property + """ name: str @@ -76,6 +78,7 @@ class SpeciesInfo: properties: list of PropertyInfo List of the properties that were recorded for the species mass and charge are optional + """ name: str @@ -116,6 +119,7 @@ class MoleculeInfo(SpeciesInfo): water = {"groups": {"0": {"H": [0, 1], "O": [0]}} This tells us that the 0th water molecule consists of the 0th and 1st hydrogen atoms in the database as well as the 0th oxygen atom. + """ groups: dict = None @@ -158,6 +162,7 @@ class TrajectoryMetadata: simulation_data : str|Path, optional All other simulation data that can be extracted from the trajectory metadata. E.g. software version, pressure in NPT simulations, time step, ... 
+ """ n_configurations: int @@ -183,6 +188,7 @@ def __init__(self, species_list: List[SpeciesInfo], chunk_size: int): are recorded for each chunk_size : int The number of configurations to be stored in this chunk + """ self.chunk_size = chunk_size self.species_list = species_list @@ -197,6 +203,7 @@ def __init__(self, species_list: List[SpeciesInfo], chunk_size: int): def add_data(self, data: np.ndarray, config_idx, species_name, property_name): """ Add configuration data to the chunk + Parameters ---------- data: @@ -240,6 +247,7 @@ class Database: ---------- path : str|Path The name of the database_path in question. + """ def __init__(self, path: typing.Union[str, pathlib.Path] = "database"): @@ -250,6 +258,7 @@ def __init__(self, path: typing.Union[str, pathlib.Path] = "database"): ---------- path : str|Path The name of the database_path in question. + """ if isinstance(path, pathlib.Path): self.path = path.as_posix() @@ -340,6 +349,7 @@ def add_data(self, chunk: TrajectoryChunkData): a data chunk start_idx: Configuration at which to start writing. + """ workaround_time_in_axis_1 = True @@ -467,6 +477,7 @@ def add_dataset(self, architecture: dict): Returns ------- Updates the database_path directly. + """ with hf.File(self.path, "a") as database: for item in architecture: @@ -515,6 +526,7 @@ def _add_group_structure(self, structure: dict): Returns ------- Updates the database_path directly. + """ with hf.File(self.path, "a") as database: # Build file paths for the addition. @@ -534,6 +546,7 @@ def get_memory_information(self) -> dict: memory_database : dict A dictionary of the memory information of the groups in the database_path + """ with hf.File(self.path, "r") as database: memory_database = {} @@ -556,6 +569,7 @@ def check_existence(self, path: str) -> bool: ------- response : bool If true, the path exists, else, it does not. + """ with hf.File(self.path, "r") as database_object: keys = [] @@ -583,6 +597,7 @@ def change_key_names(self, mapping: dict): Returns ------- Updates the database_path + """ with hf.File(self.path, "r+") as db: groups = list(db.keys()) @@ -651,6 +666,7 @@ def get_load_time(self, database_path: str = None): ------- opening time : float Time taken to open and close the database_path + """ if database_path is None: start = time.time() @@ -679,6 +695,7 @@ def get_data_size(self, data_path: str) -> tuple: dataset_properties : tuple Tuple of tensor_values about the dataset, e.g. (n_rows, n_columns, n_bytes) + """ with hf.File(self.path, "r") as db: data_tuple = ( @@ -697,6 +714,7 @@ def get_database_summary(self): ------- summary : list A list of properties that are in the database. + """ with hf.File(self.path, "r") as db: return list(db.keys()) diff --git a/mdsuite/experiment/experiment.py b/mdsuite/experiment/experiment.py index 57167776..b89101b9 100644 --- a/mdsuite/experiment/experiment.py +++ b/mdsuite/experiment/experiment.py @@ -24,6 +24,7 @@ Summary ------- """ + import copy import importlib.resources import json @@ -129,6 +130,7 @@ class Experiment(ExperimentDatabase): read from the file and will be correct. number_of_atoms : int The total number of atoms in the simulation + """ def __init__( @@ -158,6 +160,7 @@ def __init__( If true, several parameters involved in plotting and parallelization will be adjusted so as to allow for optimal performance on a large computing cluster. 
+ """ if not name[0].isalpha(): raise ValueError( @@ -218,6 +221,7 @@ def run(self) -> RunComputation: ------- RunComputation: class that has all available calculators as properties + """ return RunComputation(experiment=self) @@ -277,6 +281,7 @@ def cls_transformation_run(self, transformation: Transformations, *args, **kwarg Parameters ---------- transformation: Transformations + """ transformation.experiment = self transformation.run_transformation(*args, **kwargs) @@ -299,6 +304,7 @@ def units_to_si(units_system) -> Units: ------- units: Units dataclass that contains the conversion factors to SI + """ if isinstance(units_system, Units): return units_system @@ -354,6 +360,7 @@ def run_visualization( Returns ------- Displays a visualization app. + """ import_error_msg = ( "It looks like you don't have the necessary plugin for " @@ -436,6 +443,7 @@ def set_charge(self, element: str, charge: float): Name of the element whose charge you want to change charge : list New charge/s of the element + """ species = self.species species[element].charge = [charge] @@ -451,6 +459,7 @@ def set_mass(self, element: str, mass: float): Name of the element whose mass you want to change mass : list New mass/es of the element + """ species = self.species species[element].mass = mass @@ -471,6 +480,7 @@ def add_data( format with its own reader or want to use non-default arguments for your reader, instantiate the reader and pass it to this method. TODO reference online documentation of data loading in the error messages + Parameters ---------- simulation_data : str or pathlib.Path or mdsuite.file_io.file_read.FileProcessor @@ -518,6 +528,7 @@ def _add_data_from_file_processor( been seen. update_with_pubchempy: bool Whether or not to look for the masses of the species in pubchempy + """ already_read = str(file_processor) in self.read_files if already_read and not force: @@ -577,6 +588,7 @@ def load_matrix( ------- property_matrix : np.array, tf.Tensor Tensor of the property to be studied. Format depends on kwargs. 
+ """ database = Database(self.database_path / "database.hdf5") @@ -604,6 +616,7 @@ def _store_metadata(self, metadata: TrajectoryMetadata, update_with_pubchempy=Fa metadata: TrajectoryMetadata update_with_pubchempy: bool Load data from pubchempy and add it to fill missing infomration + """ # new trajectory: store all metadata and construct a new database self.temperature = metadata.temperature @@ -689,6 +702,7 @@ def _species_list_to_architecture_dict(species_list, n_configurations): # TODO let the database handler use the species list directly instead of the dict """ converter from species list to legacy architecture dict + Parameters ---------- species_list @@ -697,6 +711,7 @@ def _species_list_to_architecture_dict(species_list, n_configurations): Returns ------- dict like architecture = {'Na':{'Positions':(n_part, n_config, n_dim)}} + """ architecture = {} for sp_info in species_list: diff --git a/mdsuite/experiment/run.py b/mdsuite/experiment/run.py index 3cd0a00d..23b098bb 100644 --- a/mdsuite/experiment/run.py +++ b/mdsuite/experiment/run.py @@ -9,6 +9,7 @@ Description: Collection of calculators / transformations for exp.run """ + from __future__ import annotations import functools @@ -70,6 +71,7 @@ def __init__( experiments: List[Experiment] A list of experiments passed by running the computation from the project class + """ self.experiment = experiment self.experiments = experiments @@ -93,6 +95,7 @@ def transformation_wrapper(self, func: Union[Type[Transformations], Any]): Parameters ---------- func: a transformation to be attached to the experiment/s + """ @functools.wraps(func.run_transformation) diff --git a/mdsuite/experiment/run_module.py b/mdsuite/experiment/run_module.py index 9aea3351..daed47f4 100644 --- a/mdsuite/experiment/run_module.py +++ b/mdsuite/experiment/run_module.py @@ -24,6 +24,7 @@ Summary ------- """ + from __future__ import annotations from typing import TYPE_CHECKING @@ -40,10 +41,12 @@ class RunModule: This class is a helper to convert the dictionary of possible computations "dict_classes_computations" into attributes of the `experiment.run_computation` helper class. + """ def __init__(self, parent, module_dict, **kwargs): """Initialize the attributes + Parameters ---------- parent: Experiment @@ -53,6 +56,7 @@ def __init__(self, parent, module_dict, **kwargs): operations with their names as keys kwargs: Additional parameters to be passed to the module_dict. + """ self.parent: Experiment = parent self._kwargs = kwargs @@ -68,6 +72,7 @@ def __getattribute__(self, item): Returns ------- Instantiated calculator class with added experiment that can be called. + """ if item.startswith("_"): # handle private functions diff --git a/mdsuite/file_io/chemfiles_read.py b/mdsuite/file_io/chemfiles_read.py index b71b097f..f7f20b7c 100644 --- a/mdsuite/file_io/chemfiles_read.py +++ b/mdsuite/file_io/chemfiles_read.py @@ -1,4 +1,5 @@ """Test MDSuites chemfiles read module.""" + import pathlib import typing @@ -34,6 +35,7 @@ def __init__( If the trajectory file does not contain all information about the topology of the system (i.e. which data in the trajectory file belongs to which particle), you can provide the topology here. + """ self.traj_file_path = pathlib.Path(traj_file_path).resolve() @@ -147,6 +149,7 @@ def _read_process_n_configurations( An open chemfiles Trajectory n_configs : int Number of configurations to read in. 
+ """ species_list = self.metadata.species_list chunk = mdsuite.database.simulation_database.TrajectoryChunkData( diff --git a/mdsuite/file_io/extxyz_files.py b/mdsuite/file_io/extxyz_files.py index 4e255863..0c3dc752 100644 --- a/mdsuite/file_io/extxyz_files.py +++ b/mdsuite/file_io/extxyz_files.py @@ -24,6 +24,7 @@ Summary ------- """ + import copy import logging import pathlib @@ -71,6 +72,7 @@ def __init__( example: custom_data_map = {"Reduced_Momentum": "redmom"}, if the file header contains "redmom:R:3" to point to the correct 3 columns containing the reduced momentum values + """ super(EXTXYZFile, self).__init__( file_path, @@ -233,6 +235,7 @@ def _get_time(header: str) -> float: ---------- header The extxyz header line as one string. + """ data = copy.deepcopy(header).split() time = None @@ -257,12 +260,14 @@ def _get_property_to_column_idx_dict( header to analyse var_names: dict of translations from MDsuite property names to extxyz property names + Returns ------- species_index: int The index of the column in which the species names are stored property_summary : dict A dictionary of properties and their location in the data file. + """ data = copy.deepcopy(header).split() properties_string = None diff --git a/mdsuite/file_io/file_read.py b/mdsuite/file_io/file_read.py index ed25ed27..495aae09 100644 --- a/mdsuite/file_io/file_read.py +++ b/mdsuite/file_io/file_read.py @@ -24,6 +24,7 @@ Summary ------- """ + from __future__ import annotations import abc @@ -74,6 +75,7 @@ def get_configurations_generator(self) -> typing.Iterator[TrajectoryChunkData]: Returns ------- generator that yields TrajectoryChunkData + """ raise NotImplementedError("File Processors must implement data loading") diff --git a/mdsuite/file_io/lammps_flux_files.py b/mdsuite/file_io/lammps_flux_files.py index 79fd45bd..7c44492e 100644 --- a/mdsuite/file_io/lammps_flux_files.py +++ b/mdsuite/file_io/lammps_flux_files.py @@ -24,6 +24,7 @@ Summary ------- """ + import pathlib import typing @@ -65,6 +66,7 @@ def __init__( Initialize the lammps flux reader. Since the flux file does not have a fixed expected content, you need to provide the necessary metadata (sample_rate, box_l) here manually + Parameters ---------- file_path @@ -82,6 +84,7 @@ def __init__( corresponding columns example: {"Thermal_Flux":["c_flux_thermal[1]","c_flux_thermal[2]","c_flux_thermal[3]"]}. 
+ """ super(LAMMPSFluxFile, self).__init__( file_path, diff --git a/mdsuite/file_io/lammps_trajectory_files.py b/mdsuite/file_io/lammps_trajectory_files.py index 939ae0ac..d4fbe33b 100644 --- a/mdsuite/file_io/lammps_trajectory_files.py +++ b/mdsuite/file_io/lammps_trajectory_files.py @@ -91,6 +91,7 @@ def __init__( example: custom_data_map = {"Reduced_Momentum": ["rp_x", "rp_y", "rp_z"]}, if the file contains columns labelled as 'rp_{x,y,z}' for the three components of the reduced momentum vector + """ super(LAMMPSTrajectoryFile, self).__init__( file_path, @@ -268,6 +269,7 @@ def extract_properties_from_header( A dict of the form {'MDSuite_Property_1': [column_indices], 'MDSuite_Property_2': ...} Example {'Unwrapped_Positions': [2,3,4], 'Velocities': [5,6,8]} + """ column_dict_properties = { variable: idx for idx, variable in enumerate(header_property_names) diff --git a/mdsuite/file_io/script_input.py b/mdsuite/file_io/script_input.py index 31fd5eca..d109c1aa 100644 --- a/mdsuite/file_io/script_input.py +++ b/mdsuite/file_io/script_input.py @@ -1,4 +1,5 @@ """MDSuite script input module.""" + import typing import mdsuite.file_io.file_read @@ -16,12 +17,14 @@ def __init__( ): """ Provide all the data needed for this class to act as a FileProcessor + Parameters ---------- data metadata name : A unique name for this dataset. Used to prevent multiple adding of the same data. + """ self.data = data self.mdata = metadata diff --git a/mdsuite/file_io/tabular_text_files.py b/mdsuite/file_io/tabular_text_files.py index 704d5ee5..008d9240 100644 --- a/mdsuite/file_io/tabular_text_files.py +++ b/mdsuite/file_io/tabular_text_files.py @@ -1,4 +1,5 @@ """MDSuite Tabular Text file reader module.""" + import abc import copy import dataclasses @@ -43,6 +44,7 @@ class TabularTextFileReaderMData: within a config if int: sort the lines in the config by the column with this index (e.g., use to sort by particle id in unsorted config output) + """ n_configs: int @@ -72,6 +74,7 @@ def __init__( custom_column_names. The result, self._column_name_dict is supposed to be used by child functions to create their TabularTextFileReaderData + Parameters ---------- file_path: @@ -85,6 +88,7 @@ def __init__( Dict connecting user-defined properties the column names. To be provided by the user. Example: {'MyMagicProperty':['MMP1', 'MMP2']}. + """ self.file_path = pathlib.Path(file_path).resolve() my_file_format_column_names = copy.deepcopy(file_format_column_names) @@ -165,6 +169,7 @@ def _read_process_n_configurations( ) -> mdsuite.database.simulation_database.TrajectoryChunkData: """ Read n configurations and package them into a trajectory chunk of the right format + Parameters ---------- file: @@ -175,6 +180,7 @@ def _read_process_n_configurations( Number of header lines PER CONFIG ------- The chunk for your reader output. + """ species_list = self.metadata.species_list chunk = mdsuite.database.simulation_database.TrajectoryChunkData( @@ -224,9 +230,11 @@ def read_n_lines(file, n_lines: int, start_at: int = None) -> list: """ Get n_lines lines, starting at line number start_at. If start_at is None, read from the current file state + Returns ------- A list of strings, one string for each line. 
+ """ if start_at is not None: file.seek(0) @@ -237,6 +245,7 @@ def read_n_lines(file, n_lines: int, start_at: int = None) -> list: def skip_n_lines(file, n_lines: int) -> None: """ skip n_lines in file + Parameters ---------- file: the file where we skip lines @@ -245,6 +254,7 @@ def skip_n_lines(file, n_lines: int) -> None: Returns ------- Nothing + """ for _ in range(n_lines): next(file) diff --git a/mdsuite/graph_modules/molecular_graph.py b/mdsuite/graph_modules/molecular_graph.py index 2e9b1afc..e28a9093 100644 --- a/mdsuite/graph_modules/molecular_graph.py +++ b/mdsuite/graph_modules/molecular_graph.py @@ -24,6 +24,7 @@ Summary ------- """ + from __future__ import annotations import logging @@ -54,6 +55,7 @@ class MolecularGraph: ---------- reference_property : str MDSuite property to use for reference during the unwrapping. + """ molecular_mass: float @@ -122,6 +124,7 @@ def _get_molecular_mass(self): mass : float mass of the molecule + """ self.molecular_mass = 0.0 for species, number in self.species.items(): @@ -138,6 +141,7 @@ def build_configuration_graph(self) -> tf.Tensor: adjacency_matrix : tf.Tensor An adjacency matrix for the configuration describing which atoms are bonded to which others. + """ path_list = [ join_path(species, self.reference_property.name) for species in self.species @@ -186,6 +190,7 @@ def _perform_graph_decomposition(self, adjacency_matrix: tf.Tensor) -> dict: reduced_graphs : dict A dict of sub graphs constructed from the decomposition of the adjacency matrix. Of the form {'0': [], '1': []} + """ # TODO: wrap this in an optimizer to iteratively improve the cutoff until the # number is correct. @@ -249,6 +254,7 @@ def _amount_isomorphism_test(self): Returns ------- Returns nothing, raises a value error if condition is not met. + """ log.info("Performing molecule number isomorphism test.") # number of molecules test @@ -271,6 +277,7 @@ def _molecule_group_equality_isomorphism_test(self): Returns ------- Nothing, will raise an exception if the test fails. + """ log.info("Performing group equality isomorphism test.") for mol_number, mol_data in self.molecular_groups.items(): @@ -296,6 +303,7 @@ def _adjacency_graph_isomorphism_test(self): Notes ----- This must be implemented, however, will be quite an expensive operation. + """ raise NotImplementedError @@ -314,6 +322,7 @@ def _split_decomposed_graphs(self, graph_dict: dict) -> dict: group_dict : dict A dictionary of atoms and indices that specify that indices of this species is in a molecule. + """ particle_groups = {} for item in graph_dict: @@ -357,6 +366,7 @@ def build_smiles_graph(smiles_string: str) -> tuple: Graph object returned by PySmiles species : dict A dict object containing species information about the molecule. + """ mol = read_smiles(smiles_string, explicit_hydrogen=True) data = mol.nodes @@ -395,6 +405,7 @@ def _apply_system_cutoff(input_tensor: tf.Tensor, cutoff: float) -> tf.Tensor: A tensor of ones and zeros where 1s corresponded to 'bonded' particles and 0s indicated no bonding. Note, the diagonals of this tensor are set to 0 as a particle cannot bond itself. 
+ """ cutoff_mask = tf.cast( tf.less(input_tensor, cutoff), dtype=tf.int16 diff --git a/mdsuite/memory_management/__init__.py b/mdsuite/memory_management/__init__.py index 4df5406b..5569909d 100644 --- a/mdsuite/memory_management/__init__.py +++ b/mdsuite/memory_management/__init__.py @@ -24,6 +24,7 @@ Summary ------- """ + from .memory_manager import MemoryManager __all__ = ["MemoryManager"] diff --git a/mdsuite/memory_management/memory_manager.py b/mdsuite/memory_management/memory_manager.py index f0ce2e07..4f865e64 100644 --- a/mdsuite/memory_management/memory_manager.py +++ b/mdsuite/memory_management/memory_manager.py @@ -24,6 +24,7 @@ Summary ------- """ + import logging from typing import Tuple @@ -64,6 +65,7 @@ class MemoryManager: memory_fraction : float scale_function : dict gpu : bool + """ def __init__( @@ -99,6 +101,7 @@ def __init__( If data is being loaded from a non-zero point in the database the offset is used to take this into account. For example, expanding a transformation. + """ if scale_function is None: scale_function = {"linear": {"scale_factor": 10}} @@ -145,6 +148,7 @@ def _select_scale_function(input_dict: dict): Returns ------- Updates the class state + """ def _string_to_function(argument: str): @@ -159,6 +163,7 @@ def _string_to_function(argument: str): Returns ------- function : Callable + """ switcher = { "linear": linear_scale_function, @@ -192,6 +197,7 @@ def get_batch_size(self) -> tuple: remainder : int number of elements that will be left unloaded after a loop over all batches. This amount can then be loaded to collect unused tensor_values. + """ if self.data_path is None: raise ValueError("No tensor_values have been requested.") @@ -232,6 +238,7 @@ def hdf5_load_time(n: int): ------- load_time : float Load time of N data points from a hdf5 database. + """ return np.log(n) @@ -250,6 +257,7 @@ def _get_optimal_batch_size(naive_size): ------- batch_size : int An optimized batch size + """ # db_io_time = self.database.get_load_time() return naive_size @@ -276,6 +284,7 @@ def _compute_atomwise_minibatch(self, data_range: int): Number of batches over atoms. self.atom_remainder : int Remainder atoms after even batching. + """ per_atom_memory = 0 # memory usage per atom within ONE configuration per_configuration_memory = 0 # per configuration memory usage @@ -355,11 +364,13 @@ def get_ensemble_loop( Data range to be used in the analysis. correlation_time : int Correlation time to be considered when looping over the tensor_values + Returns ------- data_range_partitions : int Number of time the batch can be looped over with given data_range and correlation time. + """ final_window = self.batch_size - data_range if final_window < 0: diff --git a/mdsuite/project/project.py b/mdsuite/project/project.py index f5a4666c..4a9a8a60 100644 --- a/mdsuite/project/project.py +++ b/mdsuite/project/project.py @@ -21,6 +21,7 @@ ------- Parent class for the project. """ + from __future__ import annotations import logging @@ -79,6 +80,7 @@ class is saved and updated after each operation in order to retain the experiments : dict A dict of class objects. Class objects are instances of the experiment class for different experiments. + """ def __init__( @@ -101,6 +103,7 @@ def __init__( storage_path : str Where to store the tensor_values and databases. This should be a place with sufficient storage space for the full analysis. + """ super().__init__() if name is None: @@ -151,6 +154,7 @@ def __str__(self): ------- str: A list of all available experiments like "1.) Exp01\n2.) 
Exp02\n3.) Exp03" + """ return "\n".join([f"{exp.id}.) {exp.name}" for exp in self.db_experiments]) @@ -259,6 +263,7 @@ def activate_experiments(self, names: Union[str, list]): Returns ------- Updates the class state. + """ if isinstance(names, str): names = [names] @@ -273,6 +278,7 @@ def disable_experiments(self, names: Union[str, list]): ---------- names: Name or list of names of experiments that should be instantiated and loaded into self.experiments + Returns ------- @@ -297,9 +303,11 @@ def add_data(self, data_sets: dict): values: str or mdsuite.file_io.file_read.FileProcessor refer to mdsuite.experiment.add_data() for an explanation of the file specification options + Returns ------- Updates the experiment classes. + """ for key, val in data_sets.items(): self.experiments[key].add_data(val) @@ -312,6 +320,7 @@ def run(self) -> RunComputation: ------- RunComputation: class that has all available calculators as properties + """ return RunComputation(experiments=list(self.active_experiments.values())) diff --git a/mdsuite/time_series/__init__.py b/mdsuite/time_series/__init__.py index 8a4b0600..4498dc49 100644 --- a/mdsuite/time_series/__init__.py +++ b/mdsuite/time_series/__init__.py @@ -24,6 +24,7 @@ Summary ------- """ + from .energies import Energies time_series_dict = {"Energies": Energies} diff --git a/mdsuite/time_series/base.py b/mdsuite/time_series/base.py index 89be20f6..9a3f40e9 100644 --- a/mdsuite/time_series/base.py +++ b/mdsuite/time_series/base.py @@ -24,6 +24,7 @@ Summary ------- """ + from __future__ import annotations from typing import TYPE_CHECKING @@ -54,6 +55,7 @@ def __init__(self, experiment: Experiment): ---------- experiment: Experiment The parent experiment class to perform the time series operation on + """ self.experiment = experiment diff --git a/mdsuite/time_series/energies.py b/mdsuite/time_series/energies.py index a7e4da65..ca0b75b2 100644 --- a/mdsuite/time_series/energies.py +++ b/mdsuite/time_series/energies.py @@ -24,6 +24,7 @@ Summary ------- """ + from __future__ import annotations from typing import TYPE_CHECKING diff --git a/mdsuite/transformations/__init__.py b/mdsuite/transformations/__init__.py index effd50ac..7fb4808e 100644 --- a/mdsuite/transformations/__init__.py +++ b/mdsuite/transformations/__init__.py @@ -24,6 +24,7 @@ Summary ------- """ + from .integrated_heat_current import IntegratedHeatCurrent from .ionic_current import IonicCurrent from .kinaci_integrated_heat_current import KinaciIntegratedHeatCurrent diff --git a/mdsuite/transformations/integrated_heat_current.py b/mdsuite/transformations/integrated_heat_current.py index 56334f9b..9a1e9e99 100644 --- a/mdsuite/transformations/integrated_heat_current.py +++ b/mdsuite/transformations/integrated_heat_current.py @@ -24,6 +24,7 @@ Summary ------- """ + import typing import tensorflow as tf diff --git a/mdsuite/transformations/ionic_current.py b/mdsuite/transformations/ionic_current.py index f8261dd8..ddf15b64 100644 --- a/mdsuite/transformations/ionic_current.py +++ b/mdsuite/transformations/ionic_current.py @@ -24,6 +24,7 @@ Summary ------- """ + import typing import tensorflow as tf diff --git a/mdsuite/transformations/kinaci_integrated_heat_current.py b/mdsuite/transformations/kinaci_integrated_heat_current.py index 0fb01bd6..d52ee08d 100644 --- a/mdsuite/transformations/kinaci_integrated_heat_current.py +++ b/mdsuite/transformations/kinaci_integrated_heat_current.py @@ -24,6 +24,7 @@ Summary ------- """ + import typing import numpy as np diff --git 
a/mdsuite/transformations/map_molecules.py b/mdsuite/transformations/map_molecules.py index 697735f1..7d1abb6f 100644 --- a/mdsuite/transformations/map_molecules.py +++ b/mdsuite/transformations/map_molecules.py @@ -24,6 +24,7 @@ Summary ------- """ + import logging from typing import List @@ -58,6 +59,7 @@ class MolecularMap(Transformations): {'smiles': 'F[P-](F)(F)(F)(F)F', 'amount': 20}} would be the input for the emim-PF6 ionic liquid. + """ def __init__(self): @@ -85,6 +87,7 @@ def _prepare_database_entry(self, species: str, number_of_molecules: int) -> dic ------- data_structure : dict A data structure for the incoming data. + """ # collect machine properties and determine batch size path = join_path(species, self.mapping_property.name) @@ -111,6 +114,7 @@ def _run_dependency_check(self): Returns ------- Calls a resolve method if dependencies are not met. + """ for sp_name in self.experiment.species: path = join_path(sp_name, self.dependency.name) @@ -130,6 +134,7 @@ def _get_mass_array(self, species: list) -> list: ------- mass_array : list A list of masses. + """ return [self.experiment.species[item].mass for item in species] @@ -145,6 +150,7 @@ def _get_type_spec(self, path_list: list) -> dict: Returns ------- type_spec : dict + """ type_spec = {} for item in path_list: @@ -176,6 +182,7 @@ def _get_reduced_mass_dict(self, species: dict, molecular_mass) -> dict: ------- reduced_mass_dict : dict Dictionary of reduced masses for each species. + """ reduced_mass_dict = {} for item in species: @@ -192,6 +199,7 @@ def _map_molecules(self, molecular_graph: MolecularGraph): Returns ------- Updates the database. + """ molecule_name = molecular_graph.molecule_name molecules = self.experiment.molecules @@ -272,6 +280,7 @@ def run_transformation(self, molecules: List[Molecule]): Returns ------- Update the experiment database. 
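For context on the reduced-mass dictionary above: each molecule is condensed onto a single mass-weighted site, i.e. a centre of mass built from the mass fractions of its constituent atoms. A minimal sketch for one molecule (unwrapped coordinates assumed; names are illustrative and the actual mapping also handles per-molecule atom groups):

    import numpy as np

    def molecule_centre_of_mass(positions: np.ndarray, masses: np.ndarray) -> np.ndarray:
        # positions: (n_atoms, 3), masses: (n_atoms,); weights are the mass fractions
        weights = masses / masses.sum()
        return weights @ positions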
+ """ self._run_dependency_check() diff --git a/mdsuite/transformations/momentum_flux.py b/mdsuite/transformations/momentum_flux.py index df2d89d9..d10bae2b 100644 --- a/mdsuite/transformations/momentum_flux.py +++ b/mdsuite/transformations/momentum_flux.py @@ -24,6 +24,7 @@ Summary ------- """ + import typing import tensorflow as tf diff --git a/mdsuite/transformations/scale_coordinates.py b/mdsuite/transformations/scale_coordinates.py index 6275d6a8..80d0de73 100644 --- a/mdsuite/transformations/scale_coordinates.py +++ b/mdsuite/transformations/scale_coordinates.py @@ -24,6 +24,7 @@ Summary ------- """ + import typing import tensorflow as tf diff --git a/mdsuite/transformations/test_trafos.py b/mdsuite/transformations/test_trafos.py index c7a089b4..ff9b39d1 100644 --- a/mdsuite/transformations/test_trafos.py +++ b/mdsuite/transformations/test_trafos.py @@ -24,6 +24,7 @@ Summary ------- """ + import typing import numpy as np diff --git a/mdsuite/transformations/thermal_flux.py b/mdsuite/transformations/thermal_flux.py index 015c6f4c..5c7dd51f 100644 --- a/mdsuite/transformations/thermal_flux.py +++ b/mdsuite/transformations/thermal_flux.py @@ -24,6 +24,7 @@ Summary ------- """ + import typing import numpy as np diff --git a/mdsuite/transformations/transformation_dict.py b/mdsuite/transformations/transformation_dict.py index 43c51ed2..a2f4119f 100644 --- a/mdsuite/transformations/transformation_dict.py +++ b/mdsuite/transformations/transformation_dict.py @@ -24,6 +24,7 @@ Summary ------- """ + from mdsuite.database.mdsuite_properties import mdsuite_properties as mdp from mdsuite.transformations import ( CoordinateUnwrapper, diff --git a/mdsuite/transformations/transformations.py b/mdsuite/transformations/transformations.py index 58baf090..d27d49d1 100644 --- a/mdsuite/transformations/transformations.py +++ b/mdsuite/transformations/transformations.py @@ -25,6 +25,7 @@ ------- Parent class for the transformations. """ + from __future__ import annotations import abc @@ -83,6 +84,7 @@ class Transformations: data manager for handling the data transfer memory_manager : MemoryManager memory manager for the computation. + """ def __init__( @@ -111,6 +113,7 @@ def __init__( specifies memory requirements of the transformation dtype : data type of the processed values + """ self._experiment = None self._database = None @@ -165,6 +168,7 @@ def _run_dataset_check(self, path: str): outcome : bool If True, the dataset already exists and should be extended. If False, a new dataset should be built. + """ return self.database.check_existence(path) @@ -179,9 +183,11 @@ def _save_output( # todo for the future: this should not be part of the transformation. # the transformation should yield a batch and the experiment should take care of # storing it in the correct place, just as with file inputs + Returns ------- saves the tensor_values to the database_path. + """ # turn data into trajectory chunk # data_structure is dict {'/path/to/property':{'indices':irrelevant, @@ -285,6 +291,7 @@ def _prepare_database_entry(self, species: str, system_tensor=False): ------- tensor_values structure for use in saving the tensor_values to the database_path. + """ if system_tensor: output_length = 1 @@ -447,6 +454,7 @@ def run_transformation(self, species: typing.Iterable[str] = None): """ Perform the batching and data loading for the transformation, then calls transform_batch + Parameters ---------- species : Iterable[str] @@ -540,6 +548,7 @@ def transform_batch( Or tuple of (, ), where the carryover can have any type. 
The carryover will be used as the optional argument for the next batch + """ raise NotImplementedError("transformation of a batch must be implemented") @@ -554,6 +563,7 @@ def run_transformation(self, species: typing.Iterable[str] = None) -> None: """ Perform the batching and data loading for the transformation, then calls transform_batch + Parameters ---------- species : Iterable[str] diff --git a/mdsuite/transformations/translational_dipole_moment.py b/mdsuite/transformations/translational_dipole_moment.py index 496e2ded..b765008e 100644 --- a/mdsuite/transformations/translational_dipole_moment.py +++ b/mdsuite/transformations/translational_dipole_moment.py @@ -24,6 +24,7 @@ Summary ------- """ + import typing import tensorflow as tf diff --git a/mdsuite/transformations/unwrap_coordinates.py b/mdsuite/transformations/unwrap_coordinates.py index 4b177de1..ed66bba6 100644 --- a/mdsuite/transformations/unwrap_coordinates.py +++ b/mdsuite/transformations/unwrap_coordinates.py @@ -24,6 +24,7 @@ Summary ------- """ + import typing import tensorflow as tf diff --git a/mdsuite/transformations/unwrap_via_indices.py b/mdsuite/transformations/unwrap_via_indices.py index 53224e90..d94f12e5 100644 --- a/mdsuite/transformations/unwrap_via_indices.py +++ b/mdsuite/transformations/unwrap_via_indices.py @@ -24,6 +24,7 @@ Summary ------- """ + import typing import tensorflow as tf diff --git a/mdsuite/transformations/velocity_from_positions.py b/mdsuite/transformations/velocity_from_positions.py index 1f294d3b..924b921d 100644 --- a/mdsuite/transformations/velocity_from_positions.py +++ b/mdsuite/transformations/velocity_from_positions.py @@ -12,6 +12,7 @@ ------- Calculate the velocity of particles from their positions """ + import typing import tensorflow as tf diff --git a/mdsuite/transformations/wrap_coordinates.py b/mdsuite/transformations/wrap_coordinates.py index 8c904843..25fb6b86 100644 --- a/mdsuite/transformations/wrap_coordinates.py +++ b/mdsuite/transformations/wrap_coordinates.py @@ -46,6 +46,7 @@ def __init__(self, center_box: bool = True): if True (default): coordinates are wrapped to [-L/2 , L/2] if False: coordinates are wrapped to [0 , L], where L is the box size. + """ super(CoordinateWrapper, self).__init__( input_properties=[ diff --git a/mdsuite/utils/calculator_helper_methods.py b/mdsuite/utils/calculator_helper_methods.py index 7e8001e6..540ad1a6 100644 --- a/mdsuite/utils/calculator_helper_methods.py +++ b/mdsuite/utils/calculator_helper_methods.py @@ -25,6 +25,7 @@ ------- Static methods used in calculators are kept here rather than polluting the parent class. """ + import logging from typing import Any, Iterable, Tuple, Union @@ -59,6 +60,7 @@ def fit_einstein_curve( List of fit values pcov : list Covariance matrix of the fit values. + """ # Defined here for completeness. popt = [] @@ -81,6 +83,7 @@ def func(x, m, a): Returns ------- m * x + a + """ return m * x + a @@ -123,6 +126,7 @@ def correlate(ds_a: np.ndarray, ds_b: np.ndarray) -> np.ndarray: ------- Computes the correlation between the two data sets and averages over the spatial dimension. + """ def _correlate_op(a: np.ndarray, b: np.ndarray): @@ -139,6 +143,7 @@ def _correlate_op(a: np.ndarray, b: np.ndarray): Returns ------- correlation over a single dimension. 
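For reference, fit_einstein_curve above fits the linear model m * x + a to the diffusive regime of an Einstein-type observable; in three dimensions the diffusion coefficient then follows from the slope as D = m / 6. A hedged sketch assuming the supplied window is already linear (names are illustrative):

    import numpy as np
    from scipy.optimize import curve_fit

    def einstein_diffusion(time: np.ndarray, msd: np.ndarray) -> float:
        def linear(x, m, a):
            return m * x + a
        (slope, _offset), _cov = curve_fit(linear, time, msd)
        return slope / 6.0  # D = slope / (2 * dimensionality), here dimensionality = 3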
+ """ return jnp.correlate(a, b, mode="full") diff --git a/mdsuite/utils/colours.py b/mdsuite/utils/colours.py index 92f48a13..b6d00853 100644 --- a/mdsuite/utils/colours.py +++ b/mdsuite/utils/colours.py @@ -1,4 +1,5 @@ """MDSuite colours module.""" + import dataclasses diff --git a/mdsuite/utils/config.py b/mdsuite/utils/config.py index ed67728e..8fad6d43 100644 --- a/mdsuite/utils/config.py +++ b/mdsuite/utils/config.py @@ -24,6 +24,7 @@ Summary ------- """ + from dataclasses import dataclass @@ -40,6 +41,7 @@ class Config: If true, jupyter is being used. memory_fraction: bool The portion of the available memory to be used. + """ jupyter: bool = False diff --git a/mdsuite/utils/constants.py b/mdsuite/utils/constants.py index 79844774..c82a409e 100644 --- a/mdsuite/utils/constants.py +++ b/mdsuite/utils/constants.py @@ -24,6 +24,7 @@ Summary ------- """ + import dataclasses diff --git a/mdsuite/utils/helpers.py b/mdsuite/utils/helpers.py index 9a5d3495..12a7860f 100644 --- a/mdsuite/utils/helpers.py +++ b/mdsuite/utils/helpers.py @@ -9,6 +9,7 @@ Description: """ + import contextlib import numpy as np @@ -25,6 +26,7 @@ class NoneType: >>> x = NoneType >>> x is NoneType >>> x is not None + """ def __init__(self): @@ -47,6 +49,7 @@ def compute_memory_fraction(desired_memory: float, total_memory: float = None): memory fraction : float What fraction of the current systems memory this value corresponds to. If this number is above 1, it is clipped to 1 + """ if total_memory is None: total_memory = get_machine_properties()["memory"] / (1024.0**3) @@ -69,6 +72,7 @@ def change_memory_fraction(desired_memory): ------ environment where the 'config.memory_fraction' is adapted in regard to the desired_memory. + """ import mdsuite diff --git a/mdsuite/utils/linalg.py b/mdsuite/utils/linalg.py index dc1417d6..a64ae6b5 100644 --- a/mdsuite/utils/linalg.py +++ b/mdsuite/utils/linalg.py @@ -67,6 +67,7 @@ def get_angles(r_ij_mat, indices, acos=True): Returns ------- tf.Tensor: Tensor with the shape (triples) + """ r_ij = tf.gather_nd( r_ij_mat, tf.stack([indices[:, 0], indices[:, 1], indices[:, 2]], axis=1) @@ -130,6 +131,7 @@ def apply_system_cutoff(tensor: tf.Tensor, cutoff: float) -> tf.Tensor: ---------- tensor : tf.Tensor cutoff : flaot + """ cutoff_mask = tf.cast(tf.less(tensor, cutoff), dtype=tf.bool) # Construct the mask @@ -166,6 +168,7 @@ def cartesian_to_spherical_coordinates( A tensor of shape `[A1, ..., An, 3]`. The last dimensions contains (`r`,`theta`,`phi`), where `r` is the sphere radius, `theta` is the polar angle and `phi` is the azimuthal angle. Returns `NaN` gradient if x = y = 0. + """ with tf.name_scope(name): point_cartesian = tf.convert_to_tensor(value=point_cartesian) @@ -207,6 +210,7 @@ def spherical_to_cartesian_coordinates( ------- A tensor of shape `[A1, ..., An, 3]`, where the last dimension contains the cartesian coordinates in x,y,z order. + """ with tf.name_scope(name): point_spherical = tf.convert_to_tensor(value=point_spherical) diff --git a/mdsuite/utils/meta_functions.py b/mdsuite/utils/meta_functions.py index 2b988fd7..78773ed1 100644 --- a/mdsuite/utils/meta_functions.py +++ b/mdsuite/utils/meta_functions.py @@ -62,6 +62,7 @@ def is_jsonable(x: dict) -> bool: Returns ------- bool: Whether the dict was serializable or not. 
+ """ try: json.dumps(x) @@ -107,6 +108,7 @@ def get_dimensionality(box: list) -> int: dimensions : int dimension of the box i.e, 1 or 2 or 3 (Higher dimensions probably don't make sense just yet) + """ # Check if the x, y, or z entries are empty, i.e. 2 dimensions if box[0] == 0 or box[1] == 0 or box[2] == 0: @@ -137,6 +139,7 @@ def get_machine_properties() -> dict: ------- machine_properties : dict A dictionary containing information about the hardware being used. + """ machine_properties = {} available_memory = psutil.virtual_memory().available # RAM available @@ -175,6 +178,7 @@ def line_counter(filename: str) -> int: ------- lines : int Number of lines in the file + """ f = open(filename, "rb") num_lines = sum(1 for _ in f) @@ -213,6 +217,7 @@ def optimize_batch_size( ------- batch size : int Number of configurations to load in each batch + """ if test: file_size = _file_size @@ -259,6 +264,7 @@ def linear_fitting_function(x: np.array, a: float, b: float) -> np.array: ------- a*x + b : float Returns the evaluation of a linear function. + """ return a * x + b @@ -280,6 +286,7 @@ def simple_file_read(filename: str) -> list: ------- data_array: list Data read in by the function. + """ data_array = [] # define empty tensor_values array with open(filename, "r+") as f: # Open the file for reading @@ -309,6 +316,7 @@ def timeit(f: Callable) -> Callable: ----- There is currently no test for this wrapper as there is no simple way of checking timing on a remote server. + """ @wraps(f) @@ -351,6 +359,7 @@ def apply_savgol_filter( ----- There are no tests for this method as a test would simply be testing the scipy implementation which they have done. + """ return savgol_filter(data, window_length, order) @@ -408,6 +417,7 @@ def golden_section_search( ------- minimum range : tuple Returns two radii values within which the minimum can be found. + """ # Define the golden ratio identities phi_a = 1 / golden_ratio @@ -455,6 +465,7 @@ def get_nearest_divisor(a: int, b: int) -> int: ------- divisor : int nearest number to a that divides into b evenly. + """ remainder = 1 # initialize a remainder a += 1 @@ -468,6 +479,7 @@ def get_nearest_divisor(a: int, b: int) -> int: def split_array(data: np.array, condition: np.array) -> list: """ split an array by a condition + Parameters ---------- data : np.array @@ -479,6 +491,7 @@ def split_array(data: np.array, condition: np.array) -> list: ------- split_array : list A list of split up arrays. + """ initial_split = [data[condition], data[~condition]] # attempt to split the array @@ -506,6 +519,7 @@ def find_item(obj, key): item: dict value. returns the value for the given key. Return type may change depending on the requested key + """ if key in obj: return obj[key] diff --git a/mdsuite/utils/molecule.py b/mdsuite/utils/molecule.py index f20c30cb..9e089430 100644 --- a/mdsuite/utils/molecule.py +++ b/mdsuite/utils/molecule.py @@ -25,6 +25,7 @@ ------- Module for the MDSuite molecule dataclass """ + from dataclasses import dataclass @@ -56,6 +57,7 @@ class Molecule: mol_pbc : bool If true, the simulation that was run was using molecule-based PBC, i.e. molecules were not allowed to break in the simulation. 
+ """ name: str diff --git a/mdsuite/utils/neighbour_list.py b/mdsuite/utils/neighbour_list.py index 398a8b53..b94c92a7 100644 --- a/mdsuite/utils/neighbour_list.py +++ b/mdsuite/utils/neighbour_list.py @@ -42,6 +42,7 @@ def get_triu_indicies(n_atoms): ------- Returns a vector of size (2, None) instead of a tuple of two values like np.triu_indices + """ bool_mat = tf.ones((n_atoms, n_atoms), dtype=tf.bool) # Just construct a boolean true matrix the size of one time_step diff --git a/mdsuite/utils/report_computer_characteristics.py b/mdsuite/utils/report_computer_characteristics.py index 64745427..47ed503c 100644 --- a/mdsuite/utils/report_computer_characteristics.py +++ b/mdsuite/utils/report_computer_characteristics.py @@ -24,6 +24,7 @@ Summary ------- """ + import re from os import path from pathlib import Path diff --git a/mdsuite/utils/scale_functions.py b/mdsuite/utils/scale_functions.py index a9ad0823..28dc3d0d 100644 --- a/mdsuite/utils/scale_functions.py +++ b/mdsuite/utils/scale_functions.py @@ -24,6 +24,7 @@ Summary ------- """ + import numpy as np @@ -43,6 +44,7 @@ def linear_scale_function(memory_usage: int, scale_factor: int = 1) -> int: ------- scaled_memory : int Amount of memory required per configuration loaded. + """ return memory_usage * scale_factor @@ -63,6 +65,7 @@ def linearithmic_scale_function(memory_usage: int, scale_factor: int = 1) -> flo ------- scaled_memory : float Amount of memory required per configuration loaded. + """ return scale_factor * memory_usage * np.log(memory_usage) @@ -81,10 +84,12 @@ def quadratic_scale_function( Scalar scaling factor for the inner multiplication outer_scale_factor : int Scalar scaling factor for the outer multiplication + Returns ------- scaled_memory : int Amount of memory required per configuration loaded. + """ return outer_scale_factor * (memory_usage * inner_scale_factor) ** 2 @@ -113,5 +118,6 @@ def polynomial_scale_function( ------- scaled_memory : int Amount of memory required per configuration loaded. + """ return outer_scale_factor * (memory_usage * inner_scale_factor) ** order diff --git a/mdsuite/utils/tensor_flow/layers.py b/mdsuite/utils/tensor_flow/layers.py index d6f00734..f632f4d4 100644 --- a/mdsuite/utils/tensor_flow/layers.py +++ b/mdsuite/utils/tensor_flow/layers.py @@ -24,6 +24,7 @@ Summary ------- """ + import tensorflow as tf from .helpers import triu_indices as compute_triu @@ -38,6 +39,7 @@ def __init__(self, dense: bool = True, **kwargs): ---------- dense: bool Return the flat_rij or a dense r_ij + """ super().__init__(**kwargs) self.dense = dense diff --git a/mdsuite/utils/testing.py b/mdsuite/utils/testing.py index 81898832..3a83d261 100644 --- a/mdsuite/utils/testing.py +++ b/mdsuite/utils/testing.py @@ -24,6 +24,7 @@ Summary ------- """ + import multiprocessing import traceback diff --git a/mdsuite/utils/units.py b/mdsuite/utils/units.py index 6ff57aa1..dccb7c6f 100644 --- a/mdsuite/utils/units.py +++ b/mdsuite/utils/units.py @@ -24,6 +24,7 @@ Summary ------- """ + from dataclasses import dataclass standard_state_pressure = 100000 # Pa -- Standard state pressure diff --git a/mdsuite/visualizer/d2_data_visualization.py b/mdsuite/visualizer/d2_data_visualization.py index 0f463f6b..d3811fa6 100644 --- a/mdsuite/visualizer/d2_data_visualization.py +++ b/mdsuite/visualizer/d2_data_visualization.py @@ -24,6 +24,7 @@ Summary ------- """ + import pathlib from typing import List, Union @@ -49,6 +50,7 @@ def __init__(self, title: str, path: pathlib.Path): title of the plot. 
path : pathlib.Path path to the saving directory of the plot + """ if config.jupyter: output_notebook() @@ -84,6 +86,7 @@ def construct_plot( ------- figure : figure A bokeh figure object. + """ fig = figure( x_axis_label=x_label, diff --git a/mdsuite/visualizer/d3_data_visualizer.py b/mdsuite/visualizer/d3_data_visualizer.py index 8dffcd21..2c7433bc 100644 --- a/mdsuite/visualizer/d3_data_visualizer.py +++ b/mdsuite/visualizer/d3_data_visualizer.py @@ -25,6 +25,7 @@ ------- Module for the MDSuite 3d visualizer. """ + import importlib.resources import json from typing import Union @@ -59,6 +60,7 @@ def __init__( title of the plot. colour_map : np.ndarray A colour map to apply to the data. + """ self.data = data self.title = title @@ -90,6 +92,7 @@ def _build_app(self): Returns ------- Updates the class. + """ self.app = gui.Application.instance self.app.initialize() @@ -115,6 +118,7 @@ def _get_atom_properties(element: str) -> dict: data : dict A dictionary of data to use for the rendering: e.g. {'colour': (0.7, 0.33, 0.0), 'mass': 0.8) + """ data = {} data_name = "mdsuite.data" @@ -142,6 +146,7 @@ def _add_center(self): Returns ------- Updates the plot. + """ if type(self.center) is str: self._add_single_center() @@ -171,6 +176,7 @@ def _add_group_center(self): Returns ------- Adds a group of particles to the center. + """ translation = np.array([0, 0, 0]) mass = 0.0 diff --git a/mdsuite/visualizer/znvis_visualizer.py b/mdsuite/visualizer/znvis_visualizer.py index 5b9ab2d9..af4413ba 100644 --- a/mdsuite/visualizer/znvis_visualizer.py +++ b/mdsuite/visualizer/znvis_visualizer.py @@ -25,6 +25,7 @@ ------- Module implementing the ZnVis visualizer in MDSuite. """ + import importlib.resources import json @@ -61,6 +62,7 @@ def __init__( Frame rate at which to run the visualization in frames per second. database_path : str Database path from the experiment. + """ self.counter = 0 # Particle information @@ -88,6 +90,7 @@ def _get_species_properties(species: str): RBG array of colours. radius : float Radius of the particles. This is a reduced mass. + """ # Load the species data from pubchempy data file. pse = json.loads( @@ -111,6 +114,7 @@ def _prepare_species(self): ------- particle_list : list[znvis.Particle] A list of particle objects. + """ particle_list = [] for item in self.species: @@ -134,6 +138,7 @@ def run_visualization(self): Returns ------- Opens the ZnVis app and runs the visualization. + """ particle_list = self._prepare_species() visualizer = znvis.Visualizer(particles=particle_list, frame_rate=24) diff --git a/setup.py b/setup.py index 999d6a63..e5c01325 100644 --- a/setup.py +++ b/setup.py @@ -24,6 +24,7 @@ Summary ------- """ + from pathlib import Path import setuptools
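# --- Illustrative sketch, not part of the patch above ---------------------------
# construct_plot in mdsuite/visualizer/d2_data_visualization.py builds a Bokeh
# figure with labelled axes (and calls output_notebook() first when running under
# Jupyter, as the hunk above shows).  A minimal stand-alone Bokeh plot in the same
# spirit; the axis labels and data here are invented for the example.
from bokeh.plotting import figure, show

fig = figure(x_axis_label="Time / ps", y_axis_label="MSD / nm^2", title="Example")
fig.line([0, 1, 2, 3], [0.0, 0.4, 0.9, 1.3], legend_label="hypothetical data")
show(fig)  # opens the plot in a browser, or inline in a notebook if configured
# --------------------------------------------------------------------------------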