From bcf4097fdeea6e5f61d48dd7e34e37ba2d861845 Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Sat, 26 Oct 2024 09:30:22 -0600 Subject: [PATCH 01/18] Re-adding pylint test. --- .github/workflows/pylint.yaml | 32 ++ ccpp-pylint-config.toml | 550 ++++++++++++++++++++++++++++++++++ test/pylint_test.sh | 28 -- 3 files changed, 582 insertions(+), 28 deletions(-) create mode 100644 .github/workflows/pylint.yaml create mode 100644 ccpp-pylint-config.toml delete mode 100755 test/pylint_test.sh diff --git a/.github/workflows/pylint.yaml b/.github/workflows/pylint.yaml new file mode 100644 index 00000000..a034fd0b --- /dev/null +++ b/.github/workflows/pylint.yaml @@ -0,0 +1,32 @@ +name: Pylinting + +on: + workflow_dispatch: + pull_request: + branches: [develop, main] + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.11'] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install pylint + + - name: Pylint scripts + run: | + pylint scripts + - name: Pylint tests + run: | + pylint test diff --git a/ccpp-pylint-config.toml b/ccpp-pylint-config.toml new file mode 100644 index 00000000..c8333fa0 --- /dev/null +++ b/ccpp-pylint-config.toml @@ -0,0 +1,550 @@ +[tool.pylint.main] +# Analyse import fallback blocks. This can be used to support both Python 2 and 3 +# compatible code, which means that the block might have code that exists only in +# one or another interpreter, leading to false positives when analysed. +# analyse-fallback-blocks = + +# Clear in-memory caches upon conclusion of linting. Useful if running pylint in +# a server-like mode. +# clear-cache-post-run = + +# Always return a 0 (non-error) status code, even if lint errors are found. This +# is primarily useful in continuous integration scripts. +# exit-zero = + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +# extension-pkg-allow-list = + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) +# extension-pkg-whitelist = + +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +# fail-on = + +# Specify a score threshold under which the program will exit with error. +fail-under = 10 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +# from-stdin = + +# Files or directories to be skipped. They should be base names, not paths. +ignore = ["CVS"] + +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, it +# can't be used as an escape character. +# ignore-paths = + +# Files or directories matching the regular expression patterns are skipped. The +# regex matches against base names, not paths. 
The default value ignores Emacs +# file locks +ignore-patterns = ["^\\.#"] + +# List of module names for which member attributes should not be checked and will +# not be imported (useful for modules/projects where namespaces are manipulated +# during runtime and thus existing member attributes cannot be deduced by static +# analysis). It supports qualified module names, as well as Unix pattern +# matching. +# ignored-modules = + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +# init-hook = + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. +jobs = 1 + +# Control the amount of potential inferred values when inferring a single object. +# This can help the performance when dealing with large functions or complex, +# nested conditions. +limit-inference-results = 100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +# load-plugins = + +# Pickle collected data for later comparisons. +persistent = true + +# Resolve imports to .pyi stubs if available. May reduce no-member messages and +# increase not-an-iterable messages. +# prefer-stubs = + +# Minimum Python version to use for version dependent checks. Will default to the +# version used to run pylint. +py-version = "3.11" + +# Discover python modules and packages in the file system subtree. +# recursive = + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +# source-roots = + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode = true + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +# unsafe-load-any-extension = + +[tool.pylint.basic] +# Naming style matching correct argument names. +argument-naming-style = "snake_case" + +# Regular expression matching correct argument names. Overrides argument-naming- +# style. If left empty, argument names will be checked with the set naming style. +# argument-rgx = + +# Naming style matching correct attribute names. +attr-naming-style = "snake_case" + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming +# style. +# attr-rgx = + +# Bad variable names which should always be refused, separated by a comma. +bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"] + +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +# bad-names-rgxs = + +# Naming style matching correct class attribute names. +class-attribute-naming-style = "any" + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. +# class-attribute-rgx = + +# Naming style matching correct class constant names. +class-const-naming-style = "UPPER_CASE" + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. 
+# class-const-rgx = + +# Naming style matching correct class names. +class-naming-style = "PascalCase" + +# Regular expression matching correct class names. Overrides class-naming-style. +# If left empty, class names will be checked with the set naming style. +# class-rgx = + +# Naming style matching correct constant names. +const-naming-style = "UPPER_CASE" + +# Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming style. +# const-rgx = + +# Minimum line length for functions/classes that require docstrings, shorter ones +# are exempt. +docstring-min-length = -1 + +# Naming style matching correct function names. +function-naming-style = "snake_case" + +# Regular expression matching correct function names. Overrides function-naming- +# style. If left empty, function names will be checked with the set naming style. +# function-rgx = + +# Good variable names which should always be accepted, separated by a comma. +good-names = ["i", "j", "k", "ex", "Run", "_"] + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +# good-names-rgxs = + +# Include a hint for the correct naming format with invalid-name. +# include-naming-hint = + +# Naming style matching correct inline iteration names. +inlinevar-naming-style = "any" + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. +# inlinevar-rgx = + +# Naming style matching correct method names. +method-naming-style = "snake_case" + +# Regular expression matching correct method names. Overrides method-naming- +# style. If left empty, method names will be checked with the set naming style. +# method-rgx = + +# Naming style matching correct module names. +module-naming-style = "snake_case" + +# Regular expression matching correct module names. Overrides module-naming- +# style. If left empty, module names will be checked with the set naming style. +# module-rgx = + +# Colon-delimited sets of names that determine each other's naming style when the +# name regexes allow several styles. +# name-group = + +# Regular expression which should only match function or class names that do not +# require a docstring. +no-docstring-rgx = "^_" + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. These +# decorators are taken in consideration only for invalid-name. +property-classes = ["abc.abstractproperty"] + +# Regular expression matching correct type alias names. If left empty, type alias +# names will be checked with the set naming style. +# typealias-rgx = + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +# typevar-rgx = + +# Naming style matching correct variable names. +variable-naming-style = "snake_case" + +# Regular expression matching correct variable names. Overrides variable-naming- +# style. If left empty, variable names will be checked with the set naming style. +# variable-rgx = + +[tool.pylint.classes] +# Warn about protected attribute access inside special methods +# check-protected-access-in-special-methods = + +# List of method names used to declare (i.e. assign) instance attributes. 
+defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"] + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"] + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg = ["cls"] + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg = ["mcs"] + +[tool.pylint.design] +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +# exclude-too-few-public-methods = + +# List of qualified class names to ignore when counting class parents (see R0901) +# ignored-parents = + +# Maximum number of arguments for function / method. +max-args = 5 + +# Maximum number of attributes for a class (see R0902). +max-attributes = 7 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr = 5 + +# Maximum number of branch for function / method body. +max-branches = 12 + +# Maximum number of locals for function / method body. +max-locals = 15 + +# Maximum number of parents for a class (see R0901). +max-parents = 7 + +# Maximum number of positional arguments for function / method. +max-positional-arguments = 5 + +# Maximum number of public methods for a class (see R0904). +max-public-methods = 20 + +# Maximum number of return / yield for function / method body. +max-returns = 6 + +# Maximum number of statements in function / method body. +max-statements = 50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods = 2 + +[tool.pylint.exceptions] +# Exceptions that will emit a warning when caught. +overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] + +[tool.pylint.format] +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +# expected-line-ending-format = + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines = "^\\s*(# )??$" + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren = 4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string = " " + +# Maximum number of characters on a single line. +max-line-length = 100 + +# Maximum number of lines in a module. +max-module-lines = 1000 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +# single-line-class-stmt = + +# Allow the body of an if to be on the same line as the test if there is no else. +# single-line-if-stmt = + +[tool.pylint.imports] +# List of modules that can be imported at any level, not just the top level one. +# allow-any-import-level = + +# Allow explicit reexports by alias from a package __init__. +# allow-reexport-from-package = + +# Allow wildcard imports from modules that define __all__. +# allow-wildcard-with-all = + +# Deprecated modules which should not be used, separated by a comma. +# deprecated-modules = + +# Output a graph (.gv or any supported image format) of external dependencies to +# the given file (report RP0402 must not be disabled). +# ext-import-graph = + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be disabled). 
+# import-graph = + +# Output a graph (.gv or any supported image format) of internal dependencies to +# the given file (report RP0402 must not be disabled). +# int-import-graph = + +# Force import order to recognize a module as part of the standard compatibility +# libraries. +# known-standard-library = + +# Force import order to recognize a module as part of a third party library. +known-third-party = ["enchant"] + +# Couples of modules and preferred modules, separated by a comma. +# preferred-modules = + +[tool.pylint.logging] +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style = "old" + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules = ["logging"] + +[tool.pylint."messages control"] +# Only show warnings with the listed confidence levels. Leave empty to show all. +# Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED. +confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] + +# Disable the message, report, category or checker with the given id(s). You can +# either give multiple identifiers separated by comma (,) or put this option +# multiple times (only on the command line, not in the configuration file where +# it should appear only once). You can also use "--disable=all" to disable +# everything first and then re-enable specific checks. For example, if you want +# to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable = ["raw-checker-failed", "bad-inline-option", "locally-disabled", "file-ignored", "suppressed-message", "useless-suppression", "deprecated-pragma", "use-implicit-booleaness-not-comparison-to-string", "use-implicit-booleaness-not-comparison-to-zero", "use-symbolic-message-instead"] + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where it +# should appear only once). See also the "--disable" option for examples. +# enable = + +[tool.pylint.method_args] +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods = ["requests.api.delete", "requests.api.get", "requests.api.head", "requests.api.options", "requests.api.patch", "requests.api.post", "requests.api.put", "requests.api.request"] + +[tool.pylint.miscellaneous] +# List of note tags to take in consideration, separated by a comma. +notes = ["FIXME", "XXX", "TODO"] + +# Regular expression of note tags to take in consideration. +# notes-rgx = + +[tool.pylint.refactoring] +# Maximum number of nested blocks for function / method body +max-nested-blocks = 5 + +# Complete name of functions that never returns. When checking for inconsistent- +# return-statements if a never returning function is called then it will be +# considered as an explicit return statement and no message will be printed. 
+never-returning-functions = ["sys.exit", "argparse.parse_error"] + +# Let 'consider-using-join' be raised when the separator to join on would be non- +# empty (resulting in expected fixes of the type: ``"- " + " - ".join(items)``) +suggest-join-with-non-empty-separator = true + +[tool.pylint.reports] +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each category, +# as well as 'statement' which is the total number of statements analyzed. This +# score is used by the global evaluation report (RP0004). +evaluation = "max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))" + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +# msg-template = + +# Set the output format. Available formats are: text, parseable, colorized, json2 +# (improved json format), json (old json format) and msvs (visual studio). You +# can also give a reporter class, e.g. mypackage.mymodule.MyReporterClass. +# output-format = + +# Tells whether to display a full report or only the messages. +# reports = + +# Activate the evaluation score. +score = true + +[tool.pylint.similarities] +# Comments are removed from the similarity computation +ignore-comments = true + +# Docstrings are removed from the similarity computation +ignore-docstrings = true + +# Imports are removed from the similarity computation +ignore-imports = true + +# Signatures are removed from the similarity computation +ignore-signatures = true + +# Minimum lines number of a similarity. +min-similarity-lines = 4 + +[tool.pylint.spelling] +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions = 4 + +# Spelling dictionary name. No available dictionaries : You need to install both +# the python package and the system dependency for enchant to work. +# spelling-dict = + +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives = "fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:" + +# List of comma separated words that should not be checked. +# spelling-ignore-words = + +# A path to a file that contains the private dictionary; one word per line. +# spelling-private-dict-file = + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +# spelling-store-unknown-words = + +[tool.pylint.typecheck] +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators = ["contextlib.contextmanager"] + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +# generated-members = + +# Tells whether missing members accessed in mixin class should be ignored. A +# class is considered mixin if its name matches the mixin-class-rgx option. +# Tells whether to warn about missing members when the owner of the attribute is +# inferred to be None. 
+ignore-none = true + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference can +# return multiple potential results while evaluating a Python object, but some +# branches might not be evaluated, which results in partial inference. In that +# case, it might be useful to still emit no-member and other checks for the rest +# of the inferred objects. +ignore-on-opaque-inference = true + +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins = ["no-member", "not-async-context-manager", "not-context-manager", "attribute-defined-outside-init"] + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes = ["optparse.Values", "thread._local", "_thread._local", "argparse.Namespace"] + +# Show a hint with possible names when a member name was not found. The aspect of +# finding the hint is based on edit distance. +missing-member-hint = true + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance = 1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices = 1 + +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx = ".*[Mm]ixin" + +# List of decorators that change the signature of a decorated function. +# signature-mutators = + +[tool.pylint.variables] +# List of additional names supposed to be defined in builtins. Remember that you +# should avoid defining new builtins when possible. +# additional-builtins = + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables = true + +# List of names allowed to shadow builtins +# allowed-redefined-builtins = + +# List of strings which can identify a callback function by name. A callback name +# must start or end with one of those strings. +callbacks = ["cb_", "_cb"] + +# A regular expression matching the name of dummy variables (i.e. expected to not +# be used). +dummy-variables-rgx = "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_" + +# Argument names that match this expression will be ignored. +ignored-argument-names = "_.*|^ignored_|^unused_" + +# Tells whether we should check for unused import in __init__ files. +# init-import = + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules = ["six.moves", "past.builtins", "future.builtins", "builtins", "io"] + + diff --git a/test/pylint_test.sh b/test/pylint_test.sh deleted file mode 100755 index a8bf3f90..00000000 --- a/test/pylint_test.sh +++ /dev/null @@ -1,28 +0,0 @@ -#! 
/bin/bash
-
-# Script to run pylint tests on CCPP Framework python scripts
-
-# Add CCPP Framework paths to PYTHONPATH so pylint can find them
-SCRIPTDIR="$( cd $( dirname ${0} ); pwd -P )"
-SPINROOT="$( dirname ${SCRIPTDIR} )"
-CCPPDIR="${SPINROOT}/scripts"
-export PYTHONPATH="${CCPPDIR}:$PYTHONPATH"
-
-pylintcmd="pylint --rcfile=${SCRIPTDIR}/.pylintrc"
-
-# Test top-level scripts
-scripts="${CCPPDIR}/ccpp_capgen.py"
-scripts="${scripts} ${CCPPDIR}/ccpp_suite.py"
-scripts="${scripts} ${CCPPDIR}/ddt_library.py"
-scripts="${scripts} ${CCPPDIR}/host_cap.py"
-scripts="${scripts} ${CCPPDIR}/host_model.py"
-scripts="${scripts} ${CCPPDIR}/metadata_table.py"
-scripts="${scripts} ${CCPPDIR}/metavar.py"
-scripts="${scripts} ${CCPPDIR}/state_machine.py"
-${pylintcmd} ${scripts}
-# Test the fortran_tools module
-${pylintcmd} ${CCPPDIR}/fortran_tools
-# Test the parse_tools module
-${pylintcmd} ${CCPPDIR}/parse_tools
-# Test the fortran to metadata converter tool
-${pylintcmd} ${CCPPDIR}/ccpp_fortran_to_metadata.py

From 340c2f91f5929ee2f8a2296d423a4b5a639a3f6b Mon Sep 17 00:00:00 2001
From: Michael Waxmonsky
Date: Sat, 26 Oct 2024 11:19:43 -0600
Subject: [PATCH 02/18] Simplifying arg parser.

---
 scripts/ccpp_datafile.py | 48 +++++++++++++++++++---------------------
 1 file changed, 23 insertions(+), 25 deletions(-)

diff --git a/scripts/ccpp_datafile.py b/scripts/ccpp_datafile.py
index 20dee70d..51ccfb7c 100755
--- a/scripts/ccpp_datafile.py
+++ b/scripts/ccpp_datafile.py
@@ -163,51 +163,49 @@ def _command_line_parser():
     the list of optional arguments below.
     Note that exactly one action is required.
     """
-    parser = argparse.ArgumentParser(description=description)
+    parser = argparse.ArgumentParser(description=description,
+                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
     parser.add_argument("datatable", type=str,
                         help="Path to a data table XML file created by capgen")
     ### Only one action per call
     group = parser.add_mutually_exclusive_group(required=True)
     for report in _VALID_REPORTS:
-        rep_type = "--{}".format(report["report"].replace("_", "-"))
+        report_name = report["report"].replace("_", "-")
+        report_name_option = f"--{report_name}"
         if report["type"] is bool:
-            group.add_argument(rep_type, action='store_true', default=False,
+            group.add_argument(report_name_option, action='store_true', default=False,
                                help=report["help"])
         elif report["type"] is str:
+            report_help = report["help"]
+            default_str = ''
             if "metavar" in report:
-                group.add_argument(rep_type, required=False, type=str,
-                                   metavar=report["metavar"], default='',
-                                   help=report["help"])
+                group.add_argument(report_name_option, required=False, type=str,
+                                   default=default_str, help=report_help,
+                                   metavar=report["metavar"])
             else:
-                group.add_argument(rep_type, required=False, type=str,
-                                   default='', help=report["help"])
+                group.add_argument(report_name_option, required=False, type=str,
+                                   default=default_str, help=report_help)
             # end if
         else:
-            raise ValueError("Unknown report type, '{}'".format(report["type"]))
+            raise ValueError(f"Unknown report type, '{report['type']}'")
         # end if
     # end for
     ###
-    defval = ","
-    help_str = "String to separate items in a list (default: '{}')"
-    parser.add_argument("--separator", type=str, required=False, default=defval,
-                        metavar="SEP", dest="sep", help=help_str.format(defval))
-    defval = False
+
+    parser.add_argument("--separator", type=str, required=False, default=",",
+                        metavar="SEP", dest="sep",
+                        help="String to separate items in a list")
+
     help_str = ("Exclude protected variables (only has an effect if
the " - "requested report is returning a list of variables)." - " (default: {})") + "requested report is returning a list of variables).") parser.add_argument("--exclude-protected", action='store_true', - required=False, - default=defval, help=help_str.format(defval)) - defval = -1 - help_str = ("Screen width for '--show' line wrapping. -1 means do not " - "wrap. (default: {})") + required=False, default=False, help=help_str) parser.add_argument("--line-wrap", type=int, required=False, metavar="LINE_WIDTH", dest="line_wrap", - default=defval, help=help_str.format(defval)) - defval = 2 - help_str = "Indent depth for '--show' output (default: {})" + default=-1, + help="Screen width for '--show' line wrapping. -1 means do not wrap.") parser.add_argument("--indent", type=int, required=False, default=2, - help=help_str.format(defval)) + help="Indent depth for '--show' output") return parser ############################################################################### From 1285ea9d1fc76d37b69ff71af63e64e809f6512c Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Wed, 30 Oct 2024 09:32:23 -0600 Subject: [PATCH 03/18] First pass refactor of tests into integrated stack. --- .github/workflows/capgen_unit_tests.yaml | 12 +- .travis.yml | 28 --- CMakeLists.txt | 57 ++++- doc/CMakeLists.txt | 28 +-- scripts/ccpp_datafile.py | 6 +- src/CMakeLists.txt | 5 +- test/CMakeLists.txt | 3 + test/capgen_test/CMakeLists.txt | 155 ++---------- test/capgen_test/README.md | 6 - test/capgen_test/run_test | 282 ---------------------- test/capgen_test/test_reports.py | 293 +++++++++++++---------- 11 files changed, 272 insertions(+), 603 deletions(-) delete mode 100644 .travis.yml create mode 100644 test/CMakeLists.txt delete mode 100644 test/capgen_test/README.md delete mode 100755 test/capgen_test/run_test diff --git a/.github/workflows/capgen_unit_tests.yaml b/.github/workflows/capgen_unit_tests.yaml index 363d19e3..37a0a9ca 100644 --- a/.github/workflows/capgen_unit_tests.yaml +++ b/.github/workflows/capgen_unit_tests.yaml @@ -16,6 +16,16 @@ jobs: - uses: actions/checkout@v3 - name: update repos and install dependencies run: sudo apt-get update && sudo apt-get install -y build-essential ${{matrix.fortran-compiler}} cmake python3 git + + - name: Build the framework + run: | + cmake -S. -B./build -DCCPP_FRAMEWORK_ENABLE_TESTS=ON + - name: Run unit tests - run: cd test && ./run_fortran_tests.sh + run: | + cd build + ctest --rerun-failed --output-on-failure . 
--verbose + - name: Run python tests + run: | + BUILD_DIR=./build pytest test/capgen_test/test_reports.py diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 100192cd..00000000 --- a/.travis.yml +++ /dev/null @@ -1,28 +0,0 @@ -language: python - -python: - - "3.6" - - "3.7" - - "3.8" - - "3.9" - -branches: - only: - - feature/capgen - -install: - - pip install pylint - -script: - - env PYTHONPATH=scripts:${PYTHONPATH} pylint --rcfile ./test/.pylintrc ./test/unit_tests/test_metadata_table.py - - env PYTHONPATH=scripts:${PYTHONPATH} pylint --rcfile ./test/.pylintrc ./test/unit_tests/test_metadata_scheme_file.py - - python test/unit_tests/test_metadata_table.py - - python test/unit_tests/test_metadata_scheme_file.py - -notifications: - email: - recipients: - - dom.heinzeller@noaa.gov - - goldy@ucar.edu - on_success: always # default: change - on_failure: always # default: always diff --git a/CMakeLists.txt b/CMakeLists.txt index 90634bee..c707f5ce 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -4,12 +4,46 @@ project(ccpp_framework VERSION 5.0.0 LANGUAGES Fortran) +include(cmake/ccpp_capgen.cmake) +set(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH};${PROJECT_SOURCE_DIR}/cmake") + + #------------------------------------------------------------------------------ # Set package definitions set(PACKAGE "ccpp-framework") set(AUTHORS "Dom Heinzeller" "Grant Firl" "Mike Kavulich" "Dustin Swales" "Courtney Peverley") string(TIMESTAMP YEAR "%Y") +option(CCPP_FRAMEWORK_BUILD_DOCUMENTATION + "Create and install the HTML documentation (requires Doxygen)" OFF) +option(CCPP_FRAMEWORK_ENABLE_OPENMP "Enable OpenMP support for the framework" OFF) +option(CCPP_FRAMEWORK_ENABLE_TESTS "Enable building/running tests" OFF) +set(CCPP_VERBOSITY "0" CACHE STRING "Verbosity level of output (default: 0)") + +# Use rpaths on MacOSX +set(CMAKE_MACOSX_RPATH 1) + +ADD_COMPILE_OPTIONS(-O0) +message(STATUS "Compiling Fortran with ${CMAKE_Fortran_COMPILER_ID}") +if(${CMAKE_Fortran_COMPILER_ID} MATCHES "GNU") + ADD_COMPILE_OPTIONS(-fcheck=all) + ADD_COMPILE_OPTIONS(-fbacktrace) + ADD_COMPILE_OPTIONS(-ffpe-trap=zero) + ADD_COMPILE_OPTIONS(-finit-real=nan) + ADD_COMPILE_OPTIONS(-ggdb) + ADD_COMPILE_OPTIONS(-ffree-line-length-none) + ADD_COMPILE_OPTIONS(-cpp) +elseif(${CMAKE_Fortran_COMPILER_ID} MATCHES "IntelLLVM") + #ADD_COMPILE_OPTIONS(-check all) + ADD_COMPILE_OPTIONS(-fpe0) + ADD_COMPILE_OPTIONS(-warn) + ADD_COMPILE_OPTIONS(-traceback) + ADD_COMPILE_OPTIONS(-debug full) + ADD_COMPILE_OPTIONS(-fpp) +else() + message (WARNING "This program has only been compiled with gfortran and ifx.") +endif() + #------------------------------------------------------------------------------ # Set MPI flags for Fortran with MPI F08 interface find_package(MPI REQUIRED Fortran) @@ -19,18 +53,18 @@ endif() #------------------------------------------------------------------------------ # Set OpenMP flags for C/C++/Fortran -if (OPENMP) +if (CCPP_FRAMEWORK_ENABLE_OPENMP) find_package(OpenMP REQUIRED) + set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS}") + set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}") set (CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${OpenMP_Fortran_FLAGS}") -endif (OPENMP) +endif (CCPP_FRAMEWORK_ENABLE_OPENMP) #------------------------------------------------------------------------------ # Set a default build type if none was specified if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) message(STATUS "Setting build type to 'Release' as none was specified.") set(CMAKE_BUILD_TYPE Debug 
CACHE STRING "Choose the type of build." FORCE) - # Set the possible values of build type for cmake-gui - set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" "Coverage") endif() #------------------------------------------------------------------------------ @@ -46,4 +80,17 @@ option(BUILD_SHARED_LIBS "Build a static library" OFF) #------------------------------------------------------------------------------ # Add the sub-directories add_subdirectory(src) -add_subdirectory(doc) + +if(CCPP_FRAMEWORK_ENABLE_TESTS) + enable_testing() + add_subdirectory(test) +endif() + +if (CCPP_FRAMEWORK_BUILD_DOCUMENTATION) + find_package(Doxygen REQUIRED) + if(NOT DOXYGEN_FOUND) + message(FATAL_ERROR "Doxygen is needed to build the documentation.") + endif() + add_subdirectory(doc) +endif() + diff --git a/doc/CMakeLists.txt b/doc/CMakeLists.txt index 9e96e4e4..b7997658 100644 --- a/doc/CMakeLists.txt +++ b/doc/CMakeLists.txt @@ -2,28 +2,16 @@ # Doxygen rules # # Add a target to generate API documentation with Doxygen -find_package(Doxygen) -option(BUILD_DOCUMENTATION - "Create and install the HTML documentation (requires Doxygen)" - ${DOXYGEN_FOUND}) +set(doxyfile_in ${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile.in) +set(doxyfile ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile) -# -if(BUILD_DOCUMENTATION) - if(NOT DOXYGEN_FOUND) - message(FATAL_ERROR "Doxygen is needed to build the documentation.") - endif() - - set(doxyfile_in ${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile.in) - set(doxyfile ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile) - - configure_file(${doxyfile_in} ${doxyfile} @ONLY) +configure_file(${doxyfile_in} ${doxyfile} @ONLY) - add_custom_target(doc - COMMAND ${DOXYGEN_EXECUTABLE} ${doxyfile} - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMENT "Generating API documentation with Doxygen" - VERBATIM) -endif() +add_custom_target(doc + COMMAND ${DOXYGEN_EXECUTABLE} ${doxyfile} + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMENT "Generating API documentation with Doxygen" + VERBATIM) set(gmtb_sty_in ${CMAKE_CURRENT_SOURCE_DIR}/DevelopersGuide/gmtb.sty) set(gmtb_sty ${CMAKE_CURRENT_BINARY_DIR}/DevelopersGuide/gmtb.sty) diff --git a/scripts/ccpp_datafile.py b/scripts/ccpp_datafile.py index 51ccfb7c..9b5952f5 100755 --- a/scripts/ccpp_datafile.py +++ b/scripts/ccpp_datafile.py @@ -1258,12 +1258,10 @@ def generate_ccpp_datatable(run_env, host_model, api, scheme_headers, ARG_VARS = vars(PARGS) _ACTION = None _ERRMSG = '' - _ESEP = '' for opt in ARG_VARS: if (opt in DatatableReport.valid_actions()) and ARG_VARS[opt]: if _ACTION: - _ERRMSG += _ESEP + "Duplicate action, '{}'".format(opt) - _ESEP = '\n' + _ERRMSG += f"Duplicate action, '{opt}'\n" else: _ACTION = DatatableReport(opt, ARG_VARS[opt]) # end if @@ -1275,5 +1273,5 @@ def generate_ccpp_datatable(run_env, host_model, api, scheme_headers, REPORT = datatable_report(PARGS.datatable, _ACTION, PARGS.sep, PARGS.exclude_protected) # end if - print("{}".format(REPORT.rstrip())) + print(f"{REPORT.rstrip()}") sys.exit(0) diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index eaa78afe..ff7d36b0 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -29,9 +29,10 @@ set(${PACKAGE}_LIB_DIRS # Define the executable and what to link add_library(ccpp_framework STATIC ${SOURCES_F90}) target_link_libraries(ccpp_framework PUBLIC MPI::MPI_Fortran) -set_target_properties(ccpp_framework PROPERTIES VERSION ${PROJECT_VERSION} +set_target_properties(ccpp_framework PROPERTIES + VERSION ${PROJECT_VERSION} SOVERSION ${PROJECT_VERSION_MAJOR} - LINK_FLAGS 
${CMAKE_Fortran_FLAGS}) + LINK_FLAGS "${CMAKE_Fortran_FLAGS}") #------------------------------------------------------------------------------ # Installation diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt new file mode 100644 index 00000000..99f7f981 --- /dev/null +++ b/test/CMakeLists.txt @@ -0,0 +1,3 @@ +# add_subdirectory(advection_test) +add_subdirectory(capgen_test) +# add_subdirectory(var_compatability_test) \ No newline at end of file diff --git a/test/capgen_test/CMakeLists.txt b/test/capgen_test/CMakeLists.txt index ccae4f08..f1e334bd 100644 --- a/test/capgen_test/CMakeLists.txt +++ b/test/capgen_test/CMakeLists.txt @@ -1,18 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) -PROJECT(test_host) -ENABLE_LANGUAGE(Fortran) -include(CMakeForceCompiler) - -SET(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_SOURCE_DIR}/cmake/modules) - -#------------------------------------------------------------------------------ -# -# Set where the CCPP Framework lives -# -#------------------------------------------------------------------------------ -get_filename_component(TEST_ROOT "${CMAKE_SOURCE_DIR}" DIRECTORY) -get_filename_component(CCPP_ROOT "${TEST_ROOT}" DIRECTORY) #------------------------------------------------------------------------------ # # Create list of SCHEME_FILES, HOST_FILES, and SUITE_FILES @@ -22,9 +8,10 @@ get_filename_component(CCPP_ROOT "${TEST_ROOT}" DIRECTORY) LIST(APPEND SCHEME_FILES "temp_scheme_files.txt" "ddt_suite_files.txt") LIST(APPEND HOST_FILES "test_host_data" "test_host_mod") LIST(APPEND SUITE_FILES "ddt_suite.xml" "temp_suite.xml") + # HOST is the name of the executable we will build. # We assume there are files ${HOST}.meta and ${HOST}.F90 in CMAKE_SOURCE_DIR -SET(HOST "${CMAKE_PROJECT_NAME}") +SET(HOST "test_host") #------------------------------------------------------------------------------ # @@ -32,80 +19,11 @@ SET(HOST "${CMAKE_PROJECT_NAME}") # #------------------------------------------------------------------------------ -# By default, no verbose output -SET(VERBOSITY 0 CACHE STRING "Verbosity level of output (default: 0)") # By default, generated caps go in ccpp subdir -SET(CCPP_CAP_FILES "${CMAKE_BINARY_DIR}/ccpp" CACHE +SET(CCPP_CAP_FILES "${CMAKE_CURRENT_BINARY_DIR}/ccpp" CACHE STRING "Location of CCPP-generated cap files") -SET(CCPP_FRAMEWORK ${CCPP_ROOT}/scripts) - -# Use rpaths on MacOSX -set(CMAKE_MACOSX_RPATH 1) - -#------------------------------------------------------------------------------ -# Set a default build type if none was specified -if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) - #message(STATUS "Setting build type to 'Debug' as none was specified.") - #set(CMAKE_BUILD_TYPE Debug CACHE STRING "Choose the type of build." FORCE) - message(STATUS "Setting build type to 'Release' as none was specified.") - set(CMAKE_BUILD_TYPE Release CACHE STRING "Choose the type of build." 
FORCE) - - # Set the possible values of build type for cmake-gui - set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" - "MinSizeRel" "RelWithDebInfo") -endif() - -ADD_COMPILE_OPTIONS(-O0) - -if (${CMAKE_Fortran_COMPILER_ID} MATCHES "GNU") -# gfortran -# MESSAGE("gfortran being used.") - ADD_COMPILE_OPTIONS(-fcheck=all) - ADD_COMPILE_OPTIONS(-fbacktrace) - ADD_COMPILE_OPTIONS(-ffpe-trap=zero) - ADD_COMPILE_OPTIONS(-finit-real=nan) - ADD_COMPILE_OPTIONS(-ggdb) - ADD_COMPILE_OPTIONS(-ffree-line-length-none) - ADD_COMPILE_OPTIONS(-cpp) -elseif (${CMAKE_Fortran_COMPILER_ID} MATCHES "Intel") -# ifort -# MESSAGE("ifort being used.") - #ADD_COMPILE_OPTIONS(-check all) - ADD_COMPILE_OPTIONS(-fpe0) - ADD_COMPILE_OPTIONS(-warn) - ADD_COMPILE_OPTIONS(-traceback) - ADD_COMPILE_OPTIONS(-debug extended) - ADD_COMPILE_OPTIONS(-fpp) -elseif (${CMAKE_Fortran_COMPILER_ID} MATCHES "PGI") -# pgf90 -# MESSAGE("pgf90 being used.") - ADD_COMPILE_OPTIONS(-g) - ADD_COMPILE_OPTIONS(-Mipa=noconst) - ADD_COMPILE_OPTIONS(-traceback) - ADD_COMPILE_OPTIONS(-Mfree) - ADD_COMPILE_OPTIONS(-Mfptrap) - ADD_COMPILE_OPTIONS(-Mpreprocess) -else (${CMAKE_Fortran_COMPILER_ID} MATCHES "GNU") - message (WARNING "This program has only been compiled with gfortran, pgf90 and ifort. If another compiler is needed, the appropriate flags SHOULD be added in ${CMAKE_SOURCE_DIR}/CMakeLists.txt") -endif (${CMAKE_Fortran_COMPILER_ID} MATCHES "GNU") - -#------------------------------------------------------------------------------ -# CMake Modules -# Set the CMake module path -list(APPEND CMAKE_MODULE_PATH "${CCPP_FRAMEWORK}/cmake") -#------------------------------------------------------------------------------ -# Set OpenMP flags for C/C++/Fortran -if (OPENMP) - include(detect_openmp) - detect_openmp() - set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS}") - set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}") - set (CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${OpenMP_Fortran_FLAGS}") - message(STATUS "Enable OpenMP support for C/C++/Fortran compiler") -else(OPENMP) - message (STATUS "Disable OpenMP support for C/C++/Fortran compiler") -endif() +SET(CCPP_FRAMEWORK ${CMAKE_SOURCE_DIR}/scripts) # Create metadata and source file lists FOREACH(FILE ${SCHEME_FILES}) @@ -127,62 +45,37 @@ FOREACH(FILE ${HOST_FILES}) get_filename_component(ABS_PATH "${FILE}.F90" ABSOLUTE) LIST(APPEND HOST_SOURCE "${ABS_PATH}") ENDFOREACH(FILE) + list(APPEND LIBRARY_LIST ${HOST_SOURCE}) string(REPLACE ";" ".meta," HOST_METADATA "${HOST_FILES}") set(HOST_METADATA "${HOST_METADATA}.meta,${HOST}.meta") string(REPLACE ";" "," SUITE_XML "${SUITE_FILES}") -# Run ccpp_capgen -set(CAPGEN_CMD "${CCPP_FRAMEWORK}/ccpp_capgen.py") -list(APPEND CAPGEN_CMD "--host-files") -list(APPEND CAPGEN_CMD "${HOST_METADATA}") -list(APPEND CAPGEN_CMD "--scheme-files") -list(APPEND CAPGEN_CMD "${SCHEME_METADATA}") -list(APPEND CAPGEN_CMD "--suites") -list(APPEND CAPGEN_CMD "${SUITE_XML}") -list(APPEND CAPGEN_CMD "--host-name") -list(APPEND CAPGEN_CMD "test_host") -list(APPEND CAPGEN_CMD "--output-root") -list(APPEND CAPGEN_CMD "${CCPP_CAP_FILES}") -while (VERBOSITY GREATER 0) - list(APPEND CAPGEN_CMD "--verbose") - MATH(EXPR VERBOSITY "${VERBOSITY} - 1") -endwhile () -list(APPEND CAPGEN_CMD "--debug") -string(REPLACE ";" " " CAPGEN_STRING "${CAPGEN_CMD}") -MESSAGE(STATUS "Running: ${CAPGEN_STRING}") -EXECUTE_PROCESS(COMMAND ${CAPGEN_CMD} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} - OUTPUT_VARIABLE CAPGEN_OUT ERROR_VARIABLE CAPGEN_OUT RESULT_VARIABLE RES) 
-MESSAGE(STATUS "${CAPGEN_OUT}") -if (RES EQUAL 0) - MESSAGE(STATUS "CCPP cap generation completed") -else(RES EQUAL 0) - MESSAGE(FATAL_ERROR "CCPP cap generation FAILED: result = ${RES}") -endif(RES EQUAL 0) +message(STATUS "CCPP_VERBOSITY = ${CCPP_VERBOSITY}") +ccpp_capgen(CAPGEN_DEBUG ON + VERBOSITY ${CCPP_VERBOSITY} + HOSTFILES ${HOST_METADATA} + SCHEMEFILES ${SCHEME_METADATA} + SUITES ${SUITE_XML} + HOST_NAME "test_host" + OUTPUT_ROOT "${CCPP_CAP_FILES}") # Retrieve the list of files from datatable.xml and set to CCPP_CAPS -set(DTABLE_CMD "${CCPP_FRAMEWORK}/ccpp_datafile.py") -list(APPEND DTABLE_CMD "${CCPP_CAP_FILES}/datatable.xml") -list(APPEND DTABLE_CMD "--ccpp-files") -list(APPEND DTABLE_CMD "--separator=\\;") -string(REPLACE ";" " " DTABLE_STRING "${DTABLE_CMD}") -MESSAGE(STATUS "Running: ${DTABLE_STRING}") -EXECUTE_PROCESS(COMMAND ${DTABLE_CMD} OUTPUT_VARIABLE CCPP_CAPS - RESULT_VARIABLE RES - OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_STRIP_TRAILING_WHITESPACE) -message(STATUS "CCPP_CAPS = ${CCPP_CAPS}") -if (RES EQUAL 0) - MESSAGE(STATUS "CCPP cap files retrieved") -else(RES EQUAL 0) - MESSAGE(FATAL_ERROR "CCPP cap file retrieval FAILED: result = ${RES}") -endif(RES EQUAL 0) -list(APPEND LIBRARY_LIST ${CCPP_CAPS}) -add_library(TESTLIB OBJECT ${LIBRARY_LIST}) +ccpp_datafile(DATATABLE "${CCPP_CAP_FILES}/datatable.xml" + REPORT_NAME "--ccpp-files") + +string(REPLACE "," ";" CCPP_CAPS_LIST ${CCPP_CAPS}) +message(STATUS "Adding ${CCPP_CAPS_LIST} to library list") +list(APPEND LIBRARY_LIST ${CCPP_CAPS_LIST}) +add_library(TESTLIB OBJECT ${LIBRARY_LIST}) ADD_EXECUTABLE(${HOST} ${HOST}.F90 $) -INCLUDE_DIRECTORIES(${CCPP_CAP_FILES}) +TARGET_INCLUDE_DIRECTORIES(${HOST} PRIVATE ${CCPP_CAP_FILES}) set_target_properties(${HOST} PROPERTIES COMPILE_FLAGS "${CMAKE_Fortran_FLAGS}" LINK_FLAGS "${CMAKE_Fortran_FLAGS}") + +enable_testing() +add_test(NAME ${HOST} COMMAND ${HOST}) diff --git a/test/capgen_test/README.md b/test/capgen_test/README.md deleted file mode 100644 index 127544e0..00000000 --- a/test/capgen_test/README.md +++ /dev/null @@ -1,6 +0,0 @@ -ccpp_capgen test -=========== - -To build and run the ccpp_capgen test, run ./run_test -This script will build and run the test. -The exit code is zero (0) on PASS and non-zero on FAIL. diff --git a/test/capgen_test/run_test b/test/capgen_test/run_test deleted file mode 100755 index 3d21e2c2..00000000 --- a/test/capgen_test/run_test +++ /dev/null @@ -1,282 +0,0 @@ -#! 
/bin/bash - -currdir="`pwd -P`" -scriptdir="$( cd $( dirname $0 ); pwd -P )" - -## -## Option default values -## -defdir="ct_build" -build_dir="${currdir}/${defdir}" -cleanup="PASS" # Other supported options are ALWAYS and NEVER -verbosity=0 - -## -## General syntax help function -## Usage: help -## -help () { - local hname="Usage: `basename ${0}`" - local hprefix="`echo ${hname} | tr '[!-~]' ' '`" - echo "${hname} [ --build-dir ] [ --cleanup ]" - echo "${hprefix} [ --verbosity <#> ]" - hprefix=" " - echo "" - echo "${hprefix} : Directory for building and running the test" - echo "${hprefix} default is /${defdir}" - echo "${hprefix} : Cleanup option is ALWAYS, NEVER, or PASS" - echo "${hprefix} default is PASS" - echo "${hprefix} verbosity: 0, 1, or 2" - echo "${hprefix} default is 0" - exit $1 -} - -## -## Error output function (should be handed a string) -## -perr() { - >&2 echo -e "\nERROR: ${@}\n" - exit 1 -} - -## -## Cleanup the build and test directory -## -docleanup() { - # We start off in the build directory - if [ "${build_dir}" == "${currdir}" ]; then - echo "WARNING: Cannot clean ${build_dir}" - else - cd ${currdir} - rm -rf ${build_dir} - fi -} - -## Process our input arguments -while [ $# -gt 0 ]; do - case $1 in - --h | -h | --help | -help) - help 0 - ;; - --build-dir) - if [ $# -lt 2 ]; then - perr "${1} requires a build directory" - fi - build_dir="${2}" - shift - ;; - --cleanup) - if [ $# -lt 2 ]; then - perr "${1} requies a cleanup option (ALWAYS, NEVER, PASS)" - fi - if [ "${2}" == "ALWAYS" -o "${2}" == "NEVER" -o "${2}" == "PASS" ]; then - cleanup="${2}" - else - perr "Allowed cleanup options: ALWAYS, NEVER, PASS" - fi - shift - ;; - --verbosity) - if [ $# -lt 2 ]; then - perr "${1} requires a verbosity value (0, 1, or 2)" - fi - if [ "${2}" == "0" -o "${2}" == "1" -o "${2}" == "2" ]; then - verbosity=$2 - else - perr "allowed verbosity levels are 0, 1, 2" - fi - shift - ;; - *) - perr "Unrecognized option, \"${1}\"" - ;; - esac - shift -done - -# Create the build directory, if necessary -if [ -d "${build_dir}" ]; then - # Always make sure build_dir is not in the test dir - if [ "$( cd ${build_dir}; pwd -P )" == "${currdir}" ]; then - build_dir="${build_dir}/${defdir}" - fi -else - mkdir -p ${build_dir} - res=$? 
- if [ $res -ne 0 ]; then - perr "Unable to create build directory, '${build_dir}'" - fi -fi -build_dir="$( cd ${build_dir}; pwd -P )" - -## framework is the CCPP Framework root dir -framework="$( cd $( dirname $( dirname ${scriptdir} ) ); pwd -P )" -frame_src="${framework}/src" - -## -## check strings for datafile command-list test -## NB: This has to be after build_dir is finalized -## -host_files="${build_dir}/ccpp/test_host_ccpp_cap.F90" -suite_files="${build_dir}/ccpp/ccpp_ddt_suite_cap.F90" -suite_files="${suite_files},${build_dir}/ccpp/ccpp_temp_suite_cap.F90" -utility_files="${build_dir}/ccpp/ccpp_kinds.F90" -utility_files="${utility_files},${frame_src}/ccpp_constituent_prop_mod.F90" -utility_files="${utility_files},${frame_src}/ccpp_hashable.F90" -utility_files="${utility_files},${frame_src}/ccpp_hash_table.F90" -ccpp_files="${utility_files}" -ccpp_files="${ccpp_files},${build_dir}/ccpp/test_host_ccpp_cap.F90" -ccpp_files="${ccpp_files},${build_dir}/ccpp/ccpp_ddt_suite_cap.F90" -ccpp_files="${ccpp_files},${build_dir}/ccpp/ccpp_temp_suite_cap.F90" -process_list="adjusting=temp_calc_adjust,setter=temp_set" -module_list="environ_conditions,make_ddt,setup_coeffs,temp_adjust,temp_calc_adjust,temp_set" -dependencies="${scriptdir}/adjust/qux.F90,${scriptdir}/bar.F90,${scriptdir}/foo.F90" -suite_list="ddt_suite;temp_suite" -required_vars_ddt="ccpp_error_code,ccpp_error_message,horizontal_dimension" -required_vars_ddt="${required_vars_ddt},horizontal_loop_begin" -required_vars_ddt="${required_vars_ddt},horizontal_loop_end" -required_vars_ddt="${required_vars_ddt},model_times" -required_vars_ddt="${required_vars_ddt},number_of_model_times" -required_vars_ddt="${required_vars_ddt},surface_air_pressure" -input_vars_ddt="horizontal_dimension" -input_vars_ddt="${input_vars_ddt},horizontal_loop_begin" -input_vars_ddt="${input_vars_ddt},horizontal_loop_end" -input_vars_ddt="${input_vars_ddt},model_times,number_of_model_times" -input_vars_ddt="${input_vars_ddt},surface_air_pressure" -output_vars_ddt="ccpp_error_code,ccpp_error_message" -output_vars_ddt="${output_vars_ddt},model_times,number_of_model_times" -required_vars_temp="ccpp_error_code,ccpp_error_message" -required_vars_temp="${required_vars_temp},coefficients_for_interpolation" -required_vars_temp="${required_vars_temp},horizontal_dimension" -required_vars_temp="${required_vars_temp},horizontal_loop_begin" -required_vars_temp="${required_vars_temp},horizontal_loop_end" -required_vars_temp="${required_vars_temp},index_of_water_vapor_specific_humidity" -required_vars_temp="${required_vars_temp},number_of_tracers" -required_vars_temp="${required_vars_temp},potential_temperature" -required_vars_temp="${required_vars_temp},potential_temperature_at_interface" -required_vars_temp="${required_vars_temp},potential_temperature_increment" -required_vars_temp="${required_vars_temp},surface_air_pressure" -required_vars_temp="${required_vars_temp},time_step_for_physics" -required_vars_temp="${required_vars_temp},vertical_interface_dimension" -required_vars_temp="${required_vars_temp},vertical_layer_dimension" -required_vars_temp="${required_vars_temp},water_vapor_specific_humidity" -input_vars_temp="coefficients_for_interpolation" -input_vars_temp="${input_vars_temp},horizontal_dimension" -input_vars_temp="${input_vars_temp},horizontal_loop_begin" -input_vars_temp="${input_vars_temp},horizontal_loop_end" -input_vars_temp="${input_vars_temp},index_of_water_vapor_specific_humidity" -input_vars_temp="${input_vars_temp},number_of_tracers" 
-input_vars_temp="${input_vars_temp},potential_temperature" -input_vars_temp="${input_vars_temp},potential_temperature_at_interface" -input_vars_temp="${input_vars_temp},potential_temperature_increment" -input_vars_temp="${input_vars_temp},surface_air_pressure,time_step_for_physics" -input_vars_temp="${input_vars_temp},vertical_interface_dimension" -input_vars_temp="${input_vars_temp},vertical_layer_dimension" -input_vars_temp="${input_vars_temp},water_vapor_specific_humidity" -output_vars_temp="ccpp_error_code,ccpp_error_message" -output_vars_temp="${output_vars_temp},coefficients_for_interpolation" -output_vars_temp="${output_vars_temp},potential_temperature" -output_vars_temp="${output_vars_temp},potential_temperature_at_interface" -output_vars_temp="${output_vars_temp},surface_air_pressure" -output_vars_temp="${output_vars_temp},water_vapor_specific_humidity" - -## -## Run a database report and check the return string -## $1 is the report program file -## $2 is the database file -## $3 is the report string -## $4 is the check string -## $5+ are any optional arguments -## -check_datatable() { - local checkstr=${4} - local teststr - local prog=${1} - local database=${2} - local report=${3} - shift 4 - echo "Checking ${report} report" - teststr="`${prog} ${database} ${report} $@`" - if [ "${teststr}" != "${checkstr}" ]; then - perr "datatable check:\nExpected: '${checkstr}'\nGot: '${teststr}'" - fi -} - -# cd to the build directory -cd ${build_dir} -res=$? -if [ $res -ne 0 ]; then - perr "Unable to cd to build directory, '${build_dir}'" -fi -# Clean build directory -rm -rf * -res=$? -if [ $res -ne 0 ]; then - perr "Unable to clean build directory, '${build_dir}'" -fi -# Run CMake -opts="" -if [ $verbosity -gt 0 ]; then - opts="${opts} -DVERBOSITY=${verbosity}" -fi -# Run cmake -cmake ${scriptdir} ${opts} -res=$? -if [ $res -ne 0 ]; then - perr "CMake failed with exit code, ${res}" -fi -# Test the datafile user interface -report_prog="${framework}/scripts/ccpp_datafile.py" -datafile="${build_dir}/ccpp/datatable.xml" -echo "Running python interface tests" -python3 ${scriptdir}/test_reports.py ${build_dir} ${datafile} -res=$? 
-if [ $res -ne 0 ]; then - perr "python interface tests failed" -fi -echo "Running command line tests" -echo "Checking required files from command line:" -check_datatable ${report_prog} ${datafile} "--host-files" ${host_files} -check_datatable ${report_prog} ${datafile} "--suite-files" ${suite_files} -check_datatable ${report_prog} ${datafile} "--utility-files" ${utility_files} -check_datatable ${report_prog} ${datafile} "--ccpp-files" ${ccpp_files} -echo -e "\nChecking lists from command line" -check_datatable ${report_prog} ${datafile} "--process-list" ${process_list} -check_datatable ${report_prog} ${datafile} "--module-list" ${module_list} -check_datatable ${report_prog} ${datafile} "--dependencies" ${dependencies} -check_datatable ${report_prog} ${datafile} "--suite-list" ${suite_list} \ - --sep ";" -echo -e "\nChecking variables for DDT suite from command line" -check_datatable ${report_prog} ${datafile} "--required-variables" \ - ${required_vars_ddt} "ddt_suite" -check_datatable ${report_prog} ${datafile} "--input-variables" \ - ${input_vars_ddt} "ddt_suite" -check_datatable ${report_prog} ${datafile} "--output-variables" \ - ${output_vars_ddt} "ddt_suite" -echo -e "\nChecking variables for temp suite from command line" -check_datatable ${report_prog} ${datafile} "--required-variables" \ - ${required_vars_temp} "temp_suite" -check_datatable ${report_prog} ${datafile} "--input-variables" \ - ${input_vars_temp} "temp_suite" -check_datatable ${report_prog} ${datafile} "--output-variables" \ - ${output_vars_temp} "temp_suite" -# Run make -make -res=$? -if [ $res -ne 0 ]; then - perr "make failed with exit code, ${res}" -fi -# Run test -./test_host -res=$? -if [ $res -ne 0 ]; then - perr "test_host failed with exit code, ${res}" -fi - -if [ "${cleanup}" == "ALWAYS" ]; then - docleanup -elif [ $res -eq 0 -a "${cleanup}" == "PASS" ]; then - docleanup -fi - -exit $res diff --git a/test/capgen_test/test_reports.py b/test/capgen_test/test_reports.py index c9fc452d..e0d1247d 100644 --- a/test/capgen_test/test_reports.py +++ b/test/capgen_test/test_reports.py @@ -12,48 +12,22 @@ """ import sys import os +import unittest +import subprocess + +_BUILD_DIR = os.path.join(os.path.abspath(os.environ['BUILD_DIR']), "test", "capgen_test") _TEST_DIR = os.path.dirname(os.path.abspath(__file__)) _FRAMEWORK_DIR = os.path.abspath(os.path.join(_TEST_DIR, os.pardir, os.pardir)) _SCRIPTS_DIR = os.path.join(_FRAMEWORK_DIR, "scripts") _SRC_DIR = os.path.join(_FRAMEWORK_DIR, "src") -if not os.path.exists(_SCRIPTS_DIR): - raise ImportError("Cannot find scripts directory") -# end if - -if ((sys.version_info[0] < 3) or - (sys.version_info[0] == 3) and (sys.version_info[1] < 8)): - raise Exception("Python 3.8 or greater required") -# end if - sys.path.append(_SCRIPTS_DIR) # pylint: disable=wrong-import-position from ccpp_datafile import datatable_report, DatatableReport # pylint: enable=wrong-import-position -def usage(errmsg=None): - """Raise an exception with optional error message and usage message""" - emsg = "usage: {} " - if errmsg: - emsg = errmsg + '\n' + emsg - # end if - raise ValueError(emsg.format(sys.argv[0])) - -if len(sys.argv) != 3: - usage() -# end if - -_BUILD_DIR = os.path.abspath(sys.argv[1]) -_DATABASE = os.path.abspath(sys.argv[2]) -if not os.path.isdir(_BUILD_DIR): - _EMSG = " must be an existing build directory" - usage(_EMSG) -# end if -if (not os.path.exists(_DATABASE)) or (not os.path.isfile(_DATABASE)): - _EMSG = " must be an existing CCPP database file" - usage(_EMSG) -# end if +_DATABASE 
= os.path.abspath(os.path.join(_BUILD_DIR, "ccpp", "datatable.xml")) # Check data _HOST_FILES = [os.path.join(_BUILD_DIR, "ccpp", "test_host_ccpp_cap.F90")] @@ -67,6 +41,9 @@ def usage(errmsg=None): [os.path.join(_BUILD_DIR, "ccpp", "test_host_ccpp_cap.F90"), os.path.join(_BUILD_DIR, "ccpp", "ccpp_ddt_suite_cap.F90"), os.path.join(_BUILD_DIR, "ccpp", "ccpp_temp_suite_cap.F90")] +_DEPENDENCIES = [os.path.join(_TEST_DIR, "adjust", "qux.F90"), + os.path.join(_TEST_DIR, "bar.F90"), + os.path.join(_TEST_DIR, "foo.F90")] _PROCESS_LIST = ["setter=temp_set", "adjusting=temp_calc_adjust"] _MODULE_LIST = ["environ_conditions", "make_ddt", "setup_coeffs", "temp_adjust", "temp_calc_adjust", "temp_set"] @@ -101,97 +78,165 @@ def usage(errmsg=None): "potential_temperature_at_interface", "coefficients_for_interpolation", "surface_air_pressure", "water_vapor_specific_humidity"] +_SEP = "," + +class TestDataTables(unittest.TestCase): + def test_host_files(self): + test_str = datatable_report(_DATABASE, DatatableReport("host_files"), _SEP) + self.assertSetEqual(set(_HOST_FILES), set(test_str.split(_SEP))) + + def test_suite_files(self): + test_str = datatable_report(_DATABASE, DatatableReport("suite_files"), _SEP) + self.assertSetEqual(set(_SUITE_FILES), set(test_str.split(_SEP))) + + def test_utility_files(self): + test_str = datatable_report(_DATABASE, DatatableReport("utility_files"), _SEP) + self.assertSetEqual(set(_UTILITY_FILES), set(test_str.split(_SEP))) + + def test_ccpp_files(self): + test_str = datatable_report(_DATABASE, DatatableReport("ccpp_files"), _SEP) + self.assertSetEqual(set(_CCPP_FILES), set(test_str.split(_SEP))) + + def test_process_list(self): + test_str = datatable_report(_DATABASE, DatatableReport("process_list"), _SEP) + self.assertSetEqual(set(_PROCESS_LIST), set(test_str.split(_SEP))) + + def test_module_list(self): + test_str = datatable_report(_DATABASE, DatatableReport("module_list"), _SEP) + self.assertSetEqual(set(_MODULE_LIST), set(test_str.split(_SEP))) + + def test_suite_list(self): + test_str = datatable_report(_DATABASE, DatatableReport("suite_list"), _SEP) + self.assertSetEqual(set(_SUITE_LIST), set(test_str.split(_SEP))) + + +class TestDdtSuite(unittest.TestCase): + def test_required_variables(self): + test_str = datatable_report(_DATABASE, DatatableReport("required_variables", value="ddt_suite"), _SEP) + self.assertSetEqual(set(_REQUIRED_VARS_DDT), set(test_str.split(_SEP))) + + def test_input_variables(self): + test_str = datatable_report(_DATABASE, DatatableReport("input_variables", value="ddt_suite"), _SEP) + self.assertSetEqual(set(_INPUT_VARS_DDT), set(test_str.split(_SEP))) + + def test_output_variables(self): + test_str = datatable_report(_DATABASE, DatatableReport("output_variables", value="ddt_suite"), _SEP) + self.assertSetEqual(set(_OUTPUT_VARS_DDT), set(test_str.split(_SEP))) + + +class TestTempSuite(unittest.TestCase): + def test_required_variables(self): + test_str = datatable_report(_DATABASE, DatatableReport("required_variables", value="temp_suite"), _SEP) + self.assertSetEqual(set(_REQUIRED_VARS_TEMP + _PROT_VARS_TEMP), set(test_str.split(_SEP))) + + def test_required_variables_excluding_protected(self): + test_str = datatable_report(_DATABASE, DatatableReport("required_variables", value="temp_suite"), _SEP, exclude_protected=True) + self.assertSetEqual(set(_REQUIRED_VARS_TEMP), set(test_str.split(_SEP))) + + def test_input_variables(self): + test_str = datatable_report(_DATABASE, DatatableReport("input_variables", value="temp_suite"), _SEP) + 
self.assertSetEqual(set(_INPUT_VARS_TEMP + _PROT_VARS_TEMP), set(test_str.split(_SEP)))
+
+    def test_input_variables_excluding_protected(self):
+        test_str = datatable_report(_DATABASE, DatatableReport("input_variables", value="temp_suite"), _SEP, exclude_protected=True)
+        self.assertSetEqual(set(_INPUT_VARS_TEMP), set(test_str.split(_SEP)))
+
+    def test_output_variables(self):
+        test_str = datatable_report(_DATABASE, DatatableReport("output_variables", value="temp_suite"), _SEP)
+        self.assertSetEqual(set(_OUTPUT_VARS_TEMP), set(test_str.split(_SEP)))
+
+
+class CommandLineDatafileRequiredFiles(unittest.TestCase):
+    def test_host_files(self):
+        completedProcess = subprocess.run([f"{_SCRIPTS_DIR}/ccpp_datafile.py", _DATABASE, "--host-files"],
+                                          capture_output=True,
+                                          text=True)
+        actualOutput = {s.strip() for s in completedProcess.stdout.split(_SEP)}
+        self.assertSetEqual(set(_HOST_FILES), actualOutput)
+
+    def test_suite_files(self):
+        completedProcess = subprocess.run([f"{_SCRIPTS_DIR}/ccpp_datafile.py", _DATABASE, "--suite-files"],
+                                          capture_output=True,
+                                          text=True)
+        self.assertEqual(_SEP.join(_SUITE_FILES), completedProcess.stdout.strip())
+
+    def test_utility_files(self):
+        completedProcess = subprocess.run([f"{_SCRIPTS_DIR}/ccpp_datafile.py", _DATABASE, "--utility-files"],
+                                          capture_output=True,
+                                          text=True)
+        self.assertEqual(_SEP.join(_UTILITY_FILES), completedProcess.stdout.strip())
+
+    def test_ccpp_files(self):
+        completedProcess = subprocess.run([f"{_SCRIPTS_DIR}/ccpp_datafile.py", _DATABASE, "--ccpp-files"],
+                                          capture_output=True,
+                                          text=True)
+        self.assertEqual(_SEP.join(_CCPP_FILES), completedProcess.stdout.strip())
+
+    def test_process_list(self):
+        completedProcess = subprocess.run([f"{_SCRIPTS_DIR}/ccpp_datafile.py", _DATABASE, "--process-list"],
+                                          capture_output=True,
+                                          text=True)
+        actualOutput = {s.strip() for s in completedProcess.stdout.split(_SEP)}
+        self.assertSetEqual(set(_PROCESS_LIST), actualOutput)
+
+    def test_module_list(self):
+        completedProcess = subprocess.run([f"{_SCRIPTS_DIR}/ccpp_datafile.py", _DATABASE, "--module-list"],
+                                          capture_output=True,
+                                          text=True)
+        self.assertEqual(_SEP.join(_MODULE_LIST), completedProcess.stdout.strip())
+
+    def test_dependencies(self):
+        completedProcess = subprocess.run([f"{_SCRIPTS_DIR}/ccpp_datafile.py", _DATABASE, "--dependencies"],
+                                          capture_output=True,
+                                          text=True)
+        self.assertEqual(_SEP.join(_DEPENDENCIES), completedProcess.stdout.strip())
+
+    def test_suite_list(self):
+        completedProcess = subprocess.run([f"{_SCRIPTS_DIR}/ccpp_datafile.py", _DATABASE, "--suite-list", "--sep=;"],
+                                          capture_output=True,
+                                          text=True)
+        # actualOutput = {s.strip() for s in completedProcess.stdout.split(";")}
+        self.assertEqual(";".join(_SUITE_LIST), completedProcess.stdout.strip())
+
+class CommandLineDdtSuite(unittest.TestCase):
+    def test_required_variables(self):
+        completedProcess = subprocess.run([f"{_SCRIPTS_DIR}/ccpp_datafile.py", _DATABASE, "--required-variables", "ddt_suite"],
+                                          capture_output=True,
+                                          text=True)
+        actualOutput = {s.strip() for s in completedProcess.stdout.split(_SEP)}
+        self.assertSetEqual(set(_REQUIRED_VARS_DDT), actualOutput)
+
+    def test_input_variables(self):
+        completedProcess = subprocess.run([f"{_SCRIPTS_DIR}/ccpp_datafile.py", _DATABASE, "--input-variables", "ddt_suite"],
+                                          capture_output=True,
+                                          text=True)
+        actualOutput = {s.strip() for s in completedProcess.stdout.split(_SEP)}
+        self.assertSetEqual(set(_INPUT_VARS_DDT), actualOutput)
+
+    def test_output_variables(self):
+        completedProcess =
subprocess.run([f"{_SCRIPTS_DIR}/ccpp_datafile.py", _DATABASE, "--output-variables", "ddt_suite"], + capture_output=True, + text=True) + self.assertEqual(_SEP.join(_OUTPUT_VARS_DDT), completedProcess.stdout.strip()) + +class CommandLineTempSuite(unittest.TestCase): + def test_required_variables(self): + completedProcess = subprocess.run([f"{_SCRIPTS_DIR}/ccpp_datafile.py", _DATABASE, "--required-variables", "temp_suite"], + capture_output=True, + text=True) + actualOutput = {s.strip() for s in completedProcess.stdout.split(_SEP)} + self.assertSetEqual(set(_REQUIRED_VARS_TEMP + _PROT_VARS_TEMP), actualOutput) + + def test_input_variables(self): + completedProcess = subprocess.run([f"{_SCRIPTS_DIR}/ccpp_datafile.py", _DATABASE, "--input-variables", "temp_suite"], + capture_output=True, + text=True) + actualOutput = {s.strip() for s in completedProcess.stdout.split(_SEP)} + self.assertSetEqual(set(_INPUT_VARS_TEMP + _PROT_VARS_TEMP), actualOutput) -def fields_string(field_type, field_list, sep): - """Create an error string for field(s), . - is used to separate items in """ - indent = ' '*11 - if field_list: - if len(field_list) > 1: - field_str = "{} Fields: ".format(field_type) - else: - field_str = "{} Field: ".format(field_type) - # end if - fmsg = "\n{}{}{}".format(indent, field_str, sep.join(field_list)) - else: - fmsg = "" - # end if - return fmsg - -def check_datatable(database, report_type, check_list, - sep=',', exclude_protected=False): - """Run a database report and check the return string. - If an error is found, print an error message. - Return the number of errors""" - if sep is None: - sep = ',' - # end if - test_str = datatable_report(database, report_type, sep, exclude_protected=exclude_protected) - test_list = [x for x in test_str.split(sep) if x] - missing = list() - unexpected = list() - for item in check_list: - if item not in test_list: - missing.append(item) - # end if - # end for - for item in test_list: - if item not in check_list: - unexpected.append(item) - # end if - # end for - if missing or unexpected: - vmsg = "ERROR in {} datafile check:".format(report_type.action) - vmsg += fields_string("Missing", missing, sep) - vmsg += fields_string("Unexpected", unexpected, sep) - print(vmsg) - else: - print("{} report okay".format(report_type.action)) - # end if - return len(missing) + len(unexpected) - -NUM_ERRORS = 0 -print("Checking required files from python:") -NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("host_files"), - _HOST_FILES) -NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("suite_files"), - _SUITE_FILES) -NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("utility_files"), - _UTILITY_FILES) -NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("ccpp_files"), - _CCPP_FILES) -print("\nChecking lists from python") -NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("process_list"), - _PROCESS_LIST) -NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("module_list"), - _MODULE_LIST) -NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("suite_list"), - _SUITE_LIST) -print("\nChecking variables for DDT suite from python") -NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("required_variables", - value="ddt_suite"), - _REQUIRED_VARS_DDT) -NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("input_variables", - value="ddt_suite"), - _INPUT_VARS_DDT) -NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("output_variables", - value="ddt_suite"), - _OUTPUT_VARS_DDT) -print("\nChecking variables for temp 
suite from python") -NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("required_variables", - value="temp_suite"), - _REQUIRED_VARS_TEMP + _PROT_VARS_TEMP) -NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("required_variables", - value="temp_suite"), - _REQUIRED_VARS_TEMP, exclude_protected=True) -NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("input_variables", - value="temp_suite"), - _INPUT_VARS_TEMP + _PROT_VARS_TEMP) -NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("input_variables", - value="temp_suite"), - _INPUT_VARS_TEMP, exclude_protected=True) -NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("output_variables", - value="temp_suite"), - _OUTPUT_VARS_TEMP) - -sys.exit(NUM_ERRORS) + def test_output_variables(self): + completedProcess = subprocess.run([f"{_SCRIPTS_DIR}/ccpp_datafile.py", _DATABASE, "--output-variables", "temp_suite"], + capture_output=True, + text=True) + actualOutput = {s.strip() for s in completedProcess.stdout.split(_SEP)} + self.assertSetEqual(set(_OUTPUT_VARS_TEMP), actualOutput) From c707b20bcd0219af2ba5f4ba38094b96380ae938 Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Wed, 30 Oct 2024 09:44:26 -0600 Subject: [PATCH 04/18] Updating MPI cmake find with components. --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index c707f5ce..9fd97eb7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -46,7 +46,7 @@ endif() #------------------------------------------------------------------------------ # Set MPI flags for Fortran with MPI F08 interface -find_package(MPI REQUIRED Fortran) +find_package(MPI REQUIRED COMPONENTS Fortran) if(NOT MPI_Fortran_HAVE_F08_MODULE) message(FATAL_ERROR "MPI implementation does not support the Fortran 2008 mpi_f08 interface") endif() From fd66d0f4f06582459e4cd3ec3cf0a7e72a1ea5d1 Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Wed, 30 Oct 2024 10:02:45 -0600 Subject: [PATCH 05/18] Adding mpi library. --- .github/workflows/capgen_unit_tests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/capgen_unit_tests.yaml b/.github/workflows/capgen_unit_tests.yaml index 37a0a9ca..7ff2f8b0 100644 --- a/.github/workflows/capgen_unit_tests.yaml +++ b/.github/workflows/capgen_unit_tests.yaml @@ -15,7 +15,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: update repos and install dependencies - run: sudo apt-get update && sudo apt-get install -y build-essential ${{matrix.fortran-compiler}} cmake python3 git + run: sudo apt-get update && sudo apt-get install -y build-essential libopenmpi3 ${{matrix.fortran-compiler}} cmake python3 git - name: Build the framework run: | From 278b2bdfdad733bad6a7f142c6fc75992c4b7aab Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Wed, 30 Oct 2024 10:06:51 -0600 Subject: [PATCH 06/18] Updating mpi library. 
--- .github/workflows/capgen_unit_tests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/capgen_unit_tests.yaml b/.github/workflows/capgen_unit_tests.yaml index 7ff2f8b0..8ed151e4 100644 --- a/.github/workflows/capgen_unit_tests.yaml +++ b/.github/workflows/capgen_unit_tests.yaml @@ -15,7 +15,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: update repos and install dependencies - run: sudo apt-get update && sudo apt-get install -y build-essential libopenmpi3 ${{matrix.fortran-compiler}} cmake python3 git + run: sudo apt-get update && sudo apt-get install -y build-essential libopenmpi-dev ${{matrix.fortran-compiler}} cmake python3 git - name: Build the framework run: | From 9ce27bf63944fe9943f2c114682bf9916f096a8f Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Wed, 30 Oct 2024 10:08:26 -0600 Subject: [PATCH 07/18] Adding missing cmake. --- cmake/ccpp_capgen.cmake | 102 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 102 insertions(+) create mode 100644 cmake/ccpp_capgen.cmake diff --git a/cmake/ccpp_capgen.cmake b/cmake/ccpp_capgen.cmake new file mode 100644 index 00000000..1acfd249 --- /dev/null +++ b/cmake/ccpp_capgen.cmake @@ -0,0 +1,102 @@ +function(ccpp_capgen) + set(optionalArgs CAPGEN_DEBUG) + set(oneValueArgs HOSTFILES SCHEMEFILES SUITES HOST_NAME OUTPUT_ROOT VERBOSITY) + + cmake_parse_arguments(arg "${optionalArgs}" "${oneValueArgs}" "" ${ARGN}) + + list(APPEND CCPP_CAPGEN_CMD "${CMAKE_SOURCE_DIR}/scripts/ccpp_capgen.py") + + if(DEFINED arg_CAPGEN_DEBUG AND arg_CAPGEN_DEBUG) + list(APPEND CCPP_CAPGEN_CMD "--debug") + endif() + + if(DEFINED arg_HOSTFILES) + list(APPEND CCPP_CAPGEN_CMD "--host-files" "${arg_HOSTFILES}") + endif() + if(DEFINED arg_SCHEMEFILES) + list(APPEND CCPP_CAPGEN_CMD "--scheme-files" "${arg_SCHEMEFILES}") + endif() + if(DEFINED arg_SUITES) + list(APPEND CCPP_CAPGEN_CMD "--suites" "${arg_SUITES}") + endif() + if(DEFINED arg_HOST_NAME) + list(APPEND CCPP_CAPGEN_CMD "--host-name" "${arg_HOST_NAME}") + endif() + if(DEFINED arg_OUTPUT_ROOT) + message(STATUS "Creating output directory: ${arg_OUTPUT_ROOT}") + file(MAKE_DIRECTORY "${arg_OUTPUT_ROOT}") + list(APPEND CCPP_CAPGEN_CMD "--output-root" "${arg_OUTPUT_ROOT}") + endif() + if(DEFINED arg_VERBOSITY) + string(REPEAT "--verbose" ${arg_VERBOSITY} VERBOSE_PARAMS_SEPERATED) + separate_arguments(VERBOSE_PARAMS UNIX_COMMAND "${VERBOSE_PARAMS_SEPERATED}") + list(APPEND CCPP_CAPGEN_CMD ${VERBOSE_PARAMS}) + endif() + + message(STATUS "Running ccpp_capgen from ${CMAKE_CURRENT_SOURCE_DIR}") + + string(REPLACE ";" " " CAPGEN_CMD_PARAMS_LIST "${CCPP_CAPGEN_CMD}") + message(STATUS "Running ccpp_capgen: ${CAPGEN_CMD_PARAMS_LIST}") + + execute_process(COMMAND ${CCPP_CAPGEN_CMD} + WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" + OUTPUT_VARIABLE CAPGEN_OUT + ERROR_VARIABLE CAPGEN_OUT + RESULT_VARIABLE RES) + + message(STATUS "ccpp-capgen stdout:" ${CAPGEN_OUT}) + + if(RES EQUAL 0) + message(STATUS "ccpp-capgen completed successfully") + else() + message(FATAL_ERROR "CCPP cap generation FAILED: result = ${RES}") + endif() +endfunction() + + + +function(ccpp_datafile) + # set(oneValueArgs DATATABLE REPORT_NAME SEPERATOR CCPP_CAPS_LIB_FILES) + set(oneValueArgs DATATABLE REPORT_NAME CCPP_CAPS_LIB_FILES) + cmake_parse_arguments(arg "" "${oneValueArgs}" "" ${ARGN}) + + set(CCPP_DATAFILE_CMD "${CMAKE_SOURCE_DIR}/scripts/ccpp_datafile.py") + + if(NOT DEFINED arg_DATATABLE) + message(FATAL_ERROR "function(ccpp_datafile): DATATABLE not set. 
A datatable file must be configured to call ccpp_datafile.") + endif() + list(APPEND CCPP_DATAFILE_CMD "${arg_DATATABLE}") + + if(NOT DEFINED arg_REPORT_NAME) + message(FATAL_ERROR "function(ccpp_datafile): REPORT_NAME not set. Must specify the report to generate to run cpp_datafile.py") + endif() + list(APPEND CCPP_DATAFILE_CMD "${arg_REPORT_NAME}") + + # message(STATUS "${arg_SEPERATOR}") + + # if(DEFINED arg_SEPERATOR) + # message(STATUS "Adding seperator") + # list(APPEND CCPP_DATAFILE_CMD "--seperator=\\${arg_SEPERATOR}") + # endif() + + message(STATUS "${CCPP_DATAFILE_CMD}") + message(STATUS "Running ccpp_datafile from ${CMAKE_CURRENT_SOURCE_DIR}") + + string(REPLACE ";" " " CCPP_DATAFILE_CMD_SEPERATED "${CCPP_DATAFILE_CMD}") + message(STATUS "Running ccpp_datafile.py command: ${CCPP_DATAFILE_CMD_SEPERATED}") + + execute_process(COMMAND ${CCPP_DATAFILE_CMD} + WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" + OUTPUT_VARIABLE CCPP_CAPS + RESULT_VARIABLE RES + OUTPUT_STRIP_TRAILING_WHITESPACE + ERROR_STRIP_TRAILING_WHITESPACE) + message(STATUS "CCPP_CAPS = ${CCPP_CAPS}") + if(RES EQUAL 0) + message(STATUS "CCPP cap files retrieved") + else() + message(FATAL_ERROR "CCPP cap file retrieval FAILED: result = ${RES}") + endif() + set(CCPP_CAPS "${CCPP_CAPS}" PARENT_SCOPE) +endfunction() + From 68f257591291e3972735b0313a28961c5d3bace8 Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Wed, 30 Oct 2024 10:11:25 -0600 Subject: [PATCH 08/18] Actually building. --- .github/workflows/capgen_unit_tests.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/capgen_unit_tests.yaml b/.github/workflows/capgen_unit_tests.yaml index 8ed151e4..665da498 100644 --- a/.github/workflows/capgen_unit_tests.yaml +++ b/.github/workflows/capgen_unit_tests.yaml @@ -20,6 +20,8 @@ jobs: - name: Build the framework run: | cmake -S. -B./build -DCCPP_FRAMEWORK_ENABLE_TESTS=ON + cd build + make - name: Run unit tests run: | From c256bed9bc92bb5325bee9b9a6b7a133184d8937 Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Wed, 30 Oct 2024 10:13:58 -0600 Subject: [PATCH 09/18] Adding pytest dependency. --- .github/workflows/capgen_unit_tests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/capgen_unit_tests.yaml b/.github/workflows/capgen_unit_tests.yaml index 665da498..b0b75f84 100644 --- a/.github/workflows/capgen_unit_tests.yaml +++ b/.github/workflows/capgen_unit_tests.yaml @@ -15,7 +15,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: update repos and install dependencies - run: sudo apt-get update && sudo apt-get install -y build-essential libopenmpi-dev ${{matrix.fortran-compiler}} cmake python3 git + run: sudo apt-get update && sudo apt-get install -y build-essential libopenmpi-dev ${{matrix.fortran-compiler}} cmake python3 python3-pytest git - name: Build the framework run: | From e7f84f441570f5ca1034b40efec8b60e11bd718d Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Wed, 30 Oct 2024 10:17:53 -0600 Subject: [PATCH 10/18] Updating pytest binary name. 
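
On Ubuntu the python3-pytest apt package installs the runner as pytest-3
rather than pytest, which is why the bare pytest invocation fails in CI. An
alternative that does not depend on the entry-point name (a sketch, not part
of this patch) is to launch pytest through the interpreter:

    # Equivalent invocation that works whether the runner is installed as
    # `pytest` or `pytest-3`, as long as the pytest module is importable.
    BUILD_DIR=./build python3 -m pytest test/capgen_test/test_reports.py
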
--- .github/workflows/capgen_unit_tests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/capgen_unit_tests.yaml b/.github/workflows/capgen_unit_tests.yaml index b0b75f84..61f074b3 100644 --- a/.github/workflows/capgen_unit_tests.yaml +++ b/.github/workflows/capgen_unit_tests.yaml @@ -30,4 +30,4 @@ jobs: - name: Run python tests run: | - BUILD_DIR=./build pytest test/capgen_test/test_reports.py + BUILD_DIR=./build pytest-3 test/capgen_test/test_reports.py From c8863484227fdc3c2afbfb4012cb68f758aba396 Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Wed, 30 Oct 2024 10:20:59 -0600 Subject: [PATCH 11/18] Updating pytest install. --- .github/workflows/capgen_unit_tests.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/capgen_unit_tests.yaml b/.github/workflows/capgen_unit_tests.yaml index 61f074b3..d9e45331 100644 --- a/.github/workflows/capgen_unit_tests.yaml +++ b/.github/workflows/capgen_unit_tests.yaml @@ -15,7 +15,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: update repos and install dependencies - run: sudo apt-get update && sudo apt-get install -y build-essential libopenmpi-dev ${{matrix.fortran-compiler}} cmake python3 python3-pytest git + run: sudo apt-get update && sudo apt-get install -y build-essential libopenmpi-dev ${{matrix.fortran-compiler}} cmake python3 git - name: Build the framework run: | @@ -30,4 +30,5 @@ jobs: - name: Run python tests run: | + pip install --user pytest BUILD_DIR=./build pytest-3 test/capgen_test/test_reports.py From 20feac2c253ace6c40cdd8c7053baa3755ace7f7 Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Wed, 30 Oct 2024 10:22:20 -0600 Subject: [PATCH 12/18] Updating pytest binary name. --- .github/workflows/capgen_unit_tests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/capgen_unit_tests.yaml b/.github/workflows/capgen_unit_tests.yaml index d9e45331..1270eb8f 100644 --- a/.github/workflows/capgen_unit_tests.yaml +++ b/.github/workflows/capgen_unit_tests.yaml @@ -31,4 +31,4 @@ jobs: - name: Run python tests run: | pip install --user pytest - BUILD_DIR=./build pytest-3 test/capgen_test/test_reports.py + BUILD_DIR=./build pytest test/capgen_test/test_reports.py From efff984c531921e45481fc237e9c7b3199aa8757 Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Wed, 30 Oct 2024 10:30:58 -0600 Subject: [PATCH 13/18] Updating test binary name in ctest to distinguish between other tests with same binary name. --- test/capgen_test/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/capgen_test/CMakeLists.txt b/test/capgen_test/CMakeLists.txt index f1e334bd..cd7ec78b 100644 --- a/test/capgen_test/CMakeLists.txt +++ b/test/capgen_test/CMakeLists.txt @@ -78,4 +78,4 @@ set_target_properties(${HOST} PROPERTIES LINK_FLAGS "${CMAKE_Fortran_FLAGS}") enable_testing() -add_test(NAME ${HOST} COMMAND ${HOST}) +add_test(NAME capgen_${HOST} COMMAND ${HOST}) From 84cdd96095a5f750a34b9ee7787858e97dcd8c3a Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Mon, 4 Nov 2024 06:05:00 -0700 Subject: [PATCH 14/18] Minor CMake cleanup and temporarily removing MPI dependency. 
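
With CCPP_FRAMEWORK_ENABLE_TESTS=ON the build now fetches and builds pFUnit at
configure time via FetchContent, so a local build roughly mirrors the CI job.
A sketch of the sequence (the exact CI steps live in capgen_unit_tests.yaml;
this assumes a recent CMake with FetchContent support and a Fortran compiler
on PATH):

    # Configure with tests enabled (pFUnit is cloned and built at this step),
    # compile the framework, and run the registered ctest tests.
    cmake -S . -B build -DCCPP_FRAMEWORK_ENABLE_TESTS=ON
    cmake --build build
    (cd build && ctest --output-on-failure)
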
--- CMakeLists.txt | 15 ++++++++++----- src/CMakeLists.txt | 2 +- src/ccpp_types.F90 | 4 ++-- test/capgen_test/CMakeLists.txt | 6 ------ 4 files changed, 13 insertions(+), 14 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 9fd97eb7..891d252c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -4,6 +4,8 @@ project(ccpp_framework VERSION 5.0.0 LANGUAGES Fortran) +enable_language(C CXX) +include(FetchContent) include(cmake/ccpp_capgen.cmake) set(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH};${PROJECT_SOURCE_DIR}/cmake") @@ -18,6 +20,7 @@ option(CCPP_FRAMEWORK_BUILD_DOCUMENTATION "Create and install the HTML documentation (requires Doxygen)" OFF) option(CCPP_FRAMEWORK_ENABLE_OPENMP "Enable OpenMP support for the framework" OFF) option(CCPP_FRAMEWORK_ENABLE_TESTS "Enable building/running tests" OFF) +option(BUILD_SHARED_LIBS "Build a static library" OFF) set(CCPP_VERBOSITY "0" CACHE STRING "Verbosity level of output (default: 0)") # Use rpaths on MacOSX @@ -46,7 +49,7 @@ endif() #------------------------------------------------------------------------------ # Set MPI flags for Fortran with MPI F08 interface -find_package(MPI REQUIRED COMPONENTS Fortran) +# find_package(MPI REQUIRED Fortran) if(NOT MPI_Fortran_HAVE_F08_MODULE) message(FATAL_ERROR "MPI implementation does not support the Fortran 2008 mpi_f08 interface") endif() @@ -73,15 +76,17 @@ if(CMAKE_BUILD_TYPE STREQUAL "Debug") add_definitions(-DDEBUG) endif() -#------------------------------------------------------------------------------ -# Request a static build -option(BUILD_SHARED_LIBS "Build a static library" OFF) - #------------------------------------------------------------------------------ # Add the sub-directories add_subdirectory(src) if(CCPP_FRAMEWORK_ENABLE_TESTS) + FetchContent_Declare( + pFUnit + GIT_REPOSITORY https://github.com/Goddard-Fortran-Ecosystem/pFUnit.git + GIT_TAG 26dadb1157819ea1bd9c355c60ed52f42dd36432 # v4.10.0 + ) + FetchContent_MakeAvailable(pFUnit) enable_testing() add_subdirectory(test) endif() diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index ff7d36b0..b167a004 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -28,7 +28,7 @@ set(${PACKAGE}_LIB_DIRS #------------------------------------------------------------------------------ # Define the executable and what to link add_library(ccpp_framework STATIC ${SOURCES_F90}) -target_link_libraries(ccpp_framework PUBLIC MPI::MPI_Fortran) +# target_link_libraries(ccpp_framework PUBLIC MPI::MPI_Fortran) set_target_properties(ccpp_framework PROPERTIES VERSION ${PROJECT_VERSION} SOVERSION ${PROJECT_VERSION_MAJOR} diff --git a/src/ccpp_types.F90 b/src/ccpp_types.F90 index 7370add4..9b2f8e41 100644 --- a/src/ccpp_types.F90 +++ b/src/ccpp_types.F90 @@ -19,7 +19,7 @@ ! module ccpp_types - use mpi_f08, only: MPI_Comm + ! use mpi_f08, only: MPI_Comm !! \section arg_table_ccpp_types !! \htmlinclude ccpp_types.html @@ -29,7 +29,7 @@ module ccpp_types private public :: ccpp_t, one - public :: MPI_Comm + ! 
public :: MPI_Comm !> @var Definition of constant one integer, parameter :: one = 1 diff --git a/test/capgen_test/CMakeLists.txt b/test/capgen_test/CMakeLists.txt index cd7ec78b..b54ff5fa 100644 --- a/test/capgen_test/CMakeLists.txt +++ b/test/capgen_test/CMakeLists.txt @@ -13,12 +13,6 @@ LIST(APPEND SUITE_FILES "ddt_suite.xml" "temp_suite.xml") # We assume there are files ${HOST}.meta and ${HOST}.F90 in CMAKE_SOURCE_DIR SET(HOST "test_host") -#------------------------------------------------------------------------------ -# -# End of project-specific input -# -#------------------------------------------------------------------------------ - # By default, generated caps go in ccpp subdir SET(CCPP_CAP_FILES "${CMAKE_CURRENT_BINARY_DIR}/ccpp" CACHE STRING "Location of CCPP-generated cap files") From cc43fda05f3af4d2db754f9cccb7a97832f61d58 Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Mon, 4 Nov 2024 14:37:13 -0700 Subject: [PATCH 15/18] Re-adding MPI for testing. --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 891d252c..a3cfe339 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -49,7 +49,7 @@ endif() #------------------------------------------------------------------------------ # Set MPI flags for Fortran with MPI F08 interface -# find_package(MPI REQUIRED Fortran) +find_package(MPI REQUIRED COMPONENTS Fortran) if(NOT MPI_Fortran_HAVE_F08_MODULE) message(FATAL_ERROR "MPI implementation does not support the Fortran 2008 mpi_f08 interface") endif() From cfe39213532467939060ad1e3dcf0c03186dbddf Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Sun, 10 Nov 2024 21:07:50 -0700 Subject: [PATCH 16/18] Re-adding MPI linking and minor CMake cleanup. --- cmake/ccpp_capgen.cmake | 8 -------- src/CMakeLists.txt | 2 +- src/ccpp_types.F90 | 4 ++-- test/CMakeLists.txt | 2 +- 4 files changed, 4 insertions(+), 12 deletions(-) diff --git a/cmake/ccpp_capgen.cmake b/cmake/ccpp_capgen.cmake index 1acfd249..425bfa2d 100644 --- a/cmake/ccpp_capgen.cmake +++ b/cmake/ccpp_capgen.cmake @@ -56,7 +56,6 @@ endfunction() function(ccpp_datafile) - # set(oneValueArgs DATATABLE REPORT_NAME SEPERATOR CCPP_CAPS_LIB_FILES) set(oneValueArgs DATATABLE REPORT_NAME CCPP_CAPS_LIB_FILES) cmake_parse_arguments(arg "" "${oneValueArgs}" "" ${ARGN}) @@ -72,13 +71,6 @@ function(ccpp_datafile) endif() list(APPEND CCPP_DATAFILE_CMD "${arg_REPORT_NAME}") - # message(STATUS "${arg_SEPERATOR}") - - # if(DEFINED arg_SEPERATOR) - # message(STATUS "Adding seperator") - # list(APPEND CCPP_DATAFILE_CMD "--seperator=\\${arg_SEPERATOR}") - # endif() - message(STATUS "${CCPP_DATAFILE_CMD}") message(STATUS "Running ccpp_datafile from ${CMAKE_CURRENT_SOURCE_DIR}") diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index b167a004..ff7d36b0 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -28,7 +28,7 @@ set(${PACKAGE}_LIB_DIRS #------------------------------------------------------------------------------ # Define the executable and what to link add_library(ccpp_framework STATIC ${SOURCES_F90}) -# target_link_libraries(ccpp_framework PUBLIC MPI::MPI_Fortran) +target_link_libraries(ccpp_framework PUBLIC MPI::MPI_Fortran) set_target_properties(ccpp_framework PROPERTIES VERSION ${PROJECT_VERSION} SOVERSION ${PROJECT_VERSION_MAJOR} diff --git a/src/ccpp_types.F90 b/src/ccpp_types.F90 index 9b2f8e41..7370add4 100644 --- a/src/ccpp_types.F90 +++ b/src/ccpp_types.F90 @@ -19,7 +19,7 @@ ! module ccpp_types - ! 
use mpi_f08, only: MPI_Comm + use mpi_f08, only: MPI_Comm !! \section arg_table_ccpp_types !! \htmlinclude ccpp_types.html @@ -29,7 +29,7 @@ module ccpp_types private public :: ccpp_t, one - ! public :: MPI_Comm + public :: MPI_Comm !> @var Definition of constant one integer, parameter :: one = 1 diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt index 99f7f981..c917b606 100644 --- a/test/CMakeLists.txt +++ b/test/CMakeLists.txt @@ -1,3 +1,3 @@ # add_subdirectory(advection_test) add_subdirectory(capgen_test) -# add_subdirectory(var_compatability_test) \ No newline at end of file +# add_subdirectory(var_compatability_test) From 9b66008436aeffcc5d710726290b789e6cdda50d Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Sun, 10 Nov 2024 21:35:21 -0700 Subject: [PATCH 17/18] Fixing CI job failure. --- CMakeLists.txt | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index a3cfe339..376cdf64 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -23,6 +23,16 @@ option(CCPP_FRAMEWORK_ENABLE_TESTS "Enable building/running tests" OFF) option(BUILD_SHARED_LIBS "Build a static library" OFF) set(CCPP_VERBOSITY "0" CACHE STRING "Verbosity level of output (default: 0)") +if(CCPP_FRAMEWORK_ENABLE_TESTS) + FetchContent_Declare( + pFUnit + GIT_REPOSITORY https://github.com/Goddard-Fortran-Ecosystem/pFUnit.git + GIT_TAG 26dadb1157819ea1bd9c355c60ed52f42dd36432 # v4.10.0 + ) + FetchContent_MakeAvailable(pFUnit) + enable_testing() +endif() + # Use rpaths on MacOSX set(CMAKE_MACOSX_RPATH 1) @@ -81,13 +91,6 @@ endif() add_subdirectory(src) if(CCPP_FRAMEWORK_ENABLE_TESTS) - FetchContent_Declare( - pFUnit - GIT_REPOSITORY https://github.com/Goddard-Fortran-Ecosystem/pFUnit.git - GIT_TAG 26dadb1157819ea1bd9c355c60ed52f42dd36432 # v4.10.0 - ) - FetchContent_MakeAvailable(pFUnit) - enable_testing() add_subdirectory(test) endif() From df44e3abcd69451af5993a57d64551142bff72fd Mon Sep 17 00:00:00 2001 From: Michael Waxmonsky Date: Sun, 10 Nov 2024 21:44:07 -0700 Subject: [PATCH 18/18] Removing pylint workflow for different PR. --- .github/workflows/pylint.yaml | 32 -- ccpp-pylint-config.toml | 550 ---------------------------------- 2 files changed, 582 deletions(-) delete mode 100644 .github/workflows/pylint.yaml delete mode 100644 ccpp-pylint-config.toml diff --git a/.github/workflows/pylint.yaml b/.github/workflows/pylint.yaml deleted file mode 100644 index a034fd0b..00000000 --- a/.github/workflows/pylint.yaml +++ /dev/null @@ -1,32 +0,0 @@ -name: Pylinting - -on: - workflow_dispatch: - pull_request: - branches: [develop, main] - -jobs: - build: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ['3.11'] - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install pylint - - - name: Pylint scripts - run: | - pylint scripts - - name: Pylint tests - run: | - pylint test diff --git a/ccpp-pylint-config.toml b/ccpp-pylint-config.toml deleted file mode 100644 index c8333fa0..00000000 --- a/ccpp-pylint-config.toml +++ /dev/null @@ -1,550 +0,0 @@ -[tool.pylint.main] -# Analyse import fallback blocks. This can be used to support both Python 2 and 3 -# compatible code, which means that the block might have code that exists only in -# one or another interpreter, leading to false positives when analysed. 
-# analyse-fallback-blocks = - -# Clear in-memory caches upon conclusion of linting. Useful if running pylint in -# a server-like mode. -# clear-cache-post-run = - -# Always return a 0 (non-error) status code, even if lint errors are found. This -# is primarily useful in continuous integration scripts. -# exit-zero = - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. -# extension-pkg-allow-list = - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. (This is an alternative name to extension-pkg-allow-list -# for backward compatibility.) -# extension-pkg-whitelist = - -# Return non-zero exit code if any of these messages/categories are detected, -# even if score is above --fail-under value. Syntax same as enable. Messages -# specified are enabled, while categories only check already-enabled messages. -# fail-on = - -# Specify a score threshold under which the program will exit with error. -fail-under = 10 - -# Interpret the stdin as a python script, whose filename needs to be passed as -# the module_or_package argument. -# from-stdin = - -# Files or directories to be skipped. They should be base names, not paths. -ignore = ["CVS"] - -# Add files or directories matching the regular expressions patterns to the -# ignore-list. The regex matches against paths and can be in Posix or Windows -# format. Because '\\' represents the directory delimiter on Windows systems, it -# can't be used as an escape character. -# ignore-paths = - -# Files or directories matching the regular expression patterns are skipped. The -# regex matches against base names, not paths. The default value ignores Emacs -# file locks -ignore-patterns = ["^\\.#"] - -# List of module names for which member attributes should not be checked and will -# not be imported (useful for modules/projects where namespaces are manipulated -# during runtime and thus existing member attributes cannot be deduced by static -# analysis). It supports qualified module names, as well as Unix pattern -# matching. -# ignored-modules = - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -# init-hook = - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use, and will cap the count on Windows to -# avoid hangs. -jobs = 1 - -# Control the amount of potential inferred values when inferring a single object. -# This can help the performance when dealing with large functions or complex, -# nested conditions. -limit-inference-results = 100 - -# List of plugins (as comma separated values of python module names) to load, -# usually to register additional checkers. -# load-plugins = - -# Pickle collected data for later comparisons. -persistent = true - -# Resolve imports to .pyi stubs if available. May reduce no-member messages and -# increase not-an-iterable messages. -# prefer-stubs = - -# Minimum Python version to use for version dependent checks. Will default to the -# version used to run pylint. -py-version = "3.11" - -# Discover python modules and packages in the file system subtree. -# recursive = - -# Add paths to the list of the source roots. Supports globbing patterns. 
The -# source root is an absolute path or a path relative to the current working -# directory used to determine a package namespace for modules located under the -# source root. -# source-roots = - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode = true - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -# unsafe-load-any-extension = - -[tool.pylint.basic] -# Naming style matching correct argument names. -argument-naming-style = "snake_case" - -# Regular expression matching correct argument names. Overrides argument-naming- -# style. If left empty, argument names will be checked with the set naming style. -# argument-rgx = - -# Naming style matching correct attribute names. -attr-naming-style = "snake_case" - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. If left empty, attribute names will be checked with the set naming -# style. -# attr-rgx = - -# Bad variable names which should always be refused, separated by a comma. -bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"] - -# Bad variable names regexes, separated by a comma. If names match any regex, -# they will always be refused -# bad-names-rgxs = - -# Naming style matching correct class attribute names. -class-attribute-naming-style = "any" - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. If left empty, class attribute names will be checked -# with the set naming style. -# class-attribute-rgx = - -# Naming style matching correct class constant names. -class-const-naming-style = "UPPER_CASE" - -# Regular expression matching correct class constant names. Overrides class- -# const-naming-style. If left empty, class constant names will be checked with -# the set naming style. -# class-const-rgx = - -# Naming style matching correct class names. -class-naming-style = "PascalCase" - -# Regular expression matching correct class names. Overrides class-naming-style. -# If left empty, class names will be checked with the set naming style. -# class-rgx = - -# Naming style matching correct constant names. -const-naming-style = "UPPER_CASE" - -# Regular expression matching correct constant names. Overrides const-naming- -# style. If left empty, constant names will be checked with the set naming style. -# const-rgx = - -# Minimum line length for functions/classes that require docstrings, shorter ones -# are exempt. -docstring-min-length = -1 - -# Naming style matching correct function names. -function-naming-style = "snake_case" - -# Regular expression matching correct function names. Overrides function-naming- -# style. If left empty, function names will be checked with the set naming style. -# function-rgx = - -# Good variable names which should always be accepted, separated by a comma. -good-names = ["i", "j", "k", "ex", "Run", "_"] - -# Good variable names regexes, separated by a comma. If names match any regex, -# they will always be accepted -# good-names-rgxs = - -# Include a hint for the correct naming format with invalid-name. -# include-naming-hint = - -# Naming style matching correct inline iteration names. -inlinevar-naming-style = "any" - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. If left empty, inline iteration names will be checked -# with the set naming style. 
-# inlinevar-rgx = - -# Naming style matching correct method names. -method-naming-style = "snake_case" - -# Regular expression matching correct method names. Overrides method-naming- -# style. If left empty, method names will be checked with the set naming style. -# method-rgx = - -# Naming style matching correct module names. -module-naming-style = "snake_case" - -# Regular expression matching correct module names. Overrides module-naming- -# style. If left empty, module names will be checked with the set naming style. -# module-rgx = - -# Colon-delimited sets of names that determine each other's naming style when the -# name regexes allow several styles. -# name-group = - -# Regular expression which should only match function or class names that do not -# require a docstring. -no-docstring-rgx = "^_" - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. These -# decorators are taken in consideration only for invalid-name. -property-classes = ["abc.abstractproperty"] - -# Regular expression matching correct type alias names. If left empty, type alias -# names will be checked with the set naming style. -# typealias-rgx = - -# Regular expression matching correct type variable names. If left empty, type -# variable names will be checked with the set naming style. -# typevar-rgx = - -# Naming style matching correct variable names. -variable-naming-style = "snake_case" - -# Regular expression matching correct variable names. Overrides variable-naming- -# style. If left empty, variable names will be checked with the set naming style. -# variable-rgx = - -[tool.pylint.classes] -# Warn about protected attribute access inside special methods -# check-protected-access-in-special-methods = - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"] - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"] - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg = ["cls"] - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg = ["mcs"] - -[tool.pylint.design] -# List of regular expressions of class ancestor names to ignore when counting -# public methods (see R0903) -# exclude-too-few-public-methods = - -# List of qualified class names to ignore when counting class parents (see R0901) -# ignored-parents = - -# Maximum number of arguments for function / method. -max-args = 5 - -# Maximum number of attributes for a class (see R0902). -max-attributes = 7 - -# Maximum number of boolean expressions in an if statement (see R0916). -max-bool-expr = 5 - -# Maximum number of branch for function / method body. -max-branches = 12 - -# Maximum number of locals for function / method body. -max-locals = 15 - -# Maximum number of parents for a class (see R0901). -max-parents = 7 - -# Maximum number of positional arguments for function / method. -max-positional-arguments = 5 - -# Maximum number of public methods for a class (see R0904). -max-public-methods = 20 - -# Maximum number of return / yield for function / method body. -max-returns = 6 - -# Maximum number of statements in function / method body. -max-statements = 50 - -# Minimum number of public methods for a class (see R0903). 
-min-public-methods = 2 - -[tool.pylint.exceptions] -# Exceptions that will emit a warning when caught. -overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] - -[tool.pylint.format] -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -# expected-line-ending-format = - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines = "^\\s*(# )??$" - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren = 4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string = " " - -# Maximum number of characters on a single line. -max-line-length = 100 - -# Maximum number of lines in a module. -max-module-lines = 1000 - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -# single-line-class-stmt = - -# Allow the body of an if to be on the same line as the test if there is no else. -# single-line-if-stmt = - -[tool.pylint.imports] -# List of modules that can be imported at any level, not just the top level one. -# allow-any-import-level = - -# Allow explicit reexports by alias from a package __init__. -# allow-reexport-from-package = - -# Allow wildcard imports from modules that define __all__. -# allow-wildcard-with-all = - -# Deprecated modules which should not be used, separated by a comma. -# deprecated-modules = - -# Output a graph (.gv or any supported image format) of external dependencies to -# the given file (report RP0402 must not be disabled). -# ext-import-graph = - -# Output a graph (.gv or any supported image format) of all (i.e. internal and -# external) dependencies to the given file (report RP0402 must not be disabled). -# import-graph = - -# Output a graph (.gv or any supported image format) of internal dependencies to -# the given file (report RP0402 must not be disabled). -# int-import-graph = - -# Force import order to recognize a module as part of the standard compatibility -# libraries. -# known-standard-library = - -# Force import order to recognize a module as part of a third party library. -known-third-party = ["enchant"] - -# Couples of modules and preferred modules, separated by a comma. -# preferred-modules = - -[tool.pylint.logging] -# The type of string formatting that logging methods do. `old` means using % -# formatting, `new` is for `{}` formatting. -logging-format-style = "old" - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules = ["logging"] - -[tool.pylint."messages control"] -# Only show warnings with the listed confidence levels. Leave empty to show all. -# Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] - -# Disable the message, report, category or checker with the given id(s). You can -# either give multiple identifiers separated by comma (,) or put this option -# multiple times (only on the command line, not in the configuration file where -# it should appear only once). You can also use "--disable=all" to disable -# everything first and then re-enable specific checks. For example, if you want -# to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". 
-disable = ["raw-checker-failed", "bad-inline-option", "locally-disabled", "file-ignored", "suppressed-message", "useless-suppression", "deprecated-pragma", "use-implicit-booleaness-not-comparison-to-string", "use-implicit-booleaness-not-comparison-to-zero", "use-symbolic-message-instead"] - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where it -# should appear only once). See also the "--disable" option for examples. -# enable = - -[tool.pylint.method_args] -# List of qualified names (i.e., library.method) which require a timeout -# parameter e.g. 'requests.api.get,requests.api.post' -timeout-methods = ["requests.api.delete", "requests.api.get", "requests.api.head", "requests.api.options", "requests.api.patch", "requests.api.post", "requests.api.put", "requests.api.request"] - -[tool.pylint.miscellaneous] -# List of note tags to take in consideration, separated by a comma. -notes = ["FIXME", "XXX", "TODO"] - -# Regular expression of note tags to take in consideration. -# notes-rgx = - -[tool.pylint.refactoring] -# Maximum number of nested blocks for function / method body -max-nested-blocks = 5 - -# Complete name of functions that never returns. When checking for inconsistent- -# return-statements if a never returning function is called then it will be -# considered as an explicit return statement and no message will be printed. -never-returning-functions = ["sys.exit", "argparse.parse_error"] - -# Let 'consider-using-join' be raised when the separator to join on would be non- -# empty (resulting in expected fixes of the type: ``"- " + " - ".join(items)``) -suggest-join-with-non-empty-separator = true - -[tool.pylint.reports] -# Python expression which should return a score less than or equal to 10. You -# have access to the variables 'fatal', 'error', 'warning', 'refactor', -# 'convention', and 'info' which contain the number of messages in each category, -# as well as 'statement' which is the total number of statements analyzed. This -# score is used by the global evaluation report (RP0004). -evaluation = "max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))" - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -# msg-template = - -# Set the output format. Available formats are: text, parseable, colorized, json2 -# (improved json format), json (old json format) and msvs (visual studio). You -# can also give a reporter class, e.g. mypackage.mymodule.MyReporterClass. -# output-format = - -# Tells whether to display a full report or only the messages. -# reports = - -# Activate the evaluation score. -score = true - -[tool.pylint.similarities] -# Comments are removed from the similarity computation -ignore-comments = true - -# Docstrings are removed from the similarity computation -ignore-docstrings = true - -# Imports are removed from the similarity computation -ignore-imports = true - -# Signatures are removed from the similarity computation -ignore-signatures = true - -# Minimum lines number of a similarity. -min-similarity-lines = 4 - -[tool.pylint.spelling] -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions = 4 - -# Spelling dictionary name. 
No available dictionaries : You need to install both -# the python package and the system dependency for enchant to work. -# spelling-dict = - -# List of comma separated words that should be considered directives if they -# appear at the beginning of a comment and should not be checked. -spelling-ignore-comment-directives = "fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:" - -# List of comma separated words that should not be checked. -# spelling-ignore-words = - -# A path to a file that contains the private dictionary; one word per line. -# spelling-private-dict-file = - -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -# spelling-store-unknown-words = - -[tool.pylint.typecheck] -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators = ["contextlib.contextmanager"] - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -# generated-members = - -# Tells whether missing members accessed in mixin class should be ignored. A -# class is considered mixin if its name matches the mixin-class-rgx option. -# Tells whether to warn about missing members when the owner of the attribute is -# inferred to be None. -ignore-none = true - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference can -# return multiple potential results while evaluating a Python object, but some -# branches might not be evaluated, which results in partial inference. In that -# case, it might be useful to still emit no-member and other checks for the rest -# of the inferred objects. -ignore-on-opaque-inference = true - -# List of symbolic message names to ignore for Mixin members. -ignored-checks-for-mixins = ["no-member", "not-async-context-manager", "not-context-manager", "attribute-defined-outside-init"] - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes = ["optparse.Values", "thread._local", "_thread._local", "argparse.Namespace"] - -# Show a hint with possible names when a member name was not found. The aspect of -# finding the hint is based on edit distance. -missing-member-hint = true - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance = 1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices = 1 - -# Regex pattern to define which classes are considered mixins. -mixin-class-rgx = ".*[Mm]ixin" - -# List of decorators that change the signature of a decorated function. -# signature-mutators = - -[tool.pylint.variables] -# List of additional names supposed to be defined in builtins. Remember that you -# should avoid defining new builtins when possible. -# additional-builtins = - -# Tells whether unused global variables should be treated as a violation. 
-allow-global-unused-variables = true - -# List of names allowed to shadow builtins -# allowed-redefined-builtins = - -# List of strings which can identify a callback function by name. A callback name -# must start or end with one of those strings. -callbacks = ["cb_", "_cb"] - -# A regular expression matching the name of dummy variables (i.e. expected to not -# be used). -dummy-variables-rgx = "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_" - -# Argument names that match this expression will be ignored. -ignored-argument-names = "_.*|^ignored_|^unused_" - -# Tells whether we should check for unused import in __init__ files. -# init-import = - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules = ["six.moves", "past.builtins", "future.builtins", "builtins", "io"] - -