diff --git a/.github/workflows/linkcheck.yml b/.github/workflows/linkcheck.yml
index 296e259e04..36fa1b4b67 100644
--- a/.github/workflows/linkcheck.yml
+++ b/.github/workflows/linkcheck.yml
@@ -4,7 +4,8 @@ jobs:
check-links:
if: ${{ github.repository != 'stfc/PSyclone-mirror' }}
name: Run linkspector
- runs-on: ubuntu-latest
+ # TODO 2838: linkspector doesn't support ubuntu-latest at the moment.
+ runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- name: Run linkspector
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 6ebc56ae5a..f893e99ea9 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -1,7 +1,7 @@
# -----------------------------------------------------------------------------
# BSD 3-Clause License
#
-# Copyright (c) 2020-2024, Science and Technology Facilities Council.
+# Copyright (c) 2020-2025, Science and Technology Facilities Council.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@@ -36,6 +36,7 @@
# Modified by A. J. Voysey, Met Office
# Modified by J. Henrichs, Bureau of Meteorology
# Modified by N. Nobre, STFC Daresbury Lab
+# Modified by A. B. G. Chalk, STFC Daresbury Lab
# This workflow will install Python dependencies, run tests and lint with a
# variety of Python versions.
@@ -103,7 +104,8 @@ jobs:
- run: cd doc/developer_guide; make html; make doctest
build:
if: ${{ github.repository != 'stfc/PSyclone-mirror' }}
- runs-on: ubuntu-latest
+ # TODO 2837: Switch to ubuntu-latest once Python 3.7 support is removed.
+ runs-on: ubuntu-22.04
strategy:
matrix:
python-version: [3.7, 3.8, 3.13]
diff --git a/README.md b/README.md
index ef731d3d1c..5ad562f740 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
-[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.11190458.svg)](https://doi.org/10.5281/zenodo.11190458)
+[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.11190457.svg)](https://doi.org/10.5281/zenodo.11190457)
![Build Status](https://github.com/stfc/PSyclone/workflows/PSyclone%20tests%20and%20examples/badge.svg)
[![codecov](https://codecov.io/gh/stfc/PSyclone/branch/master/graph/badge.svg)](https://codecov.io/gh/stfc/PSyclone)
diff --git a/changelog b/changelog
index 07de3af556..8867690531 100644
--- a/changelog
+++ b/changelog
@@ -1,3 +1,21 @@
+ 1) PR #2827. Update Zenodo with release 3.0.0 and update link in
+ README.md.
+
+ 2) PR #2759 for #2758. Fixes failure seen in compilation tests if
+ a previous build of infrastructure existed.
+
+ 3) PR #2807 for #2806. Adds a new 'RESOLVE_IMPORTS' global variable
+ to transformation scripts which allows the user to instruct the
+ frontend to chase down some (or all) module imports when constructing
+ the PSyIR for existing code.
+
+ 4) PR #2821 for #1247. Adds support for parsing comments from the fparser2
+ AST. There are two new FortranReader arguments to select the behaviour
+ for comments and directives (by default they are not parsed).
+
+ 5) PR #2836 for #2835. Reverts GHA to use Ubuntu 22.04 as python-3.7
+ is not available in ubuntu-latest and linkspector is also broken.
+
release 3.0.0 6th of December 2024
1) PR #2477 for #2463. Add support for Fortran Namelist statements.
diff --git a/doc/developer_guide/psyir.rst b/doc/developer_guide/psyir.rst
index b5bc1ea2ba..8e2e629560 100644
--- a/doc/developer_guide/psyir.rst
+++ b/doc/developer_guide/psyir.rst
@@ -898,7 +898,7 @@ class, for example:
.. code-block:: python
- from psyclone.psyir.nodes.commentable_mixin import CommentableMixin
+ from psyclone.psyir.commentable_mixin import CommentableMixin
class MyNode(Node, CommentableMixin):
''' Example node '''
diff --git a/doc/user_guide/transformations.rst b/doc/user_guide/transformations.rst
index adc0865b99..8323ab263b 100644
--- a/doc/user_guide/transformations.rst
+++ b/doc/user_guide/transformations.rst
@@ -47,8 +47,113 @@ in the PSyKAl DSLs, for a particular architecture. However, transformations
could be added for other reasons, such as to aid debugging or for
performance monitoring.
-Finding
--------
+
+.. _sec_transformations_script:
+
+PSyclone User Scripts
+---------------------
+
+A convenient way to apply transformations to a codebase is through the
+:ref:`psyclone_command` tool, which has the optional ``-s <SCRIPT_NAME>``
+flag that allows users to specify a script file to programmatically modify
+input code::
+
+ > psyclone -s optimise.py input_source.f90
+
+In this case, the current directory is prepended to the Python search path
+**PYTHONPATH** which will then be used to try to find the script file. Thus,
+the search begins in the current directory and continues over any pre-existing
+directories in the search path, failing if the file cannot be found.
+
+Alternatively, script files may be specified with a path. In this case
+the file must exist in the specified location. This location is then added to
+the Python search path **PYTHONPATH** as before. For example::
+
+ > psyclone -s ./optimise.py input_source.f90
+ > psyclone -s ../scripts/optimise.py input_source.f90
+ > psyclone -s /home/me/PSyclone/scripts/optimise.py input_source.f90
+
+A valid PSyclone user script file must contain a **trans** function which accepts
+a :ref:`PSyIR node` representing the root of the psy-layer
+code (as a FileContainer):
+
+.. code-block:: python
+
+ def trans(psyir):
+ # ...
+
+The example below adds an OpenMP directive to a specific PSyKAL kernel:
+
+.. code-block:: python
+
+ def trans(psyir):
+ from psyclone.transformations import OMPParallelLoopTrans
+ from psyclone.psyir.node import Routine
+ for subroutine in psyir.walk(Routine):
+ if subroutine.name == 'invoke_0_v3_kernel_type':
+ ol = OMPParallelLoopTrans()
+ ol.apply(subroutine.children[0])
+
+
+The script may apply as many transformations as is required for the intended
+optimisation, and may also apply transformations to all the routines (i.e. invokes
+and/or kernels) contained within the provided tree.
+The :ref:`examples section` provides a list of PSyclone user scripts
+and associated usage instructions for multiple applications.
+
+
+Script Global Variables
++++++++++++++++++++++++
+
+In addition to the ``trans`` function, there are special global variables that can be set
+to control some of the behaviours of the front-end (before the optimisation function
+is applied). These are:
+
+.. code-block:: python
+
+ # List of all files that psyclone will skip processing
+ FILES_TO_SKIP = ["broken_file1.f90", "broken_file2.f90"]
+
+ # Boolean to decide whether PSyclone should chase external modules while
+ # creating a PSyIR tree in order to obtain better external symbol information.
+ # It can also be a list of module names for more precise control
+ RESOLVE_IMPORTS = ["relevant_module1", "relevant_module2"]
+
+ def trans(psyir):
+ # ...
+
+
+PSyKAl algorithm code transformations
++++++++++++++++++++++++++++++++++++++
+
+When using PSyKAl, the ``trans`` function is used to transform the PSy-layer (the
+layer in charge of the Parallel-System and Loops traversal orders), however, a
+second optional transformation entry point ``trans_alg`` can be provided to
+directly transform the Algorithm-layer (this is currently only implemented for
+GOcean, but in the future it will also affect the LFRic DSL).
+
+.. code-block:: python
+
+ def trans_alg(psyir):
+ # ...
+
+As with the `trans()` function it is up to the script what it does with
+the algorithm PSyIR. Note that the `trans_alg()` script is applied to
+the algorithm layer before the PSy-layer is generated so any changes
+applied to the algorithm layer will be reflected in the PSy-layer PSyIR tree
+object that is passed to the `trans()` function.
+
+For example, if the `trans_alg()` function in the script merged two
+`invoke` calls into one then the PSyIR node passed to the
+`trans()` function of the script would only contain one Routine
+associated with the merged invoke.
+
+An example of the use of a script making use of the `trans_alg()`
+function can be found in examples/gocean/eg7.
+
+
+Finding transformations
+-----------------------
Transformations can be imported directly, but the user needs to know
what transformations are available. A helper class **TransInfo** is
@@ -64,15 +169,14 @@ provided to show the available transformations
.. autoclass:: psyclone.psyGen.TransInfo
:members:
-.. _sec_transformations_available:
-Standard Functionality
-----------------------
+Validating and Applying transformations
+---------------------------------------
Each transformation must provide at least two functions for the
user: one for validation, i.e. to verify that a certain transformation
can be applied, and one to actually apply the transformation. They are
described in detail in the
-:ref:`overview of all transformations`,
+:ref:`overview of all transformations`,
but the following general guidelines apply.
Validation
@@ -116,7 +220,7 @@ provided this way. A simple example::
The same ``options`` dictionary will be used when calling ``validate``.
-.. _available_trans:
+.. _sec_transformations_available:
Available transformations
-------------------------
@@ -615,207 +719,6 @@ applied to either or both the PSy-layer and Kernel-layer PSyIR.
.. note:: This transformation is only supported by the GOcean 1.0 API.
-
-Applying
---------
-
-Transformations can be applied either interactively or through a
-script.
-
-.. _sec_transformations_interactive:
-
-Interactive
-+++++++++++
-
-To apply a transformation interactively we first parse and analyse the
-code. This allows us to generate a "vanilla" PSy layer. For example::
-
- >>> from fparser.common.readfortran import FortranStringReader
- >>> from psyclone.parse.algorithm import Parser
- >>> from psyclone.psyGen import PSyFactory
- >>> from fparser.two.parser import ParserFactory
-
- >>> example_str = (
- ... "program example\n"
- ... " use field_mod, only: field_type\n"
- ... " type(field_type) :: field\n"
- ... " call invoke(setval_c(field, 0.0))\n"
- ... "end program example\n")
-
- >>> parser = ParserFactory().create(std="f2008")
- >>> reader = FortranStringReader(example_str)
- >>> ast = parser(reader)
- >>> invoke_info = Parser().invoke_info(ast)
-
- # This example uses the LFRic API
- >>> api = "lfric"
-
- # Create the PSy-layer object using the invokeInfo
- >>> psy = PSyFactory(api, distributed_memory=False).create(invoke_info)
-
- # Optionally generate the vanilla PSy layer fortran
- >>> print(psy.gen)
- MODULE example_psy
- USE constants_mod, ONLY: r_def, i_def
- USE field_mod, ONLY: field_type, field_proxy_type
- IMPLICIT NONE
- CONTAINS
- SUBROUTINE invoke_0(field)
- TYPE(field_type), intent(in) :: field
- INTEGER(KIND=i_def) df
- INTEGER(KIND=i_def) loop0_start, loop0_stop
- TYPE(field_proxy_type) field_proxy
- INTEGER(KIND=i_def) undf_aspc1_field
- !
- ! Initialise field and/or operator proxies
- !
- field_proxy = field%get_proxy()
- !
- ! Initialise number of DoFs for aspc1_field
- !
- undf_aspc1_field = field_proxy%vspace%get_undf()
- !
- ! Set-up all of the loop bounds
- !
- loop0_start = 1
- loop0_stop = undf_aspc1_field
- !
- ! Call our kernels
- !
- DO df=loop0_start,loop0_stop
- field_proxy%data(df) = 0.0
- END DO
- !
- END SUBROUTINE invoke_0
- END MODULE example_psy
-
-We then extract the particular schedule we are interested
-in. For example::
-
- # List the various invokes that the PSy layer contains
- >>> print(psy.invokes.names)
- dict_keys(['invoke_0'])
-
- # Get the required invoke
- >>> invoke = psy.invokes.get('invoke_0')
-
- # Get the schedule associated with the required invoke
- > schedule = invoke.schedule
- > print(schedule.view())
- InvokeSchedule[invoke='invoke_0', dm=True]
- 0: Loop[type='dof', field_space='any_space_1', it_space='dof', upper_bound='ndofs']
- Literal[value:'NOT_INITIALISED', Scalar]
- Literal[value:'NOT_INITIALISED', Scalar]
- Literal[value:'1', Scalar]
- Schedule[]
- 0: BuiltIn setval_c(field,0.0)
-
-Now we have the schedule we can create and apply a transformation to
-it to create a new schedule and then replace the original schedule
-with the new one. For example::
-
- # Create an OpenMPParallelLoopTrans
- > from psyclone.transformations import OMPParallelLoopTrans
- > ol = OMPParallelLoopTrans()
-
- # Apply it to the loop schedule of the selected invoke
- > ol.apply(schedule.children[0])
- > print(schedule.view())
-
- # Generate the Fortran code for the new PSy layer
- > print(psy.gen)
-
-.. _sec_transformations_script:
-
-Script
-++++++
-
-PSyclone provides a Python script (**psyclone**) that can be used from
-the command line to generate PSy layer code and to modify algorithm
-layer code appropriately. By default this script will generate
-"vanilla" (unoptimised) PSy-layer and algorithm layer code. For
-example::
-
- > psyclone algspec.f90
- > psyclone -oalg alg.f90 -opsy psy.f90 -api lfric algspec.f90
-
-The **psyclone** script has an optional **-s** flag which allows the
-user to specify a script file to modify the PSy layer as
-required. Script files may be specified without a path. For
-example::
-
- > psyclone -s opt.py algspec.f90
-
-In this case, the current directory is prepended to the Python search path
-**PYTHONPATH** which will then be used to try to find the script file. Thus,
-the search begins in the current directory and continues over any pre-existing
-directories in the search path, failing if the file cannot be found.
-
-Alternatively, script files may be specified with a path. In this case
-the file must exist in the specified location. This location is then added to
-the Python search path **PYTHONPATH** as before. For example::
-
- > psyclone -s ./opt.py algspec.f90
- > psyclone -s ../scripts/opt.py algspec.f90
- > psyclone -s /home/me/PSyclone/scripts/opt.py algspec.f90
-
-PSyclone also provides the same functionality via a function (which is
-what the **psyclone** script calls internally).
-
-A valid script file must contain a **trans** function which accepts a
-:ref:`PSyIR node` representing the root of the psy-layer
-code (as a FileConatainer)::
-
- >>> def trans(psyir):
- ... # ...
-
-It is up to the script how to modify the PSyIR representation of the code.
-The example below does the same thing as the example in the
-:ref:`sec_transformations_interactive` section.
-::
-
- >>> def trans(psyir):
- ... from psyclone.transformations import OMPParallelLoopTrans
- ... from psyclone.psyir.node import Routine
- ... for subroutine in psyir.walk(Routine):
- ... if subroutine.name == 'invoke_0_v3_kernel_type':
- ... ol = OMPParallelLoopTrans()
- ... ol.apply(subroutine.children[0])
-
-In the gocean API (and in the future the lfric API) an
-optional **trans_alg** function may also be supplied. This function
-accepts **PSyIR** (representing the algorithm layer) as an argument and
-returns **PSyIR** i.e.:
-::
-
- >>> def trans_alg(psyir):
- ... # ...
-
-As with the `trans()` function it is up to the script what it does with
-the algorithm PSyIR. Note that the `trans_alg()` script is applied to
-the algorithm layer before the PSy-layer is generated so any changes
-applied to the algorithm layer will be reflected in the **PSy** object
-that is passed to the `trans()` function.
-
-For example, if the `trans_alg()` function in the script merged two
-`invoke` calls into one then the **Alg** object passed to the
-`trans()` function of the script would only contain one schedule
-associated with the merged invoke.
-
-Of course the script may apply as many transformations as is required
-for a particular algorithm and/or schedule and may apply
-transformations to all the schedules (i.e. invokes and/or kernels)
-contained within the PSy layer.
-
-Examples of the use of transformation scripts can be found in many of
-the examples, such as examples/lfric/eg3 and
-examples/lfric/scripts. Please read the examples/lfric/README file
-first as it explains how to run the examples (and see also the
-examples/check_examples script).
-
-An example of the use of a script making use of the `trans_alg()`
-function can be found in examples/gocean/eg7.
-
OpenMP
------
diff --git a/examples/nemo/scripts/acc_kernels_trans.py b/examples/nemo/scripts/acc_kernels_trans.py
index 7ae43abdd3..cd972e1173 100755
--- a/examples/nemo/scripts/acc_kernels_trans.py
+++ b/examples/nemo/scripts/acc_kernels_trans.py
@@ -58,7 +58,7 @@
import logging
from utils import (add_profiling, enhance_tree_information, inline_calls,
- NOT_PERFORMANT)
+ NOT_PERFORMANT, NEMO_MODULES_TO_IMPORT)
from psyclone.errors import InternalError
from psyclone.psyGen import TransInfo
from psyclone.psyir.nodes import (
@@ -77,6 +77,10 @@
"tracers": {"variable": "jt"}
})
+# List of all module names that PSyclone will chase during the creation of the
+# PSyIR tree in order to use the symbol information from those modules
+RESOLVE_IMPORTS = NEMO_MODULES_TO_IMPORT
+
# Get the PSyclone transformations we will use
ACC_KERN_TRANS = ACCKernelsTrans()
ACC_LOOP_TRANS = TransInfo().get_trans_name('ACCLoopTrans')
diff --git a/examples/nemo/scripts/acc_loops_trans.py b/examples/nemo/scripts/acc_loops_trans.py
index 41896486c9..4e89724765 100755
--- a/examples/nemo/scripts/acc_loops_trans.py
+++ b/examples/nemo/scripts/acc_loops_trans.py
@@ -39,13 +39,18 @@
from utils import (
insert_explicit_loop_parallelism, normalise_loops, add_profiling,
- enhance_tree_information, NOT_PERFORMANT)
+ enhance_tree_information, NOT_PERFORMANT, NEMO_MODULES_TO_IMPORT)
from psyclone.psyir.nodes import Routine
from psyclone.transformations import (
ACCParallelTrans, ACCLoopTrans, ACCRoutineTrans)
+# Enable the insertion of profiling hooks during the transformation script
PROFILING_ENABLED = True
+# List of all module names that PSyclone will chase during the creation of the
+# PSyIR tree in order to use the symbol information from those modules
+RESOLVE_IMPORTS = NEMO_MODULES_TO_IMPORT
+
# List of all files that psyclone will skip processing
FILES_TO_SKIP = NOT_PERFORMANT
diff --git a/examples/nemo/scripts/omp_cpu_trans.py b/examples/nemo/scripts/omp_cpu_trans.py
index 5a2f1cd827..1102455c61 100755
--- a/examples/nemo/scripts/omp_cpu_trans.py
+++ b/examples/nemo/scripts/omp_cpu_trans.py
@@ -40,12 +40,18 @@
import os
from utils import (
insert_explicit_loop_parallelism, normalise_loops, add_profiling,
- enhance_tree_information, PASSTHROUGH_ISSUES, PARALLELISATION_ISSUES)
+ enhance_tree_information, PASSTHROUGH_ISSUES, PARALLELISATION_ISSUES,
+ NEMO_MODULES_TO_IMPORT)
from psyclone.psyir.nodes import Routine
from psyclone.transformations import OMPLoopTrans
+# Enable the insertion of profiling hooks during the transformation script
PROFILING_ENABLED = False
+# List of all module names that PSyclone will chase during the creation of the
+# PSyIR tree in order to use the symbol information from those modules
+RESOLVE_IMPORTS = NEMO_MODULES_TO_IMPORT
+
# A environment variable can inform if this is targeting NEMOv5, in which case
# array privatisation is enabled.
NEMOV5 = os.environ.get('NEMOV5', False)
diff --git a/examples/nemo/scripts/omp_gpu_trans.py b/examples/nemo/scripts/omp_gpu_trans.py
index 1891a230a7..df7c300ad8 100755
--- a/examples/nemo/scripts/omp_gpu_trans.py
+++ b/examples/nemo/scripts/omp_gpu_trans.py
@@ -39,7 +39,7 @@
from utils import (
insert_explicit_loop_parallelism, normalise_loops, add_profiling,
- enhance_tree_information, NOT_PERFORMANT)
+ enhance_tree_information, NOT_PERFORMANT, NEMO_MODULES_TO_IMPORT)
from psyclone.psyGen import TransInfo
from psyclone.psyir.nodes import (
Loop, Routine, Directive, Assignment, OMPAtomicDirective)
@@ -48,6 +48,10 @@
PROFILING_ENABLED = False
+# List of all module names that PSyclone will chase during the creation of the
+# PSyIR tree in order to use the symbol information from those modules
+RESOLVE_IMPORTS = NEMO_MODULES_TO_IMPORT
+
# List of all files that psyclone will skip processing
FILES_TO_SKIP = NOT_PERFORMANT
diff --git a/examples/nemo/scripts/utils.py b/examples/nemo/scripts/utils.py
index fcc9c543e7..ae875eb384 100755
--- a/examples/nemo/scripts/utils.py
+++ b/examples/nemo/scripts/utils.py
@@ -37,11 +37,10 @@
from psyclone.domain.common.transformations import KernelModuleInlineTrans
from psyclone.psyir.nodes import (
- Assignment, Loop, Directive, Container, Reference, CodeBlock,
+ Assignment, Loop, Directive, Reference, CodeBlock,
Call, Return, IfBlock, Routine, IntrinsicCall)
from psyclone.psyir.symbols import (
- DataSymbol, INTEGER_TYPE, REAL_TYPE, ArrayType, ScalarType,
- RoutineSymbol, ImportInterface)
+ DataSymbol, INTEGER_TYPE, ScalarType, RoutineSymbol)
from psyclone.psyir.transformations import (
ArrayAssignment2LoopsTrans, HoistLoopBoundExprTrans, HoistLocalArraysTrans,
HoistTrans, InlineTrans, Maxval2LoopTrans, ProfileTrans,
@@ -49,13 +48,19 @@
from psyclone.transformations import TransformationError
+# USE statements to chase to gather additional symbol information.
+NEMO_MODULES_TO_IMPORT = [
+ "oce", "par_oce", "dom_oce", "phycst", "ice",
+ "obs_fbm", "flo_oce", "sbc_ice", "wet_dry"
+]
+
# Files that PSyclone could process but would reduce the performance.
NOT_PERFORMANT = [
"bdydta.f90", "bdyvol.f90", "fldread.f90", "icbclv.f90", "icbthm.f90",
"icbdia.f90", "icbini.f90", "icbstp.f90", "iom.f90", "iom_nf90.f90",
"obs_grid.f90", "obs_averg_h2d.f90", "obs_profiles_def.f90",
"obs_types.f90", "obs_read_prof.f90", "obs_write.f90", "tide_mod.f90",
- "zdfosm.f90",
+ "zdfosm.f90", "obs_read_surf.f90",
]
# If routine names contain these substrings then we do not profile them
@@ -119,47 +124,26 @@ def _it_should_be(symbol, of_type, instance):
def enhance_tree_information(schedule):
- ''' Resolve imports in order to populate relevant datatype on the
- tree symbol tables.
+ ''' Manually fix some PSyIR issues produced by not having enough symbol
+ information from external modules. Setting NEMO_MODULES_TO_IMPORT above
+ improves the situation but it is not complete (not all symbols are imported)
+ and it is not transitive (imports made inside the imported modules are not followed).
:param schedule: the PSyIR Schedule to transform.
:type schedule: :py:class:`psyclone.psyir.nodes.node`
- '''
-
- mod_sym_tab = schedule.ancestor(Container).symbol_table
-
- modules_to_import = ("oce", "par_oce", "dom_oce", "phycst", "ice",
- "obs_fbm", "flo_oce", "sbc_ice", "wet_dry")
-
- for module_name in modules_to_import:
- if module_name in mod_sym_tab:
- mod_symbol = mod_sym_tab.lookup(module_name)
- mod_sym_tab.resolve_imports(container_symbols=[mod_symbol])
+ '''
are_integers = ('jpi', 'jpim1', 'jpj', 'jpjm1', 'jp_tem', 'jp_sal',
'jpkm1', 'jpiglo', 'jpni', 'jpk', 'jpiglo_crs',
'jpmxl_atf', 'jpmxl_ldf', 'jpmxl_zdf', 'jpnij',
'jpts', 'jpvor_bev', 'nleapy', 'nn_ctls', 'jpmxl_npc',
'jpmxl_zdfp', 'npti')
- # Manually set the datatype of some integer and real variables that are
- # important for performance
for reference in schedule.walk(Reference):
if reference.symbol.name in are_integers:
+ # Manually set the datatype of some integer scalars that are
+ # important for performance
_it_should_be(reference.symbol, ScalarType, INTEGER_TYPE)
- elif reference.symbol.name in ('rn_avt_rnf', ):
- _it_should_be(reference.symbol, ScalarType, REAL_TYPE)
- elif isinstance(reference.symbol.interface, ImportInterface) and \
- reference.symbol.interface.container_symbol.name == "phycst":
- # Everything imported from phycst is a REAL
- _it_should_be(reference.symbol, ScalarType, REAL_TYPE)
- elif reference.symbol.name == 'tmask':
- if reference.ancestor(Container).name == "dom_oce":
- continue # Do not update the original declaration
- _it_should_be(reference.symbol, ArrayType, ArrayType(REAL_TYPE, [
- ArrayType.Extent.ATTRIBUTE,
- ArrayType.Extent.ATTRIBUTE,
- ArrayType.Extent.ATTRIBUTE]))
elif reference.symbol.name in NEMO_FUNCTIONS:
if reference.symbol.is_import or reference.symbol.is_unresolved:
# The parser gets these wrong, they are Calls not ArrayRefs
diff --git a/lib/extract/netcdf/dl_esm_inf/.gitignore b/lib/extract/netcdf/dl_esm_inf/.gitignore
index b13e29fa31..1643dcbd30 100644
--- a/lib/extract/netcdf/dl_esm_inf/.gitignore
+++ b/lib/extract/netcdf/dl_esm_inf/.gitignore
@@ -1 +1,2 @@
read_kernel_data_mod.f90
+compare_variables_mod.F90
diff --git a/lib/extract/netcdf/dl_esm_inf/Makefile b/lib/extract/netcdf/dl_esm_inf/Makefile
index b356b873a9..320b31458c 100644
--- a/lib/extract/netcdf/dl_esm_inf/Makefile
+++ b/lib/extract/netcdf/dl_esm_inf/Makefile
@@ -105,7 +105,8 @@ $(INF_LIB):
$(F90) $(F90FLAGS) -c $<
clean:
- rm -f *.o *.mod $(PSYDATA_LIB) psy_data_base.f90 extract_netcdf_base.f90
+ rm -f *.o *.mod $(PSYDATA_LIB) psy_data_base.f90 extract_netcdf_base.F90 \
+ compare_variables_mod.F90 read_kernel_data_mod.f90
allclean: clean
$(MAKE) -C $(GOCEAN_INF_DIR) clean
diff --git a/lib/extract/netcdf/generic/.gitignore b/lib/extract/netcdf/generic/.gitignore
new file mode 100644
index 0000000000..369edb55a3
--- /dev/null
+++ b/lib/extract/netcdf/generic/.gitignore
@@ -0,0 +1 @@
+compare_variables_mod.F90
diff --git a/lib/extract/netcdf/generic/Makefile b/lib/extract/netcdf/generic/Makefile
index c92c764bf4..f688ffc3c1 100644
--- a/lib/extract/netcdf/generic/Makefile
+++ b/lib/extract/netcdf/generic/Makefile
@@ -90,6 +90,7 @@ compare_variables_mod.F90: $(PSYDATA_LIB_DIR)/extract/compare_variables_mod.jinj
$(F90) $(F90FLAGS) -c $<
clean:
- rm -f *.o *.mod $(PSYDATA_LIB) psy_data_base.f90 extract_netcdf_base.f90
+ rm -f *.o *.mod $(PSYDATA_LIB) psy_data_base.f90 extract_netcdf_base.f90 \
+ compare_variables_mod.F90 extract_netcdf_base.F90
allclean: clean
diff --git a/lib/extract/netcdf/generic/kernel_data_netcdf.f90 b/lib/extract/netcdf/generic/kernel_data_netcdf.f90
index 7c36d719b3..2ec595caa0 100644
--- a/lib/extract/netcdf/generic/kernel_data_netcdf.f90
+++ b/lib/extract/netcdf/generic/kernel_data_netcdf.f90
@@ -34,8 +34,8 @@
! Author J. Henrichs, Bureau of Meteorology
!> This module implements a simple NetCDF writer using the PSyData
-!! interface. It is specific to the generic NEMO API, i.e. it does
-!! actually not provide any API-specific types like fields, only standard
+!! interface. It is for handling generic Fortran code, i.e. it does
+!! not actually provide any API-specific types like fields, only standard
!! Fortran data types. Therefore, it is just an empty wrapper around
!! the ExtractNetcdfBaseType, which provides the standard module name
!! and class name expected by the extraction scripts.
diff --git a/lib/extract/netcdf/lfric/.gitignore b/lib/extract/netcdf/lfric/.gitignore
index 43711f2e6e..473a2f8267 100644
--- a/lib/extract/netcdf/lfric/.gitignore
+++ b/lib/extract/netcdf/lfric/.gitignore
@@ -1,2 +1,3 @@
kernel_data_netcdf.f90
read_kernel_data_mod.f90
+compare_variables_mod.F90
diff --git a/lib/extract/netcdf/lfric/Makefile b/lib/extract/netcdf/lfric/Makefile
index 629333b9d6..3e580135e9 100644
--- a/lib/extract/netcdf/lfric/Makefile
+++ b/lib/extract/netcdf/lfric/Makefile
@@ -118,8 +118,8 @@ compare_variables_mod.F90: $(PSYDATA_LIB_DIR)/extract/compare_variables_mod.jinj
$(F90) $(F90FLAGS) $(LFRIC_INCLUDE_FLAGS) -c $<
clean:
- rm -f *.o *.mod $(PSYDATA_LIB) psy_data_base.f90 extract_netcdf_base.f90 \
- compare_variables_mod.f90
+ rm -f *.o *.mod $(PSYDATA_LIB) psy_data_base.f90 extract_netcdf_base.F90 \
+ compare_variables_mod.F90 kernel_data_netcdf.f90 read_kernel_data_mod.f90
allclean: clean
$(MAKE) -C $(LFRIC_PATH) allclean
diff --git a/lib/extract/standalone/dl_esm_inf/Makefile b/lib/extract/standalone/dl_esm_inf/Makefile
index 672cda72cd..360bb21595 100644
--- a/lib/extract/standalone/dl_esm_inf/Makefile
+++ b/lib/extract/standalone/dl_esm_inf/Makefile
@@ -105,7 +105,8 @@ $(INF_LIB):
$(F90) $(F90FLAGS) -c $<
clean:
- rm -f *.o *.mod $(PSYDATA_LIB) psy_data_base.f90 extract_standalone_base.f90
+ rm -f *.o *.mod $(PSYDATA_LIB) psy_data_base.f90 extract_standalone_base.f90 \
+ compare_variables_mod.F90 read_kernel_data_mod.f90
allclean: clean
$(MAKE) -C $(GOCEAN_INF_DIR) clean
diff --git a/lib/extract/standalone/generic/Makefile b/lib/extract/standalone/generic/Makefile
index f4f3d60b0f..f2069c2626 100644
--- a/lib/extract/standalone/generic/Makefile
+++ b/lib/extract/standalone/generic/Makefile
@@ -89,7 +89,7 @@ compare_variables_mod.F90: $(PSYDATA_LIB_DIR)/extract/compare_variables_mod.jinj
$(F90) $(F90FLAGS) -c $<
clean:
- rm -f *.o *.mod $(PSYDATA_LIB) psy_data_base.f90 extract_standalone_base.f90 \
+ rm -f *.o *.mod $(PSYDATA_LIB) psy_data_base.f90 extract_standalone_base.F90 \
compare_variables_mod.F90
allclean: clean
diff --git a/lib/extract/standalone/generic/kernel_data_standalone.f90 b/lib/extract/standalone/generic/kernel_data_standalone.f90
index 6ff8d4b655..bdb2a1f744 100644
--- a/lib/extract/standalone/generic/kernel_data_standalone.f90
+++ b/lib/extract/standalone/generic/kernel_data_standalone.f90
@@ -34,8 +34,8 @@
! Author J. Henrichs, Bureau of Meteorology
!> This module implements a simple binary-file writer using the PSyData
-!! interface. It is specific to the generic NEMO API, i.e. it does
-!! actually not provide any API-specific types like fields, only standard
+!! interface. It is for handling generic Fortran code, i.e. it does
+!! not actually provide any API-specific types like fields, only standard
!! Fortran data types. Therefore, it is just an empty wrapper to around
!! the ExtractNetcdfBaseType, which provides the standard module name
!! and class name expected by the extraction scripts.
diff --git a/lib/extract/standalone/lfric/.gitignore b/lib/extract/standalone/lfric/.gitignore
index 99eaf94920..85a9245fda 100644
--- a/lib/extract/standalone/lfric/.gitignore
+++ b/lib/extract/standalone/lfric/.gitignore
@@ -1,2 +1 @@
kernel_data_standalone.f90
-read_kernel_data_mod.f90
diff --git a/lib/extract/standalone/read_kernel_data_mod.f90 b/lib/extract/standalone/lfric/read_kernel_data_mod.f90
similarity index 84%
rename from lib/extract/standalone/read_kernel_data_mod.f90
rename to lib/extract/standalone/lfric/read_kernel_data_mod.f90
index ea9ad613dc..62a889ac1c 100644
--- a/lib/extract/standalone/read_kernel_data_mod.f90
+++ b/lib/extract/standalone/lfric/read_kernel_data_mod.f90
@@ -80,11 +80,6 @@ module read_kernel_data_mod
procedure :: ReadArray2dInt
procedure :: ReadArray3dInt
procedure :: ReadArray4dInt
- procedure :: ReadScalarLong
- procedure :: ReadArray1dLong
- procedure :: ReadArray2dLong
- procedure :: ReadArray3dLong
- procedure :: ReadArray4dLong
procedure :: ReadScalarLogical
procedure :: ReadArray1dLogical
procedure :: ReadArray2dLogical
@@ -115,11 +110,6 @@ module read_kernel_data_mod
ReadArray2dInt, &
ReadArray3dInt, &
ReadArray4dInt, &
- ReadScalarLong, &
- ReadArray1dLong, &
- ReadArray2dLong, &
- ReadArray3dLong, &
- ReadArray4dLong, &
ReadScalarLogical, &
ReadArray1dLogical, &
ReadArray2dLogical, &
@@ -599,216 +589,6 @@ subroutine ReadArray4dInt(this, name, value)
end subroutine ReadArray4dInt
- ! -------------------------------------------------------------------------
- !> @brief This subroutine reads the value of a scalar integer(kind=int64)
- !! variable from the binary file and returns it to the user. Note that
- !! this function is not part of the PSyData API, but it is convenient to
- !! have these functions together here. The driver can then be linked with
- !! this PSyData library and will be able to read the files.
- !! @param[in,out] this The instance of the ReadKernelDataType.
- !! @param[in] name The name of the variable (string).
- !! @param[out] value The read value is stored here.
- subroutine ReadScalarLong(this, name, value)
-
- implicit none
-
- class(ReadKernelDataType), intent(inout), target :: this
- character(*), intent(in) :: name
- integer(kind=int64), intent(out) :: value
-
- integer :: retval, varid
-
- read(this%unit_number) value
-
- end subroutine ReadScalarLong
-
-
-
- ! -------------------------------------------------------------------------
- !> @brief This subroutine reads the values of a 1D array of integer(kind=int64)
- !! It allocates memory for the allocatable parameter 'value' to store the
- !! read values which is then returned to the caller. If the memory for the
- !! array cannot be allocated, the application will be stopped.
- !! @param[in,out] this The instance of the extract_PsyDataType.
- !! @param[in] name The name of the variable (string).
- !! @param[out] value An allocatable, unallocated 2d-double precision array
- !! which is allocated here and stores the values read.
- subroutine ReadArray1dLong(this, name, value)
-
- implicit none
-
- class(ReadKernelDataType), intent(inout), target :: this
- character(*), intent(in) :: name
- integer(kind=int64), dimension(:), allocatable, intent(out) :: value
-
- integer :: retval, varid
- integer :: dim_id
- integer :: dim_size1
- integer :: ierr
-
- ! First read in the sizes:
- read(this%unit_number) dim_size1
-
- ! Allocate enough space to store the values to be read:
- allocate(value(dim_size1), Stat=ierr)
- if (ierr /= 0) then
- write(stderr,*) "Cannot allocate array for ", name, &
- " of size ", dim_size1, &
- " in ReadArray1dLong."
- stop
- endif
-
- ! Initialise it with 0.0d0, so that an array comparison will work
- ! even though e.g. boundary areas or so might not be set at all. Note
- ! that the compiler will convert the double precision value to the right
- ! type (e.g. int or single precision).
- value = 0.0d0
- read(this%unit_number) value
-
- end subroutine ReadArray1dLong
-
-
-
- ! -------------------------------------------------------------------------
- !> @brief This subroutine reads the values of a 2D array of integer(kind=int64)
- !! It allocates memory for the allocatable parameter 'value' to store the
- !! read values which is then returned to the caller. If the memory for the
- !! array cannot be allocated, the application will be stopped.
- !! @param[in,out] this The instance of the extract_PsyDataType.
- !! @param[in] name The name of the variable (string).
- !! @param[out] value An allocatable, unallocated 2d-double precision array
- !! which is allocated here and stores the values read.
- subroutine ReadArray2dLong(this, name, value)
-
- implicit none
-
- class(ReadKernelDataType), intent(inout), target :: this
- character(*), intent(in) :: name
- integer(kind=int64), dimension(:,:), allocatable, intent(out) :: value
-
- integer :: retval, varid
- integer :: dim_id
- integer :: dim_size1,dim_size2
- integer :: ierr
-
- ! First read in the sizes:
- read(this%unit_number) dim_size1
- read(this%unit_number) dim_size2
-
- ! Allocate enough space to store the values to be read:
- allocate(value(dim_size1,dim_size2), Stat=ierr)
- if (ierr /= 0) then
- write(stderr,*) "Cannot allocate array for ", name, &
- " of size ", dim_size1,dim_size2, &
- " in ReadArray2dLong."
- stop
- endif
-
- ! Initialise it with 0.0d0, so that an array comparison will work
- ! even though e.g. boundary areas or so might not be set at all. Note
- ! that the compiler will convert the double precision value to the right
- ! type (e.g. int or single precision).
- value = 0.0d0
- read(this%unit_number) value
-
- end subroutine ReadArray2dLong
-
-
-
- ! -------------------------------------------------------------------------
- !> @brief This subroutine reads the values of a 3D array of integer(kind=int64)
- !! It allocates memory for the allocatable parameter 'value' to store the
- !! read values which is then returned to the caller. If the memory for the
- !! array cannot be allocated, the application will be stopped.
- !! @param[in,out] this The instance of the extract_PsyDataType.
- !! @param[in] name The name of the variable (string).
- !! @param[out] value An allocatable, unallocated 2d-double precision array
- !! which is allocated here and stores the values read.
- subroutine ReadArray3dLong(this, name, value)
-
- implicit none
-
- class(ReadKernelDataType), intent(inout), target :: this
- character(*), intent(in) :: name
- integer(kind=int64), dimension(:,:,:), allocatable, intent(out) :: value
-
- integer :: retval, varid
- integer :: dim_id
- integer :: dim_size1,dim_size2,dim_size3
- integer :: ierr
-
- ! First read in the sizes:
- read(this%unit_number) dim_size1
- read(this%unit_number) dim_size2
- read(this%unit_number) dim_size3
-
- ! Allocate enough space to store the values to be read:
- allocate(value(dim_size1,dim_size2,dim_size3), Stat=ierr)
- if (ierr /= 0) then
- write(stderr,*) "Cannot allocate array for ", name, &
- " of size ", dim_size1,dim_size2,dim_size3, &
- " in ReadArray3dLong."
- stop
- endif
-
- ! Initialise it with 0.0d0, so that an array comparison will work
- ! even though e.g. boundary areas or so might not be set at all. Note
- ! that the compiler will convert the double precision value to the right
- ! type (e.g. int or single precision).
- value = 0.0d0
- read(this%unit_number) value
-
- end subroutine ReadArray3dLong
-
-
-
- ! -------------------------------------------------------------------------
- !> @brief This subroutine reads the values of a 4D array of integer(kind=int64)
- !! It allocates memory for the allocatable parameter 'value' to store the
- !! read values which is then returned to the caller. If the memory for the
- !! array cannot be allocated, the application will be stopped.
- !! @param[in,out] this The instance of the extract_PsyDataType.
- !! @param[in] name The name of the variable (string).
- !! @param[out] value An allocatable, unallocated 2d-double precision array
- !! which is allocated here and stores the values read.
- subroutine ReadArray4dLong(this, name, value)
-
- implicit none
-
- class(ReadKernelDataType), intent(inout), target :: this
- character(*), intent(in) :: name
- integer(kind=int64), dimension(:,:,:,:), allocatable, intent(out) :: value
-
- integer :: retval, varid
- integer :: dim_id
- integer :: dim_size1,dim_size2,dim_size3,dim_size4
- integer :: ierr
-
- ! First read in the sizes:
- read(this%unit_number) dim_size1
- read(this%unit_number) dim_size2
- read(this%unit_number) dim_size3
- read(this%unit_number) dim_size4
-
- ! Allocate enough space to store the values to be read:
- allocate(value(dim_size1,dim_size2,dim_size3,dim_size4), Stat=ierr)
- if (ierr /= 0) then
- write(stderr,*) "Cannot allocate array for ", name, &
- " of size ", dim_size1,dim_size2,dim_size3,dim_size4, &
- " in ReadArray4dLong."
- stop
- endif
-
- ! Initialise it with 0.0d0, so that an array comparison will work
- ! even though e.g. boundary areas or so might not be set at all. Note
- ! that the compiler will convert the double precision value to the right
- ! type (e.g. int or single precision).
- value = 0.0d0
- read(this%unit_number) value
-
- end subroutine ReadArray4dLong
-
-
! -------------------------------------------------------------------------
!> @brief This subroutine reads the value of a scalar Logical(kind=4)
!! variable from the binary file and returns it to the user. Note that
diff --git a/psyclone.pdf b/psyclone.pdf
index 3fa4af129f..303d1c4dca 100644
Binary files a/psyclone.pdf and b/psyclone.pdf differ
diff --git a/src/psyclone/generator.py b/src/psyclone/generator.py
index 77f18ce15c..95818823e4 100644
--- a/src/psyclone/generator.py
+++ b/src/psyclone/generator.py
@@ -50,6 +50,7 @@
import traceback
import importlib
import shutil
+from typing import Union, Callable, List, Tuple
from fparser.api import get_reader
from fparser.two import Fortran2003
@@ -93,18 +94,21 @@
LFRIC_TESTING = False
-def load_script(script_name, function_name="trans", is_optional=False):
+def load_script(
+ script_name: str, function_name: str = "trans",
+ is_optional: bool = False
+) -> Tuple[Callable, List[str], Union[bool, List[str]]]:
''' Loads the specified script containing a psyclone recipe. We also
prepend the script path to the sys.path, so that the script itself and
any imports that it has from the same directory can be found.
- :param str script_name: name of the script to load.
- :param str function_name: the name of the function to call in the script.
- :param bool is_optional: whether the function is optional or
- not. Defaults to False.
+ :param script_name: name of the script to load.
+ :param function_name: the name of the function to call in the script.
+ :param is_optional: whether the function is optional or not. Defaults to
+ False.
- :returns: callable recipe and list of files to skip.
- :rtype: Tuple[Callable, List[str]]
+ :returns: callable recipe, list of files to skip, whether to resolve
+ modules (or which ones).
:raises IOError: if the file is not found.
:raises GenerationError: if the file does not have .py extension.
@@ -139,13 +143,18 @@ def load_script(script_name, function_name="trans", is_optional=False):
else:
files_to_skip = []
+ if hasattr(recipe_module, "RESOLVE_IMPORTS"):
+ imports_to_resolve = recipe_module.RESOLVE_IMPORTS
+ else:
+ imports_to_resolve = []
+
if hasattr(recipe_module, function_name):
transformation_recipe = getattr(recipe_module, function_name)
if callable(transformation_recipe):
# Everything is good, return recipe and files_to_skip
- return transformation_recipe, files_to_skip
+ return transformation_recipe, files_to_skip, imports_to_resolve
elif is_optional:
- return None, files_to_skip
+ return None, files_to_skip, imports_to_resolve
raise GenerationError(
f"generator: attempted to use specified PSyclone "
f"transformation module '{module_name}' but it does not "
@@ -249,7 +258,7 @@ def generate(filename, api="", kernel_paths=None, script_name=None,
.create(invoke_info)
if script_name is not None:
# Apply provided recipe to PSyIR
- recipe, _ = load_script(script_name)
+ recipe, _, _ = load_script(script_name)
recipe(psy.container.root)
alg_gen = None
@@ -289,7 +298,8 @@ def generate(filename, api="", kernel_paths=None, script_name=None,
if script_name is not None:
# Call the optimisation script for algorithm optimisations
- recipe, _ = load_script(script_name, "trans_alg", is_optional=True)
+ recipe, _, _ = load_script(script_name, "trans_alg",
+ is_optional=True)
if recipe:
recipe(psyir)
@@ -377,7 +387,7 @@ def generate(filename, api="", kernel_paths=None, script_name=None,
if script_name is not None:
# Call the optimisation script for psy-layer optimisations
- recipe, _ = load_script(script_name)
+ recipe, _, _ = load_script(script_name)
recipe(psy.container.root)
# TODO issue #1618 remove Alg class and tests from PSyclone
@@ -699,9 +709,9 @@ def code_transformation_mode(input_file, recipe_file, output_file,
'''
# Load recipe file
if recipe_file:
- transformation_recipe, files_to_skip = load_script(recipe_file)
+ trans_recipe, files_to_skip, resolve_mods = load_script(recipe_file)
else:
- transformation_recipe, files_to_skip = (None, [])
+ trans_recipe, files_to_skip, resolve_mods = (None, [], False)
_, filename = os.path.split(input_file)
if filename not in files_to_skip:
@@ -718,11 +728,12 @@ def code_transformation_mode(input_file, recipe_file, output_file,
sys.exit(1)
# Parse file
- psyir = FortranReader().psyir_from_file(input_file)
+ psyir = FortranReader(resolve_modules=resolve_mods)\
+ .psyir_from_file(input_file)
# Modify file
- if transformation_recipe:
- transformation_recipe(psyir)
+ if trans_recipe:
+ trans_recipe(psyir)
# Add profiling if automatic profiling has been requested
for routine in psyir.walk(Routine):
diff --git a/src/psyclone/psyir/backend/fortran.py b/src/psyclone/psyir/backend/fortran.py
index 743352628f..e2bd7008cb 100644
--- a/src/psyclone/psyir/backend/fortran.py
+++ b/src/psyclone/psyir/backend/fortran.py
@@ -536,6 +536,11 @@ def gen_vardecl(self, symbol, include_visibility=False):
f"and should not be provided to 'gen_vardecl'."
)
+ result = ""
+ if len(symbol.preceding_comment) > 0:
+ for line in symbol.preceding_comment.splitlines():
+ result += f"{self._nindent}{self._COMMENT_PREFIX}{line}\n"
+
# Whether we're dealing with an array declaration and, if so, the
# shape of that array.
if isinstance(symbol.datatype, ArrayType):
@@ -554,10 +559,14 @@ def gen_vardecl(self, symbol, include_visibility=False):
# blocks appearing in SAVE statements.
decln = add_accessibility_to_unsupported_declaration(
symbol)
- return f"{self._nindent}{decln}\n"
-
- decln = symbol.datatype.declaration
- return f"{self._nindent}{decln}\n"
+ else:
+ decln = symbol.datatype.declaration
+ result += f"{self._nindent}{decln}"
+ if symbol.inline_comment != "":
+ result += (f" {self._COMMENT_PREFIX}"
+ f"{symbol.inline_comment}")
+ result += "\n"
+ return result
# The Fortran backend only handles UnsupportedFortranType
# declarations.
raise VisitorError(
@@ -566,7 +575,7 @@ def gen_vardecl(self, symbol, include_visibility=False):
f"supported by the Fortran backend.")
datatype = gen_datatype(symbol.datatype, symbol.name)
- result = f"{self._nindent}{datatype}"
+ result += f"{self._nindent}{datatype}"
if ArrayType.Extent.DEFERRED in array_shape:
# A 'deferred' array extent means this is an allocatable array
@@ -616,6 +625,9 @@ def gen_vardecl(self, symbol, include_visibility=False):
f"However it has an interface of '{symbol.interface}'.")
result += " = " + self._visit(symbol.initial_value)
+ if symbol.inline_comment != "":
+ result += f" {self._COMMENT_PREFIX}{symbol.inline_comment}"
+
return result + "\n"
def gen_interfacedecl(self, symbol):
@@ -696,7 +708,12 @@ def gen_typedecl(self, symbol, include_visibility=True):
f"Fortran backend cannot generate code for symbol "
f"'{symbol.name}' of type '{type(symbol.datatype).__name__}'")
- result = f"{self._nindent}type"
+ result = ""
+ if symbol.preceding_comment != "":
+ for line in symbol.preceding_comment.splitlines():
+ result += f"{self._nindent}{self._COMMENT_PREFIX}{line}\n"
+
+ result += f"{self._nindent}type"
if include_visibility:
if symbol.visibility == Symbol.Visibility.PRIVATE:
@@ -726,7 +743,13 @@ def gen_typedecl(self, symbol, include_visibility=True):
include_visibility=include_visibility)
self._depth -= 1
- result += f"{self._nindent}end type {symbol.name}\n"
+ result += f"{self._nindent}end type {symbol.name}"
+
+ if symbol.inline_comment != "":
+ result += f" {self._COMMENT_PREFIX}{symbol.inline_comment}"
+
+ result += "\n"
+
return result
def gen_default_access_stmt(self, symbol_table):
diff --git a/src/psyclone/psyir/backend/visitor.py b/src/psyclone/psyir/backend/visitor.py
index cb92956c36..b8db98037a 100644
--- a/src/psyclone/psyir/backend/visitor.py
+++ b/src/psyclone/psyir/backend/visitor.py
@@ -45,7 +45,7 @@
from psyclone.errors import PSycloneError
from psyclone.psyir.nodes import Node, Schedule, Container
-from psyclone.psyir.nodes.commentable_mixin import CommentableMixin
+from psyclone.psyir.commentable_mixin import CommentableMixin
class VisitorError(PSycloneError):
diff --git a/src/psyclone/psyir/nodes/commentable_mixin.py b/src/psyclone/psyir/commentable_mixin.py
similarity index 90%
rename from src/psyclone/psyir/nodes/commentable_mixin.py
rename to src/psyclone/psyir/commentable_mixin.py
index 13dde00b28..a3f1ca3ab8 100644
--- a/src/psyclone/psyir/nodes/commentable_mixin.py
+++ b/src/psyclone/psyir/commentable_mixin.py
@@ -63,6 +63,8 @@ def preceding_comment(self):
def preceding_comment(self, comment):
'''
:param str comment: comment preceding this statement.
+
+ :raises TypeError: if the comment is not a string.
'''
if not isinstance(comment, str):
raise TypeError(f"The preceding_comment must be a string but"
@@ -73,6 +75,8 @@ def append_preceding_comment(self, comment):
'''
:param str comment: comment to append after an newline in this
statement-preceding comment.
+
+ :raises TypeError: if the comment is not a string.
'''
if not isinstance(comment, str):
raise TypeError(f"The preceding_comment must be a string but"
@@ -94,10 +98,16 @@ def inline_comment(self):
def inline_comment(self, comment):
'''
:param str comment: inline comment associated with this statement.
+
+ :raises TypeError: if the comment is not a string.
+ :raises ValueError: if the comment contains a newline character.
'''
if not isinstance(comment, str):
raise TypeError(f"The inline_comment must be a string but"
f" found '{type(comment).__name__}'.")
+ if '\n' in comment:
+ raise ValueError(f"The inline_comment must be a single line but "
+ f"found a newline character in '{comment}'.")
self._inline_comment = comment
diff --git a/src/psyclone/psyir/frontend/fortran.py b/src/psyclone/psyir/frontend/fortran.py
index 523712035a..481690b9dd 100644
--- a/src/psyclone/psyir/frontend/fortran.py
+++ b/src/psyclone/psyir/frontend/fortran.py
@@ -38,7 +38,7 @@
import os
-from typing import Optional
+from typing import Optional, Union, List
from fparser.common.readfortran import FortranStringReader, FortranFileReader
from fparser.common.sourceinfo import FortranFormat
from fparser.two import Fortran2003, pattern_tools
@@ -55,14 +55,38 @@ class FortranReader():
''' PSyIR Fortran frontend. This frontend translates Fortran from a string
or a file into PSyIR using the fparser2 utilities.
+ :param free_form: If parsing free-form code or not (default True).
+ :param ignore_comments: If comments should be ignored or not
+ (default True).
+ :param ignore_directives: If directives should be ignored or not
+ (default True). Only has an effect
+ if ignore_comments is False.
+ :param last_comments_as_codeblocks: If the last comments in the
+ a given block (e.g. subroutine,
+ do, if-then body, etc.) should
+ be kept as code blocks or lost
+ (default False).
+ Only has an effect if ignore_comments
+ is False.
+ :param resolve_modules: Whether to resolve modules while parsing a file,
+ for more precise control it also accepts a list of module names.
+ Defaults to False.
+
'''
# Save parser object across instances to reduce the initialisation time
_parser = None
- def __init__(self):
+ def __init__(self, free_form: bool = True, ignore_comments: bool = True,
+ ignore_directives: bool = True,
+ last_comments_as_codeblocks: bool = False,
+ resolve_modules: Union[bool, List[str]] = False):
if not self._parser:
self._parser = ParserFactory().create(std="f2008")
- self._processor = Fparser2Reader()
+ self._free_form = free_form
+ self._ignore_comments = ignore_comments
+ self._processor = Fparser2Reader(ignore_directives,
+ last_comments_as_codeblocks,
+ resolve_modules)
SYMBOL_TABLES.clear()
@staticmethod
@@ -85,11 +109,10 @@ def validate_name(name: str):
raise ValueError(
f"Invalid Fortran name '{name}' found.")
- def psyir_from_source(self, source_code: str, free_form: bool = True):
+ def psyir_from_source(self, source_code: str):
''' Generate the PSyIR tree representing the given Fortran source code.
- :param source_code: text representation of the code to be parsed.
- :param free_form: If parsing free-form code or not (default True).
+ :param str source_code: text representation of the code to be parsed.
:returns: PSyIR representing the provided Fortran source code.
:rtype: :py:class:`psyclone.psyir.nodes.Node`
@@ -97,10 +120,12 @@ def psyir_from_source(self, source_code: str, free_form: bool = True):
'''
SYMBOL_TABLES.clear()
string_reader = FortranStringReader(
- source_code, include_dirs=Config.get().include_paths)
+ source_code, include_dirs=Config.get().include_paths,
+ ignore_comments=self._ignore_comments)
# Set reader to free format.
- string_reader.set_format(FortranFormat(free_form, False))
+ string_reader.set_format(FortranFormat(self._free_form, False))
parse_tree = self._parser(string_reader)
+
psyir = self._processor.generate_psyir(parse_tree)
return psyir
@@ -194,15 +219,12 @@ def psyir_from_statement(self, source_code: str,
self._processor.process_nodes(fake_parent, exec_part.children)
return fake_parent[0].detach()
- def psyir_from_file(self, file_path, free_form=True):
+ def psyir_from_file(self, file_path):
''' Generate the PSyIR tree representing the given Fortran file.
:param file_path: path of the file to be read and parsed.
:type file_path: str or any Python Path format.
- :param free_form: If parsing free-form code or not (default True).
- :type free_form: bool
-
:returns: PSyIR representing the provided Fortran file.
:rtype: :py:class:`psyclone.psyir.nodes.Node`
@@ -217,10 +239,12 @@ def psyir_from_file(self, file_path, free_form=True):
# Using the FortranFileReader instead of manually open the file allows
# fparser to keep the filename information in the tree
reader = FortranFileReader(file_path,
- include_dirs=Config.get().include_paths)
- reader.set_format(FortranFormat(free_form, False))
+ include_dirs=Config.get().include_paths,
+ ignore_comments=self._ignore_comments)
+ reader.set_format(FortranFormat(self._free_form, False))
parse_tree = self._parser(reader)
_, filename = os.path.split(file_path)
+
psyir = self._processor.generate_psyir(parse_tree, filename)
return psyir
diff --git a/src/psyclone/psyir/frontend/fparser2.py b/src/psyclone/psyir/frontend/fparser2.py
index c6855064cf..c5bb2be430 100644
--- a/src/psyclone/psyir/frontend/fparser2.py
+++ b/src/psyclone/psyir/frontend/fparser2.py
@@ -44,15 +44,16 @@
from dataclasses import dataclass, field
import os
import sys
-from typing import Optional, List
+from typing import Optional, List, Iterable
from fparser.common.readfortran import FortranStringReader
from fparser.two import C99Preprocessor, Fortran2003, utils
from fparser.two.parser import ParserFactory
-from fparser.two.utils import walk, BlockBase, StmtBase
+from fparser.two.utils import walk, BlockBase, StmtBase, Base
from psyclone.configuration import Config
from psyclone.errors import InternalError, GenerationError
+from psyclone.psyir.commentable_mixin import CommentableMixin
from psyclone.psyir.nodes import (
ArrayMember, ArrayOfStructuresReference, ArrayReference, Assignment,
BinaryOperation, Call, CodeBlock, Container, Directive, FileContainer,
@@ -712,14 +713,16 @@ def _process_routine_symbols(module_ast, container, visibility_map):
# By default, Fortran routines are not elemental.
is_elemental = False
# Name of the routine.
- name = str(routine.children[0].children[1])
+ stmt = walk(routine, (Fortran2003.Subroutine_Stmt,
+ Fortran2003.Function_Stmt))[0]
+ name = str(stmt.children[1])
# Type to give the RoutineSymbol.
sym_type = type_map[type(routine)]()
# Visibility of the symbol.
vis = visibility_map.get(name.lower(),
container.symbol_table.default_visibility)
# Check any prefixes on the routine declaration.
- prefix = routine.children[0].children[0]
+ prefix = stmt.children[0]
if prefix:
for child in prefix.children:
if isinstance(child, Fortran2003.Prefix_Spec):
@@ -848,8 +851,21 @@ class Fparser2Reader():
'''
Class to encapsulate the functionality for processing the fparser2 AST and
convert the nodes to PSyIR.
- '''
+ :param ignore_directives: Whether directives should be ignored or not
+ (default True). Only has an effect if comments were not ignored when
+ creating the fparser2 AST.
+ :param last_comments_as_codeblocks: Whether the last comments in the a
+ given block (e.g. subroutine, do, if-then body, etc.) should be kept as
+ CodeBlocks or lost (default False). Only has an effect if comments
+ were not ignored when creating the fparser2 AST.
+ :param resolve_modules: Whether to resolve modules while parsing a file,
+ for more precise control it also accepts a list of module names.
+ Defaults to False.
+
+ :raises TypeError: if the constructor argument is not of the expected type.
+
+ '''
unary_operators = OrderedDict([
('+', UnaryOperation.Operator.PLUS),
('-', UnaryOperation.Operator.MINUS),
@@ -932,7 +948,21 @@ class SelectTypeInfo:
num_clauses: int = -1
default_idx: int = -1
- def __init__(self):
+ def __init__(self, ignore_directives: bool = True,
+ last_comments_as_codeblocks: bool = False,
+ resolve_modules: bool = False):
+ if isinstance(resolve_modules, bool):
+ self._resolve_all_modules = resolve_modules
+ self._modules_to_resolve = []
+ elif (isinstance(resolve_modules, Iterable) and
+ all(isinstance(x, str) for x in resolve_modules)):
+ self._resolve_all_modules = False
+ self._modules_to_resolve = [n.lower() for n in resolve_modules]
+ else:
+ raise TypeError(
+ f"The 'resolve_modules' argument must be a boolean or an "
+ f"Iterable[str] but found '{resolve_modules}'.")
+
# Map of fparser2 node types to handlers (which are class methods)
self.handlers = {
Fortran2003.Allocate_Stmt: self._allocate_handler,
@@ -976,6 +1006,12 @@ def __init__(self):
Fortran2003.Main_Program: self._main_program_handler,
Fortran2003.Program: self._program_handler,
}
+ # Used to attach inline comments to the PSyIR symbols and nodes
+ self._last_psyir_parsed_and_span = None
+ # Whether to ignore directives when processing the fparser2 AST
+ self._ignore_directives = ignore_directives
+ # Whether to keep the last comments in a given block as CodeBlocks
+ self._last_comments_as_codeblocks = last_comments_as_codeblocks
@staticmethod
def nodes_to_code_block(parent, fp2_nodes, message=None):
@@ -1390,8 +1426,7 @@ def _process_save_statements(nodes, parent):
explicit_save.remove(name)
return list(explicit_save)
- @staticmethod
- def _process_use_stmts(parent, nodes, visibility_map=None):
+ def _process_use_stmts(self, parent, nodes, visibility_map=None):
'''
Process all of the USE statements in the fparser2 parse tree
supplied as a list of nodes. Imported symbols are added to
@@ -1534,6 +1569,11 @@ def _process_use_stmts(parent, nodes, visibility_map=None):
raise NotImplementedError(f"Found unsupported USE statement: "
f"'{decl}'")
+ # Import symbol information from module/container (if enabled)
+ if (self._resolve_all_modules or
+ container.name.lower() in self._modules_to_resolve):
+ parent.symbol_table.resolve_imports([container])
+
def _process_type_spec(self, parent, type_spec):
'''
Processes the fparser2 parse tree of a type specification in order to
@@ -1662,8 +1702,12 @@ def _process_decln(self, scope, symbol_table, decl, visibility_map=None,
:raises GenerationError: if a set of incompatible Fortran
attributes are found in a symbol declaration.
+ :returns: the newly created symbol.
+ :rtype: :py:class:`psyclone.psyir.symbols.DataSymbol`
+
'''
# pylint: disable=too-many-arguments
+
(type_spec, attr_specs, entities) = decl.items
# Parse the type_spec
@@ -1897,6 +1941,8 @@ def _process_decln(self, scope, symbol_table, decl, visibility_map=None,
f"'{sym_name}'.") from error
symbol_table.add(sym, tag=tag)
+ self._last_psyir_parsed_and_span = (sym, decl.item.span)
+
if init_expr:
# In Fortran, an initialisation expression on a declaration of
# a symbol (whether in a routine or a module) implies that the
@@ -1906,6 +1952,8 @@ def _process_decln(self, scope, symbol_table, decl, visibility_map=None,
else:
sym.interface = this_interface
+ return sym
+
def _process_derived_type_decln(self, parent, decl, visibility_map):
'''
Process the supplied fparser2 parse tree for a derived-type
@@ -1925,6 +1973,9 @@ def _process_derived_type_decln(self, parent, decl, visibility_map):
as the derived type being defined and it is not a DataTypeSymbol
or is not of UnresolvedType.
+ :return: the DataTypeSymbol representing the derived type.
+ :rtype: :py:class:`psyclone.psyir.symbols.DataTypeSymbol`
+
'''
name = str(walk(decl.children[0], Fortran2003.Type_Name)[0]).lower()
# Create a new StructureType for this derived type
@@ -2002,8 +2053,19 @@ def _process_derived_type_decln(self, parent, decl, visibility_map):
local_table = Container("tmp", parent=parent).symbol_table
local_table.default_visibility = default_compt_visibility
- for child in walk(decl, Fortran2003.Data_Component_Def_Stmt):
- self._process_decln(parent, local_table, child)
+ preceding_comments = []
+ for child in decl.children:
+ if isinstance(child, Fortran2003.Comment):
+ self.process_comment(child, preceding_comments)
+ continue
+ if isinstance(child, Fortran2003.Component_Part):
+ for component in walk(child,
+ Fortran2003.Data_Component_Def_Stmt):
+ csym = self._process_decln(parent, local_table,
+ component)
+ csym.preceding_comment = self._comments_list_to_string(
+ preceding_comments)
+ preceding_comments = []
# Convert from Symbols to StructureType components.
for symbol in local_table.symbols:
if symbol.is_unresolved:
@@ -2016,7 +2078,8 @@ def _process_derived_type_decln(self, parent, decl, visibility_map):
datatype = symbol.datatype
initial_value = symbol.initial_value
dtype.add(symbol.name, datatype, symbol.visibility,
- initial_value)
+ initial_value, symbol.preceding_comment,
+ symbol.inline_comment)
# Update its type with the definition we've found
tsymbol.datatype = dtype
@@ -2027,6 +2090,8 @@ def _process_derived_type_decln(self, parent, decl, visibility_map):
tsymbol.datatype = UnsupportedFortranType(str(decl))
tsymbol.interface = UnknownInterface()
+ return tsymbol
+
def _get_partial_datatype(self, node, scope, visibility_map):
'''Try to obtain partial datatype information from node by removing
any unsupported properties in the declaration.
@@ -2312,8 +2377,20 @@ def process_declarations(self, parent, nodes, arg_list,
# Handle any derived-type declarations/definitions before we look
# at general variable declarations in case any of the latter use
# the former.
- for decl in walk(nodes, Fortran2003.Derived_Type_Def):
- self._process_derived_type_decln(parent, decl, visibility_map)
+ preceding_comments = []
+ for node in nodes:
+ if isinstance(node, Fortran2003.Implicit_Part):
+ for comment in walk(node, Fortran2003.Comment):
+ self.process_comment(comment, preceding_comments)
+ elif isinstance(node, Fortran2003.Derived_Type_Def):
+ sym = self._process_derived_type_decln(parent, node,
+ visibility_map)
+ sym.preceding_comment = \
+ self._comments_list_to_string(preceding_comments)
+ preceding_comments = []
+ derived_type_span = (node.children[0].item.span[0],
+ node.children[-1].item.span[1])
+ self._last_psyir_parsed_and_span = (sym, derived_type_span)
# INCLUDE statements are *not* part of the Fortran language and
# can appear anywhere. Therefore we have to do a walk to make sure we
@@ -2324,19 +2401,43 @@ def process_declarations(self, parent, nodes, arg_list,
# The include_handler just raises an error so we use that to
# reduce code duplication.
self._include_handler(incl_nodes[0], parent)
+
# Now we've captured any derived-type definitions, proceed to look
# at the variable declarations.
+ preceding_comments = []
for node in nodes:
- if isinstance(node, Fortran2003.Interface_Block):
-
+ if isinstance(node, Fortran2003.Implicit_Part):
+ for comment in walk(node, Fortran2003.Comment):
+ self.process_comment(comment, preceding_comments)
+ continue
+ # Anything other than a PARAMETER statement or an
+ # IMPLICIT NONE means we can't handle this code.
+ # Any PARAMETER statements are handled separately by the
+ # call to _process_parameter_stmts below.
+ # Any ENTRY statements are checked for in _subroutine_handler.
+ child_nodes = walk(node, Fortran2003.Format_Stmt)
+ if child_nodes:
+ raise NotImplementedError(
+ f"Error processing implicit-part: Format statements "
+ f"are not supported but found '{child_nodes[0]}'")
+ child_nodes = walk(node, Fortran2003.Implicit_Stmt)
+ if any(imp.children != ('NONE',) for imp in child_nodes):
+ raise NotImplementedError(
+ f"Error processing implicit-part: implicit variable "
+ f"declarations not supported but found '{node}'")
+ elif isinstance(node, Fortran2003.Interface_Block):
self._process_interface_block(node, parent.symbol_table,
visibility_map)
elif isinstance(node, Fortran2003.Type_Declaration_Stmt):
try:
- self._process_decln(parent, parent.symbol_table, node,
- visibility_map, statics_list)
+ tsym = self._process_decln(parent, parent.symbol_table,
+ node, visibility_map,
+ statics_list)
+ tsym.preceding_comment = self._comments_list_to_string(
+ preceding_comments)
+ preceding_comments = []
except NotImplementedError:
# Found an unsupported variable declaration. Create a
# DataSymbol with UnsupportedType for each entity being
@@ -2377,14 +2478,19 @@ def process_declarations(self, parent, nodes, arg_list,
# possible that some may have already been processed
# successfully and thus be in the symbol table.
try:
- parent.symbol_table.add(
- DataSymbol(
- symbol_name, UnsupportedFortranType(
- str(node),
- partial_datatype=datatype),
- interface=UnknownInterface(),
- visibility=vis,
- initial_value=init))
+ new_symbol = DataSymbol(
+ symbol_name, UnsupportedFortranType(
+ str(node),
+ partial_datatype=datatype),
+ interface=UnknownInterface(),
+ visibility=vis,
+ initial_value=init)
+ new_symbol.preceding_comment \
+ = '\n'.join(preceding_comments)
+ self._last_psyir_parsed_and_span\
+ = (new_symbol,
+ node.item.span)
+ parent.symbol_table.add(new_symbol)
except KeyError as err:
if len(orig_children) == 1:
raise SymbolError(
@@ -2405,23 +2511,6 @@ def process_declarations(self, parent, nodes, arg_list,
# These node types are handled separately
pass
- elif isinstance(node, Fortran2003.Implicit_Part):
- # Anything other than a PARAMETER statement or an
- # IMPLICIT NONE means we can't handle this code.
- # Any PARAMETER statements are handled separately by the
- # call to _process_parameter_stmts below.
- # Any ENTRY statements are checked for in _subroutine_handler.
- child_nodes = walk(node, Fortran2003.Format_Stmt)
- if child_nodes:
- raise NotImplementedError(
- f"Error processing implicit-part: Format statements "
- f"are not supported but found '{child_nodes[0]}'")
- child_nodes = walk(node, Fortran2003.Implicit_Stmt)
- if any(imp.children != ('NONE',) for imp in child_nodes):
- raise NotImplementedError(
- f"Error processing implicit-part: implicit variable "
- f"declarations not supported but found '{node}'")
-
elif isinstance(node, Fortran2003.Namelist_Stmt):
# Place the declaration statement into the symbol table using
# an internal symbol name. In case that we need more details
@@ -2729,7 +2818,14 @@ def process_nodes(self, parent, nodes):
'''
code_block_nodes = []
message = "PSyclone CodeBlock (unsupported code) reason:"
+ # Store any comments that precede the next node
+ preceding_comments = []
for child in nodes:
+ # If the child is a comment, attach it to the preceding node when
+ # it is an inline comment; otherwise store it for the next node.
+ if isinstance(child, Fortran2003.Comment):
+ self.process_comment(child, preceding_comments)
+ continue
try:
psy_child = self._create_child(child, parent)
except NotImplementedError as err:
@@ -2748,12 +2844,39 @@ def process_nodes(self, parent, nodes):
if psy_child:
self.nodes_to_code_block(parent, code_block_nodes, message)
message = "PSyclone CodeBlock (unsupported code) reason:"
+ # Add the comments to nodes that support it and reset the
+ # list of comments
+ if isinstance(psy_child, CommentableMixin):
+ psy_child.preceding_comment\
+ += self._comments_list_to_string(
+ preceding_comments)
+ preceding_comments = []
+ if isinstance(psy_child, CommentableMixin):
+ if child.item is not None:
+ self._last_psyir_parsed_and_span = (psy_child,
+ child.item.span
+ )
+ # If the fparser2 node has no span, try to build one
+ # from the spans of the first and last children.
+ elif (len(child.children) != 0
+ and (isinstance(child.children[0], Base)
+ and child.children[0].item is not None)
+ and (isinstance(child.children[-1], Base)
+ and child.children[-1].item is not None)):
+ span = (child.children[0].item.span[0],
+ child.children[-1].item.span[1])
+ self._last_psyir_parsed_and_span = (psy_child,
+ span)
parent.addchild(psy_child)
# If psy_child is not initialised but it didn't produce a
# NotImplementedError, it means it is safe to ignore it.
+
# Complete any unfinished code-block
self.nodes_to_code_block(parent, code_block_nodes, message)
+ if self._last_comments_as_codeblocks and len(preceding_comments) != 0:
+ self.nodes_to_code_block(parent, preceding_comments)
+
def _create_child(self, child, parent=None):
'''
Create a PSyIR node representing the supplied fparser 2 node.
@@ -2778,6 +2901,7 @@ def _create_child(self, child, parent=None):
# there), so we have to examine the first statement within it. We
# must allow for the case where the block is empty though.
if (child.content and child.content[0] and
+ (not isinstance(child.content[0], Fortran2003.Comment)) and
child.content[0].item and child.content[0].item.label):
raise NotImplementedError("Unsupported labelled statement")
elif isinstance(child, StmtBase):
@@ -3128,8 +3252,28 @@ def _do_construct_handler(self, node, parent):
else:
raise NotImplementedError("Unsupported Loop")
- # Process loop body (ignore 'do' and 'end do' statements with [1:-1])
- self.process_nodes(parent=loop_body, nodes=node.content[1:-1])
+ # Process loop body (ignore 'do' and 'end do' statements)
+ # Keep track of the comments before the 'do' statement
+ loop_body_nodes = []
+ preceding_comments = []
+ found_do_stmt = False
+ for child in node.content:
+ if isinstance(child, Fortran2003.Comment) and not found_do_stmt:
+ self.process_comment(child, preceding_comments)
+ continue
+ if isinstance(child, Fortran2003.Nonlabel_Do_Stmt):
+ found_do_stmt = True
+ continue
+ if isinstance(child, Fortran2003.End_Do_Stmt):
+ continue
+
+ loop_body_nodes.append(child)
+
+ # Add the comments to the loop node.
+ loop.preceding_comment\
+ = self._comments_list_to_string(preceding_comments)
+ # Process the loop body.
+ self.process_nodes(parent=loop_body, nodes=loop_body_nodes)
return loop
@@ -3148,10 +3292,10 @@ def _if_construct_handler(self, node, parent):
'''
# Check that the fparser2 parsetree has the expected structure
- if not isinstance(node.content[0], Fortran2003.If_Then_Stmt):
+ if len(walk(node, Fortran2003.If_Then_Stmt)) == 0:
raise InternalError(
f"Failed to find opening if then statement in: {node}")
- if not isinstance(node.content[-1], Fortran2003.End_If_Stmt):
+ if len(walk(node, Fortran2003.End_If_Stmt)) == 0:
raise InternalError(
f"Failed to find closing end if statement in: {node}")
@@ -3164,6 +3308,14 @@ def _if_construct_handler(self, node, parent):
Fortran2003.End_If_Stmt)):
clause_indices.append(idx)
+ # Get the comments before the 'if' statement
+ preceding_comments = []
+ for child in node.content[:clause_indices[0]]:
+ if isinstance(child, Fortran2003.Comment):
+ self.process_comment(child, preceding_comments)
+ # NOTE: The comments are added to the IfBlock node.
+ # NOTE: Comments before the 'else[if]' statements are not handled.
+
# Deal with each clause: "if", "else if" or "else".
ifblock = None
currentparent = parent
@@ -3194,6 +3346,11 @@ def _if_construct_handler(self, node, parent):
elsebody.ast = node.content[start_idx]
newifblock.ast = node.content[start_idx]
+ # Add the comments to the if block.
+ newifblock.preceding_comment\
+ = self._comments_list_to_string(preceding_comments)
+ preceding_comments = []
+
# Create condition as first child
self.process_nodes(parent=newifblock,
nodes=[clause.items[0]])
@@ -3735,10 +3892,10 @@ def _case_construct_handler(self, node, parent):
'''
# Check that the fparser2 parsetree has the expected structure
- if not isinstance(node.content[0], Fortran2003.Select_Case_Stmt):
+ if len(walk(node, Fortran2003.Select_Case_Stmt)) == 0:
raise InternalError(
f"Failed to find opening case statement in: {node}")
- if not isinstance(node.content[-1], Fortran2003.End_Select_Stmt):
+ if len(walk(node, Fortran2003.End_Select_Stmt)) == 0:
raise InternalError(
f"Failed to find closing case statement in: {node}")
@@ -5125,7 +5282,19 @@ def _subroutine_handler(self, node, parent):
if isinstance(parent, FileContainer):
_process_routine_symbols(node, parent, {})
- name = node.children[0].children[1].string
+ # Get the subroutine or function statement and store the comments
+ # that precede it, or attach it to the last parsed node if it is
+ # on the same line.
+ preceding_comments = []
+ for child in node.children:
+ if isinstance(child, Fortran2003.Comment):
+ self.process_comment(child, preceding_comments)
+ continue
+ if isinstance(child, (Fortran2003.Subroutine_Stmt,
+ Fortran2003.Function_Stmt)):
+ stmt = child
+ break
+ name = stmt.children[1].string
routine = None
# The Routine may have been forward declared in
# _process_routine_symbol, and so may already exist in the
@@ -5144,6 +5313,9 @@ def _subroutine_handler(self, node, parent):
# empty Routine is detached from the tree.
parent.addchild(routine)
+ routine.preceding_comment\
+ = self._comments_list_to_string(preceding_comments)
+
try:
routine._ast = node
@@ -5162,19 +5334,38 @@ def _subroutine_handler(self, node, parent):
# Dummy_Arg_List, even if there's only one of them.
if (isinstance(node, (Fortran2003.Subroutine_Subprogram,
Fortran2003.Function_Subprogram)) and
- isinstance(node.children[0].children[2],
+ isinstance(stmt.children[2],
Fortran2003.Dummy_Arg_List)):
- arg_list = node.children[0].children[2].children
+ arg_list = stmt.children[2].children
else:
# Routine has no arguments
arg_list = []
self.process_declarations(routine, decl_list, arg_list)
+ # fparser puts comments at the end of the declarations,
+ # whereas, as preceding comments, they belong in the execution part
+ # unless one is an inline comment on the last declaration.
+ lost_comments = []
+ if len(decl_list) != 0 and isinstance(decl_list[-1],
+ Fortran2003.Implicit_Part):
+ for comment in walk(decl_list[-1], Fortran2003.Comment):
+ if len(comment.tostr()) == 0:
+ continue
+ if self._last_psyir_parsed_and_span is not None:
+ last_symbol, last_span \
+ = self._last_psyir_parsed_and_span
+ if (last_span is not None
+ and last_span[1] == comment.item.span[0]):
+ last_symbol.inline_comment\
+ = self._comment_to_string(comment)
+ continue
+ lost_comments.append(comment)
+
# Check whether the function-stmt has a prefix specifying the
# return type (other prefixes are handled in
# _process_routine_symbols()).
base_type = None
- prefix = node.children[0].children[0]
+ prefix = stmt.children[0]
if prefix:
for child in prefix.children:
if isinstance(child, Fortran2003.Prefix_Spec):
@@ -5188,7 +5379,7 @@ def _subroutine_handler(self, node, parent):
if isinstance(node, Fortran2003.Function_Subprogram):
# Check whether this function-stmt has a suffix containing
# 'RETURNS'
- suffix = node.children[0].children[3]
+ suffix = stmt.children[3]
if suffix:
# Although the suffix can, in principle, contain a proc-
# language-binding-spec (e.g. BIND(C, "some_name")), this
@@ -5258,7 +5449,9 @@ def _subroutine_handler(self, node, parent):
# valid.
pass
else:
- self.process_nodes(routine, sub_exec.content)
+ # Put the comments from the end of the declarations part
+ # at the start of the execution part manually
+ self.process_nodes(routine, lost_comments + sub_exec.content)
except NotImplementedError as err:
sym = routine.symbol
routine.detach()
@@ -5408,6 +5601,58 @@ def _program_handler(self, node, parent):
self.process_nodes(file_container, node.children)
return file_container
+ def _comment_to_string(self, comment):
+ '''Convert a comment to a string, by stripping the '!' and any
+ leading/trailing whitespace.
+
+ :param comment: Comment to convert to a string.
+ :type comment: :py:class:`fparser.two.utils.Comment`
+
+ :returns: The comment as a string.
+ :rtype: str
+
+ '''
+ return comment.tostr()[1:].strip()
+
+ def _comments_list_to_string(self, comments):
+ '''
+ Convert a list of comments to a single string with line breaks.
+
+ :param comments: List of comments.
+ :type comments: list[:py:class:`fparser.two.utils.Comment`]
+
+ :returns: A single string containing all the comments.
+ :rtype: str
+
+ '''
+ return '\n'.join([self._comment_to_string(comment)
+ for comment in comments])
+
+ def process_comment(self, comment, preceding_comments):
+ '''
+ Process a comment and attach it to the last PSyIR object (Symbol or
+ Node) if it is an inline comment. Otherwise append it to the
+ preceding_comments list. Ignore empty comments.
+
+ :param comment: Comment to process.
+ :type comment: :py:class:`fparser.two.utils.Comment`
+ :param preceding_comments: List of comments that precede the next node.
+ :type preceding_comments: List[:py:class:`fparser.two.utils.Comment`]
+
+ '''
+ if len(comment.tostr()) == 0:
+ return
+ if self._ignore_directives and comment.tostr().startswith("!$"):
+ return
+ if self._last_psyir_parsed_and_span is not None:
+ last_psyir, last_span = self._last_psyir_parsed_and_span
+ if (last_span[1] is not None
+ and last_span[1] == comment.item.span[0]):
+ last_psyir.inline_comment = self._comment_to_string(comment)
+ return
+
+ preceding_comments.append(comment)
+
# For Sphinx AutoAPI documentation generation
__all__ = ["Fparser2Reader"]
diff --git a/src/psyclone/psyir/nodes/container.py b/src/psyclone/psyir/nodes/container.py
index e35b706063..a2b909fc9b 100644
--- a/src/psyclone/psyir/nodes/container.py
+++ b/src/psyclone/psyir/nodes/container.py
@@ -44,7 +44,7 @@
from psyclone.psyir.symbols import (GenericInterfaceSymbol, RoutineSymbol,
Symbol, SymbolTable)
from psyclone.errors import GenerationError
-from psyclone.psyir.nodes.commentable_mixin import CommentableMixin
+from psyclone.psyir.commentable_mixin import CommentableMixin
class Container(ScopingNode, CommentableMixin):
diff --git a/src/psyclone/psyir/nodes/routine.py b/src/psyclone/psyir/nodes/routine.py
index d42256ef76..2f96da91aa 100644
--- a/src/psyclone/psyir/nodes/routine.py
+++ b/src/psyclone/psyir/nodes/routine.py
@@ -45,7 +45,7 @@
from psyclone.errors import GenerationError
from psyclone.psyir.nodes.codeblock import CodeBlock
-from psyclone.psyir.nodes.commentable_mixin import CommentableMixin
+from psyclone.psyir.commentable_mixin import CommentableMixin
from psyclone.psyir.nodes.node import Node
from psyclone.psyir.nodes.schedule import Schedule
from psyclone.psyir.symbols import DataSymbol, RoutineSymbol
diff --git a/src/psyclone/psyir/nodes/statement.py b/src/psyclone/psyir/nodes/statement.py
index 5057e157c5..d25f4ff13d 100644
--- a/src/psyclone/psyir/nodes/statement.py
+++ b/src/psyclone/psyir/nodes/statement.py
@@ -37,7 +37,7 @@
''' This module contains the Statement abstract node implementation.'''
from psyclone.psyir.nodes.node import Node
-from psyclone.psyir.nodes.commentable_mixin import CommentableMixin
+from psyclone.psyir.commentable_mixin import CommentableMixin
class Statement(Node, CommentableMixin):
diff --git a/src/psyclone/psyir/symbols/data_type_symbol.py b/src/psyclone/psyir/symbols/data_type_symbol.py
index 92df96eb9c..3d6363c575 100644
--- a/src/psyclone/psyir/symbols/data_type_symbol.py
+++ b/src/psyclone/psyir/symbols/data_type_symbol.py
@@ -73,8 +73,11 @@ def copy(self):
:rtype: :py:class:`psyclone.psyir.symbols.TypeSymbol`
'''
- return type(self)(self.name, self.datatype, visibility=self.visibility,
+ copy = type(self)(self.name, self.datatype, visibility=self.visibility,
interface=self.interface.copy())
+ copy.preceding_comment = self.preceding_comment
+ copy.inline_comment = self.inline_comment
+ return copy
def __str__(self):
return f"{self.name}: {type(self).__name__}"
diff --git a/src/psyclone/psyir/symbols/datasymbol.py b/src/psyclone/psyir/symbols/datasymbol.py
index c1ddb9e20d..1c84105414 100644
--- a/src/psyclone/psyir/symbols/datasymbol.py
+++ b/src/psyclone/psyir/symbols/datasymbol.py
@@ -319,11 +319,14 @@ def copy(self):
new_datatype = self.datatype.copy()
else:
new_datatype = self.datatype
- return DataSymbol(self.name, new_datatype,
+ copy = DataSymbol(self.name, new_datatype,
visibility=self.visibility,
interface=self.interface.copy(),
is_constant=self.is_constant,
initial_value=new_init_value)
+ copy.preceding_comment = self.preceding_comment
+ copy.inline_comment = self.inline_comment
+ return copy
def copy_properties(self, symbol_in):
'''Replace all properties in this object with the properties from
@@ -341,6 +344,8 @@ def copy_properties(self, symbol_in):
super().copy_properties(symbol_in)
self._is_constant = symbol_in.is_constant
self._initial_value = symbol_in.initial_value
+ self.preceding_comment = symbol_in.preceding_comment
+ self.inline_comment = symbol_in.inline_comment
def replace_symbols_using(self, table):
'''
diff --git a/src/psyclone/psyir/symbols/datatypes.py b/src/psyclone/psyir/symbols/datatypes.py
index ed9a635413..3121343544 100644
--- a/src/psyclone/psyir/symbols/datatypes.py
+++ b/src/psyclone/psyir/symbols/datatypes.py
@@ -45,6 +45,7 @@
from typing import Any, Union
from psyclone.errors import InternalError
+from psyclone.psyir.commentable_mixin import CommentableMixin
from psyclone.psyir.symbols.data_type_symbol import DataTypeSymbol
from psyclone.psyir.symbols.datasymbol import DataSymbol
from psyclone.psyir.symbols.symbol import Symbol
@@ -909,7 +910,7 @@ class StructureType(DataType):
'''
@dataclass(frozen=True)
- class ComponentType:
+ class ComponentType(CommentableMixin):
'''
Represents a member of a StructureType.
@@ -931,19 +932,34 @@ def __init__(self):
def __str__(self):
return "StructureType<>"
+ def __copy__(self):
+ '''
+ :returns: a copy of this StructureType.
+ :rtype: :py:class:`psyclone.psyir.symbols.StructureType`
+ '''
+ new = StructureType()
+ for name, component in self.components.items():
+ new.add(name, component.datatype, component.visibility,
+ component.initial_value, component.preceding_comment,
+ component.inline_comment)
+ return new
+
@staticmethod
def create(components):
'''
Creates a StructureType from the supplied list of properties.
:param components: the name, type, visibility (whether public or
- private) and initial value (if any) of each component.
+ private), initial value (if any), preceding comment (if any)
+ and inline comment (if any) of each component.
:type components: List[tuple[
str,
:py:class:`psyclone.psyir.symbols.DataType` |
:py:class:`psyclone.psyir.symbols.DataTypeSymbol`,
:py:class:`psyclone.psyir.symbols.Symbol.Visibility`,
- Optional[:py:class:`psyclone.psyir.symbols.DataNode`]
+ Optional[:py:class:`psyclone.psyir.symbols.DataNode`],
+ Optional[str],
+ Optional[str]
]]
:returns: the new type object.
@@ -952,10 +968,11 @@ def create(components):
'''
stype = StructureType()
for component in components:
- if len(component) != 4:
+ if len(component) not in (4, 5, 6):
raise TypeError(
- f"Each component must be specified using a 4-tuple of "
- f"(name, type, visibility, initial_value) but found a "
+ f"Each component must be specified using a 4 to 6-tuple "
+ f"of (name, type, visibility, initial_value, "
+ f"preceding_comment, inline_comment) but found a "
f"tuple with {len(component)} members: {component}")
stype.add(*component)
return stype
@@ -968,7 +985,8 @@ def components(self):
'''
return self._components
- def add(self, name, datatype, visibility, initial_value):
+ def add(self, name, datatype, visibility, initial_value,
+ preceding_comment="", inline_comment=""):
'''
Create a component with the supplied attributes and add it to
this StructureType.
@@ -982,6 +1000,11 @@ def add(self, name, datatype, visibility, initial_value):
:param initial_value: the initial value of the new component.
:type initial_value: Optional[
:py:class:`psyclone.psyir.nodes.DataNode`]
+ :param preceding_comment: a comment that precedes this component.
+ :type preceding_comment: Optional[str]
+ :param inline_comment: a comment that follows this component on the
+ same line.
+ :type inline_comment: Optional[str]
:raises TypeError: if any of the supplied values are of the wrong type.
@@ -1016,9 +1039,26 @@ def add(self, name, datatype, visibility, initial_value):
f"The initial value of a component of a StructureType must "
f"be None or an instance of 'DataNode', but got "
f"'{type(initial_value).__name__}'.")
-
- self._components[name] = self.ComponentType(
- name, datatype, visibility, initial_value)
+ if not isinstance(preceding_comment, str):
+ raise TypeError(
+ f"The preceding_comment of a component of a StructureType "
+ f"must be a 'str' but got "
+ f"'{type(preceding_comment).__name__}'")
+ if not isinstance(inline_comment, str):
+ raise TypeError(
+ f"The inline_comment of a component of a StructureType must "
+ f"be a 'str' but got "
+ f"'{type(inline_comment).__name__}'")
+
+ self._components[name] = self.ComponentType(name, datatype, visibility,
+ initial_value)
+ # Use object.__setattr__ due to the frozen nature of ComponentType
+ object.__setattr__(self._components[name],
+ "_preceding_comment",
+ preceding_comment)
+ object.__setattr__(self._components[name],
+ "_inline_comment",
+ inline_comment)
def lookup(self, name):
'''
diff --git a/src/psyclone/psyir/symbols/generic_interface_symbol.py b/src/psyclone/psyir/symbols/generic_interface_symbol.py
index 0e90e79505..abcf7589c9 100644
--- a/src/psyclone/psyir/symbols/generic_interface_symbol.py
+++ b/src/psyclone/psyir/symbols/generic_interface_symbol.py
@@ -46,7 +46,8 @@ class GenericInterfaceSymbol(RoutineSymbol):
different callable routines.
:param str name: name of the interface.
- :param routines: the routines that this interface provides access to.
+ :param routines: the routines that this interface provides access
+ to and whether or not each of them is a module procedure.
:type routines: list[tuple[
:py:class:`psyclone.psyir.symbols.RoutineSymbol`,
bool]]
@@ -69,9 +70,29 @@ class RoutineInfo:
def __init__(self, name, routines, **kwargs):
super().__init__(name, **kwargs)
- # Use the setter for 'routines' as it performs checking.
self._routines = []
- self.routines = routines
+ self._process_arguments(routines=routines,
+ **kwargs)
+
+ def _process_arguments(self, **kwargs):
+ ''' Process the arguments for the constructor and the specialise
+ methods. In this case the 'routines' argument.
+
+ :param kwargs: keyword arguments which can be:\n
+ :param routines: the routines that this interface provides access
+ to and whether or not each of them is a module procedure.
+ :type routines: list[tuple[
+ :py:class:`psyclone.psyir.symbols.RoutineSymbol`,
+ bool]]
+ '''
+
+ if "routines" in kwargs:
+ # Use the setter for 'routines' as it performs checking.
+ self.routines = kwargs.pop("routines")
+ else:
+ self._routines = []
+
+ super()._process_arguments(**kwargs)
@property
def routines(self):
diff --git a/src/psyclone/psyir/symbols/symbol.py b/src/psyclone/psyir/symbols/symbol.py
index a69529919d..241daaa599 100644
--- a/src/psyclone/psyir/symbols/symbol.py
+++ b/src/psyclone/psyir/symbols/symbol.py
@@ -45,6 +45,7 @@
AutomaticInterface, SymbolInterface, ArgumentInterface,
UnresolvedInterface, ImportInterface, UnknownInterface,
CommonBlockInterface, DefaultModuleInterface, StaticInterface)
+from psyclone.psyir.commentable_mixin import CommentableMixin
class SymbolError(PSycloneError):
@@ -59,7 +60,7 @@ def __init__(self, value):
self.value = "PSyclone SymbolTable error: "+str(value)
-class Symbol():
+class Symbol(CommentableMixin):
'''Generic Symbol item for the Symbol Table and PSyIR References.
It has an immutable name label because it must always match with the
key in the SymbolTable. If the symbol is private then it is only visible
@@ -146,8 +147,11 @@ def copy(self):
'''
# The constructors for all Symbol-based classes have 'name' as the
# first positional argument.
- return type(self)(self.name, visibility=self.visibility,
+ copy = type(self)(self.name, visibility=self.visibility,
interface=self.interface.copy())
+ copy.preceding_comment = self.preceding_comment
+ copy.inline_comment = self.inline_comment
+ return copy
def copy_properties(self, symbol_in):
'''Replace all properties in this object with the properties from
diff --git a/src/psyclone/psyir/symbols/typed_symbol.py b/src/psyclone/psyir/symbols/typed_symbol.py
index 89c0c9dd1d..45ed342af1 100644
--- a/src/psyclone/psyir/symbols/typed_symbol.py
+++ b/src/psyclone/psyir/symbols/typed_symbol.py
@@ -131,9 +131,12 @@ def copy(self):
'''
# The constructors for all Symbol-based classes have 'name' as the
# first positional argument.
- return type(self)(self.name, self.datatype.copy(),
+ copy = type(self)(self.name, self.datatype.copy(),
visibility=self.visibility,
interface=self.interface.copy())
+ copy.preceding_comment = self.preceding_comment
+ copy.inline_comment = self.inline_comment
+ return copy
def copy_properties(self, symbol_in):
'''Replace all properties in this object with the properties from
diff --git a/src/psyclone/tests/domain/lfric/lfric_extract_driver_creator_test.py b/src/psyclone/tests/domain/lfric/lfric_extract_driver_creator_test.py
index ec84c5e4ea..e0749b812d 100644
--- a/src/psyclone/tests/domain/lfric/lfric_extract_driver_creator_test.py
+++ b/src/psyclone/tests/domain/lfric/lfric_extract_driver_creator_test.py
@@ -69,7 +69,8 @@ def init_module_manager():
# to read extracted data from a file) relative to the infrastructure path:
psyclone_root = os.path.dirname(os.path.dirname(os.path.dirname(
os.path.dirname(os.path.dirname(infrastructure_path)))))
- read_mod_path = os.path.join(psyclone_root, "lib", "extract", "standalone")
+ read_mod_path = os.path.join(psyclone_root, "lib", "extract",
+ "standalone", "lfric")
# Enforce loading of the default ModuleManager
ModuleManager._instance = None
diff --git a/src/psyclone/tests/generator_test.py b/src/psyclone/tests/generator_test.py
index cb9da93c8b..2cfd39bcad 100644
--- a/src/psyclone/tests/generator_test.py
+++ b/src/psyclone/tests/generator_test.py
@@ -911,6 +911,70 @@ def trans(psyir):
in output)
+@pytest.mark.parametrize(
+ "idx, value, output",
+ [("0", "False", "result = a + b"),
+ ("1", "True", "result = 1 + 1"),
+ ("2", "[\"module1\"]", "result = 1 + b"),
+ ("3", "[\"module2\"]", "result = a + 1"),
+ # Now change both with case insensitive names
+ ("4", "[\"mOdule1\",\"moduLe2\"]", "result = 1 + 1")])
+def test_code_transformation_resolve_imports(tmpdir, capsys, monkeypatch,
+ idx, value, output):
+ ''' Test that applying recipes in the code-transformation mode follows the
+ selected list of module names when generating the tree. '''
+
+ module1 = '''
+ module module1
+ integer :: a
+ end module module1
+ '''
+ module2 = '''
+ module module2
+ integer :: b
+ end module module2
+ '''
+ code = '''
+ module test
+ use module1
+ use module2
+ real :: result
+ contains
+ subroutine mytest()
+ result = a + b
+ end subroutine mytest
+ end module test
+ '''
+ recipe = f'''
+from psyclone.psyir.nodes import Reference, Literal
+from psyclone.psyir.symbols import INTEGER_TYPE
+
+RESOLVE_IMPORTS = {value}
+
+def trans(psyir):
+ # Replace all integer references with literal '1', it can only be done if
+ # we have the type of the symbol (resolved from the module).
+ for ref in psyir.walk(Reference):
+ if ref.datatype == INTEGER_TYPE:
+ ref.replace_with(Literal("1", INTEGER_TYPE))
+ '''
+ recipe_name = f"replace_integers_{idx}.py"
+ for filename, content in [("module1.f90", module1),
+ ("module2.f90", module2),
+ ("code.f90", code),
+ (recipe_name, recipe)]:
+ with open(tmpdir.join(filename), "w", encoding='utf-8') as my_file:
+ my_file.write(content)
+
+ # Execute the recipe (no -I needed as we have everything at the same place)
+ monkeypatch.chdir(tmpdir)
+ main(["code.f90", "-s", recipe_name])
+ captured = capsys.readouterr()
+
+ # Compare the generated output to the parametrised expected output
+ assert output in str(captured), str(captured)
+
+
def test_code_transformation_trans(tmpdir):
''' Test that applying recipes that have a trans, and are not listed
in the FILES_TO_SKIP, executes the recipe transformations. '''
diff --git a/src/psyclone/tests/psyir/backend/fortran_gen_decls_test.py b/src/psyclone/tests/psyir/backend/fortran_gen_decls_test.py
index 3961dd42ba..53f3b9f45d 100644
--- a/src/psyclone/tests/psyir/backend/fortran_gen_decls_test.py
+++ b/src/psyclone/tests/psyir/backend/fortran_gen_decls_test.py
@@ -298,6 +298,33 @@ def test_gen_decls_static_variables(fortran_writer):
assert "parameter :: v1 = 1" in fortran_writer.gen_vardecl(sym)
+def test_gen_decls_comments(fortran_writer):
+ '''Test that the gen_vardecl method adds comments to the Fortran code
+ when the symbol has a description.
+
+ '''
+ sym = DataSymbol("v1", datatype=INTEGER_TYPE,
+ initial_value=Literal("1", INTEGER_TYPE),
+ is_constant=True)
+ sym.preceding_comment = "Preceding comment"
+ sym.inline_comment = "Inline comment"
+ result = fortran_writer.gen_vardecl(sym)
+ expected = ("! Preceding comment\n"
+ "integer, parameter :: v1 = 1 ! Inline comment")
+ assert expected in result
+
+ sym2 = DataSymbol("v2", datatype=INTEGER_TYPE,
+ initial_value=Literal("2", INTEGER_TYPE),
+ is_constant=True)
+ sym2.preceding_comment = "Preceding comment\nwith newline"
+ sym2.inline_comment = "Inline comment"
+ result = fortran_writer.gen_vardecl(sym2)
+ expected = ("! Preceding comment\n"
+ "! with newline\n"
+ "integer, parameter :: v2 = 2 ! Inline comment")
+ assert expected in result
+
+
@pytest.mark.parametrize("visibility", ["public", "private"])
def test_visibility_abstract_interface(fortran_reader, fortran_writer,
visibility):
diff --git a/src/psyclone/tests/psyir/backend/fortran_unsupported_declns_test.py b/src/psyclone/tests/psyir/backend/fortran_unsupported_declns_test.py
index 2f88e8266a..4604382b6c 100644
--- a/src/psyclone/tests/psyir/backend/fortran_unsupported_declns_test.py
+++ b/src/psyclone/tests/psyir/backend/fortran_unsupported_declns_test.py
@@ -224,6 +224,22 @@ def test_fw_add_accessibility():
"end type var")
+def test_fw_preceding_and_inline_comment(fortran_writer):
+ '''Test that comments are correctly added to the generated code'''
+ symbol = DataSymbol("var", UnsupportedFortranType("integer :: var"))
+ symbol.preceding_comment = "This is a preceding comment"
+ symbol.inline_comment = "This is an inline comment"
+ expected = ("! This is a preceding comment\n"
+ "integer :: var ! This is an inline comment")
+ assert expected in fortran_writer.gen_vardecl(symbol)
+
+ # include_visibility=True case
+ expected = ("! This is a preceding comment\n"
+ "integer, public :: var ! This is an inline comment")
+ assert expected in fortran_writer.gen_vardecl(symbol,
+ include_visibility=True)
+
+
def test_generating_unsupportedtype_routine_imports(
fortran_reader, tmpdir, monkeypatch, fortran_writer):
''' Tests that generating UnsupportedType imported RoutineSymbols (if their
diff --git a/src/psyclone/tests/psyir/nodes/commentable_mixin_test.py b/src/psyclone/tests/psyir/commentable_mixin_test.py
similarity index 94%
rename from src/psyclone/tests/psyir/nodes/commentable_mixin_test.py
rename to src/psyclone/tests/psyir/commentable_mixin_test.py
index 1fdf0fe2cd..69b192a6e1 100644
--- a/src/psyclone/tests/psyir/nodes/commentable_mixin_test.py
+++ b/src/psyclone/tests/psyir/commentable_mixin_test.py
@@ -68,6 +68,13 @@ def test_statement_comment_properties():
assert "The inline_comment must be a string but found 'int'." \
in str(err.value)
+ # Check that inline_comment cannot contain '\n'
+ with pytest.raises(ValueError) as err:
+ statement.inline_comment = "My inline\ncomment"
+ assert ("The inline_comment must be a single line but "
+ "found a newline character in 'My inline\ncomment'."
+ in str(err.value))
+
# Check the append_preceding_comment method
statement._preceding_comment = None # Uninitialised preceding_comment
with pytest.raises(TypeError) as err:
diff --git a/src/psyclone/tests/psyir/frontend/fortran_test.py b/src/psyclone/tests/psyir/frontend/fortran_test.py
index 2ffe3b983d..9c81ae7905 100644
--- a/src/psyclone/tests/psyir/frontend/fortran_test.py
+++ b/src/psyclone/tests/psyir/frontend/fortran_test.py
@@ -43,7 +43,8 @@
from psyclone.psyir.frontend.fparser2 import Fparser2Reader
from psyclone.psyir.nodes import (
Routine, FileContainer, UnaryOperation, BinaryOperation, Literal,
- Assignment, CodeBlock, IntrinsicCall)
+ Assignment, CodeBlock, IntrinsicCall, Loop)
+from psyclone.psyir.commentable_mixin import CommentableMixin
from psyclone.psyir.symbols import (
SymbolTable, DataSymbol, ScalarType, UnresolvedType)
@@ -69,6 +70,20 @@
end
'''
+CODE_WITH_COMMENTS_AND_DIRECTIVES = '''
+subroutine my_sub
+ integer :: a, b
+
+ ! Comment on do loop
+ !$omp parallel do
+ do a = 1, 10
+ ! Comment on assignment
+ b = a
+ end do
+ !$omp end parallel do
+end subroutine my_sub
+'''
+
def test_fortran_reader_constructor():
''' Test that the constructor initialises the _parser and _processor
@@ -97,9 +112,8 @@ def test_fortran_psyir_from_source_fixed_form():
Test we parse also fixed-form fortran code when enabling the right
option.
'''
- fortran_reader = FortranReader()
- file_container = fortran_reader.psyir_from_source(FIXED_FORM_CODE,
- free_form=False)
+ fortran_reader = FortranReader(free_form=False)
+ file_container = fortran_reader.psyir_from_source(FIXED_FORM_CODE)
assert isinstance(file_container, FileContainer)
subroutine = file_container.children[0]
assert isinstance(subroutine, Routine)
@@ -224,3 +238,40 @@ def test_fortran_psyir_from_file(fortran_reader, tmpdir_factory):
with pytest.raises(IOError) as err:
fortran_reader.psyir_from_file(filename)
assert "No such file or directory: '" + str(filename) in str(err.value)
+
+ # Check that directives and comments are ignored by default
+ filename = str(tmpdir_factory.mktemp('frontend_test').join("comments.f90"))
+ with open(filename, "w", encoding='utf-8') as wfile:
+ wfile.write(CODE_WITH_COMMENTS_AND_DIRECTIVES)
+ file_container = fortran_reader.psyir_from_file(filename)
+ assert isinstance(file_container, FileContainer)
+ for node in file_container.walk(CommentableMixin):
+ assert node.preceding_comment == ""
+ assert node.inline_comment == ""
+
+ # Check that comments can be preserved, and that directives are still
+ # ignored by default
+ fortran_reader = FortranReader(ignore_comments=False)
+ file_container = fortran_reader.psyir_from_file(filename)
+ assert isinstance(file_container, FileContainer)
+ for node in file_container.walk(CommentableMixin):
+ if isinstance(node, Loop):
+ assert node.preceding_comment == "Comment on do loop"
+ elif isinstance(node, Assignment):
+ assert node.preceding_comment == "Comment on assignment"
+ else:
+ assert node.preceding_comment == ""
+
+ # Check that directives can be preserved
+ fortran_reader = FortranReader(ignore_comments=False,
+ ignore_directives=False)
+ file_container = fortran_reader.psyir_from_file(filename)
+ assert isinstance(file_container, FileContainer)
+ for node in file_container.walk(CommentableMixin):
+ if isinstance(node, Loop):
+ assert node.preceding_comment == ("Comment on do loop\n"
+ "$omp parallel do")
+ elif isinstance(node, Assignment):
+ assert node.preceding_comment == "Comment on assignment"
+ else:
+ assert node.preceding_comment == ""
diff --git a/src/psyclone/tests/psyir/frontend/fparser2_comment_test.py b/src/psyclone/tests/psyir/frontend/fparser2_comment_test.py
new file mode 100644
index 0000000000..48fa5b771b
--- /dev/null
+++ b/src/psyclone/tests/psyir/frontend/fparser2_comment_test.py
@@ -0,0 +1,583 @@
+# -----------------------------------------------------------------------------
+# BSD 3-Clause License
+#
+# Copyright (c) 2021-2025, Science and Technology Facilities Council.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+# -----------------------------------------------------------------------------
+# Author Julien Remy, Université Grenoble Alpes & Inria
+
+"""Performs pytest tests on the support for comments in the fparser2
+PSyIR front-end"""
+
+import pytest
+
+from fparser.two import Fortran2003
+
+from psyclone.psyir.frontend.fortran import FortranReader
+from psyclone.psyir.nodes import (
+ Container,
+ Routine,
+ Assignment,
+ Loop,
+ IfBlock,
+ Call,
+ CodeBlock,
+)
+from psyclone.psyir.commentable_mixin import CommentableMixin
+from psyclone.psyir.symbols import DataTypeSymbol, StructureType
+
+from psyclone.psyir.backend.fortran import FortranWriter
+
+# Test code
+CODE = """
+! Comment on module 'test_mod'
+! and second line
+module test_mod
+ implicit none
+ ! Comment on derived type 'my_type'
+ type :: my_type
+ ! Comment on component 'i'
+ ! and second line
+ integer :: i ! Inline comment on 'integer :: i'
+ ! Comment on component 'j'
+ integer :: j
+ end type my_type ! Inline comment on 'end type my_type'
+ ! Comment on derived type 'my_type2'
+ type :: my_type2
+ end type my_type2 ! Inline comment on 'end type my_type2'
+contains
+ ! Comment on a subroutine
+ subroutine test_sub()
+ ! Comment on variable 'a'
+ ! and second line
+ integer :: a
+ ! Comment on variable 'i'
+ integer :: i
+ ! Comment on variable 'j'
+ integer :: j
+ ! Comment on assignment 'a = 1'
+ ! and second line
+ a = 1
+ ! Comment on call 'call test_sub()'
+ call test_sub()
+ ! Comment on if block 'if (a == 1) then'
+ if (a == 1) then
+ ! Comment on assignment 'a = 2'
+ a = 2
+ ! Comment on elseif block 'elseif (a == 2) then' => CodeBlock
+ elseif (a == 2) then
+ ! Comment on assignment 'a = 3'
+ a = 3
+ ! Comment on else block 'else' => CodeBlock
+ else
+ ! Comment on assignment 'a = 4'
+ a = 4
+ ! Comment on 'end if' => CodeBlock
+ end if ! Inline comment on 'end if'
+ ! Comment on loop 'do i = 1, 10'
+ do i = 1, 10
+ ! Comment on assignment 'a = 5'
+ a = 5
+ ! Comment on loop 'do j = 1, 10'
+ do j = 1, 10
+ ! Comment on assignment 'a = 6'
+ a = 6
+ ! Comment at end of loop on j => CodeBlock
+ end do ! Inline comment on 'end do j = 1, 10'
+ ! Comment at end of loop on i => CodeBlock
+ end do ! Inline comment on 'end do i = 1, 10'
+ ! Comment on 'do while (a < 10)'
+ do while (a < 10)
+ ! Comment on assignment 'a = 7'
+ a = 7 ! Inline comment on 'a = 7'
+ ! Comment at end of while loop => CodeBlock
+ end do ! Inline comment on 'end do while (a < 10)'
+ ! Comment at end of subroutine => CodeBlock
+ end subroutine test_sub ! Inline comment on 'end subroutine test_sub'
+! Comment at end of module => CodeBlock
+end module test_mod
+"""
+
+
+def test_no_comments():
+ """Test that the FortranReader is without comments by default"""
+ reader = FortranReader()
+ psyir = reader.psyir_from_source(CODE)
+
+ module = psyir.children[0]
+ assert isinstance(module, Container)
+ assert module.name == "test_mod"
+ assert module.preceding_comment == ""
+
+ my_type_sym = module.symbol_table.lookup("my_type")
+ assert isinstance(my_type_sym, DataTypeSymbol)
+ assert my_type_sym.preceding_comment == ""
+
+ assert isinstance(my_type_sym.datatype, StructureType)
+ for component in my_type_sym.datatype.components.values():
+ assert component.preceding_comment == ""
+
+ routine = module.walk(Routine)[0]
+ assert routine.name == "test_sub"
+ assert routine.preceding_comment == ""
+ for symbol in routine.symbol_table.symbols:
+ assert symbol.preceding_comment == ""
+ commentable_nodes = routine.walk(CommentableMixin)
+ assert len(commentable_nodes) != 0
+ for node in commentable_nodes:
+ assert node.preceding_comment == ""
+
+ assert len(routine.walk(CodeBlock)) == 0
+
+
+@pytest.mark.parametrize("last_comments_as_codeblocks", [True, False])
+def test_comments_and_codeblocks(last_comments_as_codeblocks):
+ """Test that the FortranReader is able to read comments"""
+ reader = FortranReader(
+ ignore_comments=False,
+ last_comments_as_codeblocks=last_comments_as_codeblocks)
+ psyir = reader.psyir_from_source(CODE)
+
+ module = psyir.children[0]
+ assert (
+ module.preceding_comment
+ == "Comment on module 'test_mod'\nand second line"
+ )
+ if last_comments_as_codeblocks:
+ assert isinstance(module.children[-1], CodeBlock)
+ assert isinstance(module.children[-1].ast, Fortran2003.Comment)
+ assert (module.children[-1].ast.tostr()
+ == "! Comment at end of module => CodeBlock")
+ else:
+ assert not isinstance(module.children[-1], CodeBlock)
+
+ my_type_sym = module.symbol_table.lookup("my_type")
+ assert my_type_sym.preceding_comment == "Comment on derived type 'my_type'"
+ assert my_type_sym.inline_comment == "Inline comment on 'end type my_type'"
+
+ assert isinstance(my_type_sym.datatype, StructureType)
+ for i, component in enumerate(my_type_sym.datatype.components.values()):
+ if i == 0:
+ assert (
+ component.preceding_comment
+ == "Comment on component 'i'\nand second line"
+ )
+ assert (
+ component.inline_comment == "Inline comment on 'integer :: i'"
+ )
+ else:
+ assert component.preceding_comment == "Comment on component 'j'"
+ assert component.inline_comment == ""
+
+ my_type2_sym = module.symbol_table.lookup("my_type2")
+ assert (
+ my_type2_sym.preceding_comment == "Comment on derived type 'my_type2'"
+ )
+ assert (
+ my_type2_sym.inline_comment == "Inline comment on 'end type my_type2'"
+ )
+
+ routine = module.walk(Routine)[0]
+ assert routine.preceding_comment == "Comment on a subroutine"
+ assert (
+ routine.inline_comment == "Inline comment on 'end subroutine test_sub'"
+ )
+ last_child = routine.children[-1]
+ if last_comments_as_codeblocks:
+ assert isinstance(last_child, CodeBlock)
+ assert isinstance(last_child.ast, Fortran2003.Comment)
+ assert (
+ last_child.ast.tostr()
+ == "! Comment at end of subroutine => CodeBlock"
+ )
+ else:
+ assert not isinstance(last_child, CodeBlock)
+
+ for i, symbol in enumerate(routine.symbol_table.symbols):
+ if i == 0:
+ assert (
+ symbol.preceding_comment
+ == "Comment on variable 'a'\nand second line"
+ )
+ else:
+ assert (
+ symbol.preceding_comment
+ == f"Comment on variable '{symbol.name}'"
+ )
+
+ for i, assignment in enumerate(routine.walk(Assignment)):
+ if i == 0:
+ assert (
+ assignment.preceding_comment
+ == "Comment on assignment 'a = 1'\nand second line"
+ )
+ else:
+ assert (
+ assignment.preceding_comment
+ == f"Comment on assignment 'a = {i+1}'"
+ )
+
+ call = routine.walk(Call)[0]
+ assert call.preceding_comment == "Comment on call 'call test_sub()'"
+
+ ifblock = routine.walk(IfBlock)[0]
+ assert (
+ ifblock.preceding_comment == "Comment on if block 'if (a == 1) then'"
+ )
+ last_child = ifblock.if_body.children[-1]
+ if last_comments_as_codeblocks:
+ assert isinstance(last_child, CodeBlock)
+ assert isinstance(last_child.ast, Fortran2003.Comment)
+ assert (
+ last_child.ast.tostr()
+ == "! Comment on elseif block 'elseif (a == 2) then' => CodeBlock"
+ )
+ else:
+ assert not isinstance(last_child, CodeBlock)
+ ifblock2 = ifblock.else_body.children[0]
+ last_child = ifblock2.if_body.children[-1]
+ if last_comments_as_codeblocks:
+ assert isinstance(last_child, CodeBlock)
+ assert isinstance(last_child.ast, Fortran2003.Comment)
+ assert (
+ last_child.ast.tostr()
+ == "! Comment on else block 'else' => CodeBlock"
+ )
+ else:
+ assert not isinstance(last_child, CodeBlock)
+ last_child = ifblock2.else_body.children[-1]
+ if last_comments_as_codeblocks:
+ assert isinstance(last_child, CodeBlock)
+ assert isinstance(last_child.ast, Fortran2003.Comment)
+ assert last_child.ast.tostr() == "! Comment on 'end if' => CodeBlock"
+ else:
+ assert not isinstance(last_child, CodeBlock)
+
+ loops = routine.walk(Loop)
+ loop_i = loops[0]
+ assert loop_i.variable.name == "i"
+ assert loop_i.preceding_comment == "Comment on loop 'do i = 1, 10'"
+ assert loop_i.inline_comment == "Inline comment on 'end do i = 1, 10'"
+ last_child = loop_i.loop_body.children[-1]
+ if last_comments_as_codeblocks:
+ assert isinstance(last_child, CodeBlock)
+ assert isinstance(last_child.ast, Fortran2003.Comment)
+ assert (
+ last_child.ast.tostr()
+ == "! Comment at end of loop on i => CodeBlock"
+ )
+ else:
+ assert not isinstance(last_child, CodeBlock)
+
+ loop_j = loops[1]
+ assert loop_j.variable.name == "j"
+ assert loop_j.preceding_comment == "Comment on loop 'do j = 1, 10'"
+ assert loop_j.inline_comment == "Inline comment on 'end do j = 1, 10'"
+ last_child = loop_j.loop_body.children[-1]
+ if last_comments_as_codeblocks:
+ assert isinstance(last_child, CodeBlock)
+ assert isinstance(last_child.ast, Fortran2003.Comment)
+ assert (
+ last_child.ast.tostr()
+ == "! Comment at end of loop on j => CodeBlock"
+ )
+ else:
+ assert not isinstance(last_child, CodeBlock)
+
+
+EXPECTED_WITH_COMMENTS_AND_CODEBLOCKS = """! Comment on module 'test_mod'
+! and second line
+module test_mod
+ implicit none
+ ! Comment on derived type 'my_type'
+ type, public :: my_type
+ ! Comment on component 'i'
+ ! and second line
+ integer, public :: i ! Inline comment on 'integer :: i'
+ ! Comment on component 'j'
+ integer, public :: j
+ end type my_type ! Inline comment on 'end type my_type'
+ ! Comment on derived type 'my_type2'
+ type, public :: my_type2
+ end type my_type2 ! Inline comment on 'end type my_type2'
+ public
+
+ contains
+ ! Comment on a subroutine
+ subroutine test_sub()
+ ! Comment on variable 'a'
+ ! and second line
+ integer :: a
+ ! Comment on variable 'i'
+ integer :: i
+ ! Comment on variable 'j'
+ integer :: j
+
+ ! Comment on assignment 'a = 1'
+ ! and second line
+ a = 1
+ ! Comment on call 'call test_sub()'
+ call test_sub()
+ ! Comment on if block 'if (a == 1) then'
+ if (a == 1) then
+ ! Comment on assignment 'a = 2'
+ a = 2
+ ! Comment on elseif block 'elseif (a == 2) then' => CodeBlock
+ else
+ if (a == 2) then
+ ! Comment on assignment 'a = 3'
+ a = 3
+ ! Comment on else block 'else' => CodeBlock
+ else
+ ! Comment on assignment 'a = 4'
+ a = 4
+ ! Comment on 'end if' => CodeBlock
+ end if
+ end if ! Inline comment on 'end if'
+ ! Comment on loop 'do i = 1, 10'
+ do i = 1, 10, 1
+ ! Comment on assignment 'a = 5'
+ a = 5
+ ! Comment on loop 'do j = 1, 10'
+ do j = 1, 10, 1
+ ! Comment on assignment 'a = 6'
+ a = 6
+ ! Comment at end of loop on j => CodeBlock
+ enddo ! Inline comment on 'end do j = 1, 10'
+ ! Comment at end of loop on i => CodeBlock
+ enddo ! Inline comment on 'end do i = 1, 10'
+ ! Comment on 'do while (a < 10)'
+ do while (a < 10)
+ ! Comment on assignment 'a = 7'
+ a = 7 ! Inline comment on 'a = 7'
+ ! Comment at end of while loop => CodeBlock
+ end do ! Inline comment on 'end do while (a < 10)'
+ ! Comment at end of subroutine => CodeBlock
+
+ end subroutine test_sub ! Inline comment on 'end subroutine test_sub'
+ ! Comment at end of module => CodeBlock
+
+end module test_mod
+"""
+
+EXPECTED_WITH_COMMENTS_AND_NO_CODEBLOCKS = """! Comment on module 'test_mod'
+! and second line
+module test_mod
+ implicit none
+ ! Comment on derived type 'my_type'
+ type, public :: my_type
+ ! Comment on component 'i'
+ ! and second line
+ integer, public :: i ! Inline comment on 'integer :: i'
+ ! Comment on component 'j'
+ integer, public :: j
+ end type my_type ! Inline comment on 'end type my_type'
+ ! Comment on derived type 'my_type2'
+ type, public :: my_type2
+ end type my_type2 ! Inline comment on 'end type my_type2'
+ public
+
+ contains
+ ! Comment on a subroutine
+ subroutine test_sub()
+ ! Comment on variable 'a'
+ ! and second line
+ integer :: a
+ ! Comment on variable 'i'
+ integer :: i
+ ! Comment on variable 'j'
+ integer :: j
+
+ ! Comment on assignment 'a = 1'
+ ! and second line
+ a = 1
+ ! Comment on call 'call test_sub()'
+ call test_sub()
+ ! Comment on if block 'if (a == 1) then'
+ if (a == 1) then
+ ! Comment on assignment 'a = 2'
+ a = 2
+ else
+ if (a == 2) then
+ ! Comment on assignment 'a = 3'
+ a = 3
+ else
+ ! Comment on assignment 'a = 4'
+ a = 4
+ end if
+ end if ! Inline comment on 'end if'
+ ! Comment on loop 'do i = 1, 10'
+ do i = 1, 10, 1
+ ! Comment on assignment 'a = 5'
+ a = 5
+ ! Comment on loop 'do j = 1, 10'
+ do j = 1, 10, 1
+ ! Comment on assignment 'a = 6'
+ a = 6
+ enddo ! Inline comment on 'end do j = 1, 10'
+ enddo ! Inline comment on 'end do i = 1, 10'
+ ! Comment on 'do while (a < 10)'
+ do while (a < 10)
+ ! Comment on assignment 'a = 7'
+ a = 7 ! Inline comment on 'a = 7'
+ end do ! Inline comment on 'end do while (a < 10)'
+
+ end subroutine test_sub ! Inline comment on 'end subroutine test_sub'
+
+end module test_mod
+"""
+
+
+@pytest.mark.parametrize("last_comments_as_codeblocks", [True, False])
+def test_write_comments(last_comments_as_codeblocks):
+ """Test that the comments are written back to the code"""
+ reader = FortranReader(
+ ignore_comments=False,
+ last_comments_as_codeblocks=last_comments_as_codeblocks
+ )
+ writer = FortranWriter()
+ psyir = reader.psyir_from_source(CODE)
+ generated_code = writer(psyir)
+ if last_comments_as_codeblocks:
+ assert generated_code == EXPECTED_WITH_COMMENTS_AND_CODEBLOCKS
+ else:
+ assert generated_code == EXPECTED_WITH_COMMENTS_AND_NO_CODEBLOCKS
+
+
+CODE_WITH_DIRECTIVE = """
+subroutine test_sub()
+ integer :: a
+ integer :: i
+ ! Comment on loop 'do i = 1, 10'
+ !$omp parallel do
+ do i = 1, 10
+ a = 1
+ end do
+end subroutine test_sub
+"""
+
+
+def test_no_directives():
+ """Test that the FortranReader is without directives by default"""
+ reader = FortranReader(ignore_comments=False)
+ psyir = reader.psyir_from_source(CODE_WITH_DIRECTIVE)
+
+ loop = psyir.walk(Loop)[0]
+ assert loop.preceding_comment == "Comment on loop 'do i = 1, 10'"
+
+
+def test_directives():
+ """Test that the FortranReader is able to read directives"""
+ reader = FortranReader(ignore_comments=False, ignore_directives=False)
+ psyir = reader.psyir_from_source(CODE_WITH_DIRECTIVE)
+
+ loop = psyir.walk(Loop)[0]
+ assert (
+ loop.preceding_comment
+ == "Comment on loop 'do i = 1, 10'\n$omp parallel do"
+ )
+
+
+EXPECTED_WITH_DIRECTIVES = """subroutine test_sub()
+ integer :: a
+ integer :: i
+
+ ! Comment on loop 'do i = 1, 10'
+ !$omp parallel do
+ do i = 1, 10, 1
+ a = 1
+ enddo
+
+end subroutine test_sub
+"""
+
+
+@pytest.mark.xfail(
+ reason="Directive is written back as '! $omp parallel do'"
+ "instead of '!$omp parallel do'"
+)
+def test_write_directives():
+ """Test that the directives are written back to the code"""
+ reader = FortranReader(ignore_comments=False, ignore_directives=False)
+ writer = FortranWriter()
+ psyir = reader.psyir_from_source(CODE_WITH_DIRECTIVE)
+ generated_code = writer(psyir)
+ assert generated_code == EXPECTED_WITH_DIRECTIVES
+
+
+CODE_WITH_INLINE_COMMENT = """
+subroutine test_sub()
+ integer :: a ! Inline comment on 'integer :: a'
+ ! Preceding comment on 'i = 1'
+ integer :: i ! Inline comment on 'integer :: i'
+ ! Preceding comment on 'a = 1'
+ a = 1 ! Inline comment on 'a = 1'
+ ! Preceding comment on 'i = 1'
+ i = 1 ! Inline comment on 'i = 1'
+
+ a = & ! First line of inline comment
+ i & ! Second line of inline comment
+ + 1 ! Third line of inline comment
+end subroutine test_sub
+"""
+
+
+def test_inline_comment():
+ """Test that the FortranReader is able to read inline comments"""
+ reader = FortranReader(ignore_comments=False)
+ psyir = reader.psyir_from_source(CODE_WITH_INLINE_COMMENT)
+
+ routine = psyir.walk(Routine)[0]
+ sym_a = routine.symbol_table.lookup("a")
+ assert sym_a.preceding_comment == ""
+ assert sym_a.inline_comment == "Inline comment on 'integer :: a'"
+ sym_i = routine.symbol_table.lookup("i")
+ assert sym_i.preceding_comment == "Preceding comment on 'i = 1'"
+ assert sym_i.inline_comment == "Inline comment on 'integer :: i'"
+
+ assignment = routine.walk(Assignment)[0]
+ assert "a = 1" in assignment.debug_string()
+ assert assignment.preceding_comment == "Preceding comment on 'a = 1'"
+ assert assignment.inline_comment == "Inline comment on 'a = 1'"
+
+ assignment = routine.walk(Assignment)[1]
+ assert "i = 1" in assignment.debug_string()
+ assert assignment.preceding_comment == "Preceding comment on 'i = 1'"
+ assert assignment.inline_comment == "Inline comment on 'i = 1'"
+
+ # When processing
+ # a = & ! First line of inline comment
+ # i & ! Second line of inline comment
+ # + 1 ! Third line of inline comment
+ # only the third comment is kept as inline comment
+ assignment = routine.walk(Assignment)[2]
+ assert "a = i + 1" in assignment.debug_string()
+ assert assignment.preceding_comment == ""
+ assert assignment.inline_comment == "Third line of inline comment"
diff --git a/src/psyclone/tests/psyir/frontend/fparser2_derived_type_test.py b/src/psyclone/tests/psyir/frontend/fparser2_derived_type_test.py
index 2ad3382fb5..54dbe391e0 100644
--- a/src/psyclone/tests/psyir/frontend/fparser2_derived_type_test.py
+++ b/src/psyclone/tests/psyir/frontend/fparser2_derived_type_test.py
@@ -48,7 +48,7 @@
from psyclone.psyir.nodes import (
KernelSchedule, CodeBlock, Assignment, ArrayOfStructuresReference,
StructureReference, Member, StructureMember, ArrayOfStructuresMember,
- ArrayMember, Literal, Reference, Range, IntrinsicCall)
+ ArrayMember, Literal, Reference, Range, IntrinsicCall, Container)
from psyclone.psyir.symbols import (
SymbolError, UnresolvedType, StructureType, DataTypeSymbol, ScalarType,
RoutineSymbol, Symbol, ArrayType, UnsupportedFortranType, DataSymbol,
@@ -185,7 +185,10 @@ def test_name_clash_derived_type_def(f2008_parser):
# This should raise an error because the Container symbol table will
# already contain a RoutineSymbol named 'my_type'
with pytest.raises(SymbolError) as err:
- processor.process_declarations(fake_parent, fparser2spec.content, [])
+ processor.process_declarations(fake_parent,
+ walk(fparser2spec.content,
+ Fortran2003.Derived_Type_Def),
+ [])
assert ("Error processing definition of derived type 'my_type'. The "
"symbol table already contains an entry with this name but it is a"
" 'RoutineSymbol' when it should be a 'DataTypeSymbol' (for the "
@@ -200,7 +203,10 @@ def test_name_clash_derived_type_def(f2008_parser):
" end type my_type2\n"
"end subroutine my_sub2\n"))
with pytest.raises(SymbolError) as err:
- processor.process_declarations(fake_parent, fparser2spec.content, [])
+ processor.process_declarations(fake_parent,
+ walk(fparser2spec.content,
+ Fortran2003.Derived_Type_Def),
+ [])
assert ("Error processing definition of derived type 'my_type2'. The "
"symbol table already contains a DataTypeSymbol with this name but"
" it is of type 'UnsupportedFortranType' when it should be of "
@@ -233,6 +239,42 @@ def test_existing_symbol_derived_type_def(f2008_parser):
assert isinstance(typ.interface, ImportInterface)
+def test_preceding_comments(f2008_parser):
+ ''' Check that the frontend correctly handles comments that precede
+ a derived type definition. '''
+ fake_parent = Container("dummy_container")
+ processor = Fparser2Reader()
+ fparser2spec = f2008_parser(
+ FortranStringReader("subroutine my_sub\n"
+ "! This is a comment\n"
+ "! This is another comment\n"
+ "type :: my_type\n"
+ " integer :: flag\n"
+ "end type my_type ! Inline comment\n"
+ "end subroutine my_sub\n",
+ ignore_comments=False))
+ sub_decl = walk(fparser2spec, types=Fortran2003.Subroutine_Subprogram)
+ sub = processor._subroutine_handler(sub_decl[0], fake_parent)
+ typ = sub.symbol_table.lookup("my_type")
+ assert typ.preceding_comment == ("This is a comment\n"
+ "This is another comment")
+ assert typ.inline_comment == "Inline comment"
+
+ fake_parent = Container("dummy_container")
+ processor = Fparser2Reader()
+ fparser2spec = f2008_parser(FortranStringReader("subroutine my_sub\n"
+ "type :: my_type\n"
+ " integer :: flag\n"
+ "end type my_type\n"
+ "end subroutine my_sub\n",
+ ignore_comments=False))
+ sub_decl = walk(fparser2spec, types=Fortran2003.Subroutine_Subprogram)
+ sub = processor._subroutine_handler(sub_decl[0], fake_parent)
+ typ = sub.symbol_table.lookup("my_type")
+ assert typ.preceding_comment == ""
+ assert typ.inline_comment == ""
+
+
@pytest.mark.usefixtures("f2008_parser")
@pytest.mark.parametrize("use_stmt", ["use grid_mod, only: grid_type",
"use grid_mod, only: GRID_TYPE",
diff --git a/src/psyclone/tests/psyir/frontend/fparser2_test.py b/src/psyclone/tests/psyir/frontend/fparser2_test.py
index f102dd7083..a58b01537a 100644
--- a/src/psyclone/tests/psyir/frontend/fparser2_test.py
+++ b/src/psyclone/tests/psyir/frontend/fparser2_test.py
@@ -49,6 +49,7 @@
Type_Declaration_Stmt)
from fparser.two.utils import walk
+from psyclone.configuration import Config
from psyclone.errors import InternalError, GenerationError
from psyclone.psyir.frontend.fparser2 import (
Fparser2Reader, default_precision, default_integer_type,
@@ -67,8 +68,39 @@
# pylint: disable=too-many-statements
+
# Tests
+
+def test_constructor():
+ ''' Test the constructor and its arguments '''
+ processor = Fparser2Reader()
+
+ # By default it will not resolve external modules
+ assert processor._resolve_all_modules is False
+ assert processor._modules_to_resolve == []
+
+ # But it can be set to true or a list of module names
+ processor = Fparser2Reader(resolve_modules=True)
+ assert processor._resolve_all_modules is True
+ assert processor._modules_to_resolve == []
+
+ processor = Fparser2Reader(resolve_modules=['module1'])
+ assert processor._resolve_all_modules is False
+ assert "module1" in processor._modules_to_resolve
+
+ # Anything else is invalid
+ with pytest.raises(TypeError) as err:
+ processor = Fparser2Reader(resolve_modules=[123])
+ assert ("The 'resolve_modules' argument must be a boolean or an "
+ "Iterable[str] but found '[123]'." in str(err.value))
+
+ with pytest.raises(TypeError) as err:
+ processor = Fparser2Reader(resolve_modules=456)
+ assert ("The 'resolve_modules' argument must be a boolean or an "
+ "Iterable[str] but found '456'." in str(err.value))
+
+
FAKE_KERNEL_METADATA = '''
module dummy_mod
use argument_mod
@@ -1622,6 +1654,97 @@ def test_process_use_stmts_with_accessibility_statements(parser):
assert symtab.lookup("some_var").visibility == Symbol.Visibility.PUBLIC
+@pytest.mark.parametrize("value",
+ [True, # All enabled
+ ["other1", "other2"], # Precise name enabled
+ False]) # Disabled
+def test_process_use_stmts_resolving_external_imports(
+ parser, tmpdir, monkeypatch, value):
+    ''' Test that if the Fparser2Reader is provided with a list of
+    modules to resolve, these are used to resolve external symbol
+    information by the frontend.'''
+
+ # Write a first module into a tmp file
+ other1 = str(tmpdir.join("other1.f90"))
+ with open(other1, "w", encoding='utf-8') as my_file:
+ my_file.write('''
+ module other1
+ integer, parameter :: N = 10
+ integer, dimension(N) :: unused_array
+ integer, dimension(N), private :: private_array
+ contains
+ function a_func(i)
+ integer :: a_func
+ integer, intent(in) :: i
+ a_func = 3
+ end function
+ end module other1
+ ''')
+
+ # Write a second module to a tmp file
+ other2 = str(tmpdir.join("other2.F90"))
+ with open(other2, "w", encoding='utf-8') as my_file:
+ my_file.write('''
+ module other2
+ integer, dimension(10) :: an_array
+ integer, dimension(10) :: other_array
+ end module other2
+ ''')
+
+    # Add the path to the include_paths and set up a frontend instance
+    # with the modules-to-resolve names
+ monkeypatch.setattr(Config.get(), '_include_paths', [tmpdir])
+ processor = Fparser2Reader(resolve_modules=value)
+ reader = FortranStringReader('''
+ module test
+ use other1
+ contains
+ subroutine test_function()
+ use other2, only : an_array
+ integer :: a
+ a = an_array(1) + a_func(2)
+ end subroutine
+ end module
+ ''')
+ parse_tree = parser(reader)
+ module = parse_tree.children[0]
+ psyir = processor._module_handler(module, None)
+
+ symtab = psyir.symbol_table
+
+ if value is False:
+ # If value is false, the symbol information is not resolved, e.g.
+ # unused public symbols will not be present
+ assert "unused_array" not in symtab
+ return # The rest of the asserts require this information
+
+ # The container, and all its public symbols are now in the table with
+ # the right symbol kind and datatype
+ assert isinstance(symtab.lookup("other1"), ContainerSymbol)
+ assert isinstance(symtab.lookup("a_func"), RoutineSymbol)
+ assert isinstance(symtab.lookup("unused_array"), DataSymbol)
+ assert symtab.lookup("n").datatype == INTEGER_TYPE
+ # But not the private symbols
+ assert "private_array" not in symtab
+
+ routine = psyir.children[0]
+ innersymtab = routine.symbol_table
+ # The container, and all its 'only'-listed symbols are now in the
+ # routine symbol table
+ assert isinstance(innersymtab.lookup("other2"), ContainerSymbol)
+ assert isinstance(innersymtab.lookup("an_array"), DataSymbol)
+ assert isinstance(innersymtab.lookup("an_array").datatype, ArrayType)
+ # But not the other public symbols, nor in the container symbol table.
+ assert "other_array" not in innersymtab
+ assert "an_array" not in symtab
+
+ # The provided info allows the reader to differentiate between function
+ # calls and Array accesses :)
+ stmt_rhs = routine[0].rhs
+ assert isinstance(stmt_rhs.children[0], Reference)
+ assert isinstance(stmt_rhs.children[1], Call)
+
+
def test_intrinsic_use_stmt(parser):
''' Tests that intrinsic value is set correctly for an intrinsic module
use statement.'''
diff --git a/src/psyclone/tests/psyir/symbols/datatype_test.py b/src/psyclone/tests/psyir/symbols/datatype_test.py
index 32b9ebe76b..2bba4e0521 100644
--- a/src/psyclone/tests/psyir/symbols/datatype_test.py
+++ b/src/psyclone/tests/psyir/symbols/datatype_test.py
@@ -920,6 +920,17 @@ def test_structure_type():
assert ("The initial value of a component of a StructureType must be "
"None or an instance of 'DataNode', but got 'str'."
in str(err.value))
+ with pytest.raises(TypeError) as err:
+ stype.add("hello", INTEGER_TYPE, Symbol.Visibility.PUBLIC, None,
+ preceding_comment=None)
+ assert ("The preceding_comment of a component of a StructureType "
+ "must be a 'str' but got 'NoneType'" in str(err.value))
+ with pytest.raises(TypeError) as err:
+ stype.add("hello", INTEGER_TYPE, Symbol.Visibility.PUBLIC, None,
+ inline_comment=None)
+ assert ("The inline_comment of a component of a StructureType "
+ "must be a 'str' but got 'NoneType'" in str(err.value))
+
with pytest.raises(KeyError):
stype.lookup("missing")
# Cannot have a recursive type definition
@@ -954,9 +965,10 @@ def test_create_structuretype():
StructureType.create([
("fred", INTEGER_TYPE, Symbol.Visibility.PUBLIC, None),
("george", Symbol.Visibility.PRIVATE)])
- assert ("Each component must be specified using a 4-tuple of (name, "
- "type, visibility, initial_value) but found a tuple with 2 "
- "members: ('george', " in str(err.value))
+ assert ("Each component must be specified using a 4 to 6-tuple of (name, "
+ "type, visibility, initial_value, preceding_comment, "
+ "inline_comment) but found a tuple with 2 members: ('george', "
+ in str(err.value))
def test_structuretype_eq():
@@ -1017,3 +1029,50 @@ def test_structuretype_replace_symbols():
table.add(newtsymbol)
stype.replace_symbols_using(table)
assert stype.components["barry"].datatype is newtsymbol
+
+
+def test_structuretype_componenttype_eq():
+ '''Test that the equality operator of StructureType.ComponentType does
+ not take the preceding_comment and inline_comment into account.
+ '''
+ comp1 = StructureType.ComponentType("fred", INTEGER_TYPE,
+ Symbol.Visibility.PUBLIC, None)
+ comp2 = StructureType.ComponentType("fred", INTEGER_TYPE,
+ Symbol.Visibility.PUBLIC, None)
+ assert comp1 == comp2
+
+ comp1 = StructureType.ComponentType("fred", INTEGER_TYPE,
+ Symbol.Visibility.PUBLIC, None)
+ object.__setattr__(comp1, "_preceding_comment", "A comment")
+ comp2 = StructureType.ComponentType("fred", INTEGER_TYPE,
+ Symbol.Visibility.PUBLIC, None)
+ object.__setattr__(comp2, "_preceding_comment", "Another comment")
+ assert comp1 == comp2
+
+ comp1 = StructureType.ComponentType("fred", INTEGER_TYPE,
+ Symbol.Visibility.PUBLIC, None)
+ object.__setattr__(comp1, "_inline_comment", "A comment")
+ comp2 = StructureType.ComponentType("fred", INTEGER_TYPE,
+ Symbol.Visibility.PUBLIC, None)
+ object.__setattr__(comp2, "_inline_comment", "Another comment")
+ assert comp1 == comp2
+
+ comp1 = StructureType.ComponentType("fred", INTEGER_TYPE,
+ Symbol.Visibility.PUBLIC, None)
+ comp2 = StructureType.ComponentType("george", INTEGER_TYPE,
+ Symbol.Visibility.PUBLIC, None)
+ assert comp1 != comp2
+
+
+def test_structuretype___copy__():
+ '''Test the __copy__ method of StructureType.'''
+ stype = StructureType.create([
+ ("nancy", INTEGER_TYPE, Symbol.Visibility.PUBLIC, None),
+ ("peggy", REAL_TYPE, Symbol.Visibility.PRIVATE,
+ Literal("1.0", REAL_TYPE))])
+ copied = stype.__copy__()
+ assert copied == stype
+ assert copied is not stype
+ # The copied components should compare equal to the originals
+ assert copied.components["nancy"] == stype.components["nancy"]
+ assert copied.components["peggy"] == stype.components["peggy"]
diff --git a/src/psyclone/tests/psyir/symbols/generic_interface_symbol_test.py b/src/psyclone/tests/psyir/symbols/generic_interface_symbol_test.py
index f496bb26fe..a0b424b598 100644
--- a/src/psyclone/tests/psyir/symbols/generic_interface_symbol_test.py
+++ b/src/psyclone/tests/psyir/symbols/generic_interface_symbol_test.py
@@ -38,7 +38,7 @@
import pytest
from psyclone.psyir.symbols import (GenericInterfaceSymbol, INTEGER_TYPE,
- RoutineSymbol, SymbolTable)
+ RoutineSymbol, SymbolTable, Symbol)
def test_gis_constructor():
@@ -78,6 +78,27 @@ def test_gis_constructor():
assert oak.container_routines == [nut]
+def test_gis_specialise():
+ '''
+ Specialise a generic symbol into a GenericInterfaceSymbol.
+
+ '''
+ # Specialise symbols without routines
+ symbol = Symbol("no_routines")
+ symbol.specialise(GenericInterfaceSymbol)
+ assert symbol.routines == [] # It now has a routines attribute
+
+ symbol = Symbol("has_routines")
+ impl1 = RoutineSymbol("impl1")
+ impl2 = RoutineSymbol("impl2")
+ symbol.specialise(GenericInterfaceSymbol,
+ routines=[(impl1, True), (impl2, False)])
+ assert symbol.routines[0].symbol is impl1
+ assert symbol.routines[0].from_container is True
+ assert symbol.routines[1].symbol is impl2
+ assert symbol.routines[1].from_container is False
+
+
def test_gis_typedsymbol_keywords():
'''
Test that keyword arguments to the constructor are passed through to the