From d78c8fdc14081cf20dd07acbc322bc1bde26c199 Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Tue, 12 Nov 2024 16:40:45 +0100 Subject: [PATCH 01/16] pass json args to the ssh hpc calls --- Makefile | 2 +- .../batch_submit_workflow_job.sh | 55 ++++++++++++------- .../wrapper_submit_workflow_job.sh | 28 ++++++++-- src/utils/operandi_utils/hpc/constants.py | 7 ++- src/utils/operandi_utils/hpc/nhr_executor.py | 48 ++++++++-------- .../{_test_3_hpc => test_3_hpc}/__init__.py | 0 .../_test_1_nhr_executor.py} | 0 .../_test_2_nhr_transfer.py} | 0 .../test_3_nhr_combined.py | 4 +- 9 files changed, 91 insertions(+), 53 deletions(-) rename tests/tests_utils/{_test_3_hpc => test_3_hpc}/__init__.py (100%) rename tests/tests_utils/{_test_3_hpc/test_1_nhr_executor.py => test_3_hpc/_test_1_nhr_executor.py} (100%) rename tests/tests_utils/{_test_3_hpc/test_2_nhr_transfer.py => test_3_hpc/_test_2_nhr_transfer.py} (100%) rename tests/tests_utils/{_test_3_hpc => test_3_hpc}/test_3_nhr_combined.py (97%) diff --git a/Makefile b/Makefile index 778a29b0..026c8c6d 100755 --- a/Makefile +++ b/Makefile @@ -119,7 +119,7 @@ run-tests-utils: run-tests-broker: export $(shell sed 's/=.*//' ./tests/.env) - pytest tests/tests_broker/test_*.py -v + pytest tests/tests_broker/test_*.py -s -v run-tests-harvester: export $(shell sed 's/=.*//' ./tests/.env) diff --git a/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh b/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh index 04ebe84c..d5476663 100755 --- a/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh +++ b/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh @@ -2,6 +2,9 @@ set -e +module purge +module load jq + # Parameters are as follows: # S0 - This batch script # S1 - The scratch base for slurm workspaces @@ -24,18 +27,32 @@ OCRD_MODELS_DIR_IN_NODE="${TMP_LOCAL}/ocrd_models" OCRD_MODELS_DIR_IN_DOCKER="/usr/local/share/ocrd-resources" BIND_OCRD_MODELS="${OCRD_MODELS_DIR_IN_NODE}/ocrd-resources:${OCRD_MODELS_DIR_IN_DOCKER}" -SCRATCH_BASE=$1 -WORKFLOW_JOB_ID=$2 -NEXTFLOW_SCRIPT_ID=$3 -IN_FILE_GRP=$4 -WORKSPACE_ID=$5 -METS_BASENAME=$6 -CPUS=$7 -RAM=$8 -FORKS=$9 -PAGES=${10} -USE_METS_SERVER=${11} -FILE_GROUPS_TO_REMOVE=${12} +json_args="$1" +SCRATCH_BASE=$(echo "$json_args" | jq .scratch_base_dir | tr -d '"') +WORKFLOW_JOB_ID=$(echo "$json_args" | jq .workflow_job_id | tr -d '"') +NEXTFLOW_SCRIPT_ID=$(echo "$json_args" | jq .nextflow_script_id | tr -d '"') +IN_FILE_GRP=$(echo "$json_args" | jq .input_file_group | tr -d '"') +WORKSPACE_ID=$(echo "$json_args" | jq .workspace_id | tr -d '"') +METS_BASENAME=$(echo "$json_args" | jq .mets_basename | tr -d '"') +CPUS=$(echo "$json_args" | jq .cpus | tr -d '"') +RAM=$(echo "$json_args" | jq .ram | tr -d '"') +FORKS=$(echo "$json_args" | jq .nf_process_forks | tr -d '"') +PAGES=$(echo "$json_args" | jq .ws_pages_amount | tr -d '"') +USE_METS_SERVER=$(echo "$json_args" | jq .use_mets_server_bash_flag | tr -d '"') +FILE_GROUPS_TO_REMOVE=$(echo "$json_args" | jq .file_groups_to_remove | tr -d '"') + +echo "SCRATCH_BASE: $SCRATCH_BASE" +echo "WORKFLOW_JOB_ID: $WORKFLOW_JOB_ID" +echo "NEXTFLOW_SCRIPT_ID: $NEXTFLOW_SCRIPT_ID" +echo "IN_FILE_GRP: $IN_FILE_GRP" +echo "WORKSPACE_ID: $WORKSPACE_ID" +echo "METS_BASENAME: $METS_BASENAME" +echo "CPUS: $CPUS" +echo "RAM: $RAM" +echo "FORKS: $FORKS" +echo "PAGES: $PAGES" +echo "USE_METS_SERVER: $USE_METS_SERVER" +echo "FILE_GROUPS_TO_REMOVE: $FILE_GROUPS_TO_REMOVE" 
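# A possible simplification (sketched here, not applied in this patch): a later commit in this
# series leaves a TODO to use jq's -r/--raw-output option, which prints the raw string value
# without the surrounding JSON quotes, so the `| tr -d '"'` step becomes unnecessary, e.g.:
#   SCRATCH_BASE=$(echo "$json_args" | jq -r .scratch_base_dir)
#   CPUS=$(echo "$json_args" | jq -r .cpus)
#   USE_METS_SERVER=$(echo "$json_args" | jq -r .use_mets_server_bash_flag)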
WORKFLOW_JOB_DIR="${SCRATCH_BASE}/${WORKFLOW_JOB_ID}" NF_SCRIPT_PATH="${WORKFLOW_JOB_DIR}/${NEXTFLOW_SCRIPT_ID}" @@ -46,13 +63,6 @@ BIND_METS_FILE_PATH="${WORKSPACE_DIR_IN_DOCKER}/${METS_BASENAME}" METS_SOCKET_BASENAME="mets_server.sock" BIND_METS_SOCKET_PATH="${WORKSPACE_DIR_IN_DOCKER}/${METS_SOCKET_BASENAME}" -hostname -/opt/slurm/etc/scripts/misc/slurm_resources - -module purge -module load apptainer -module load nextflow -# module load spack-user; eval "$(spack load --sh curl%gcc@10.2.0)" echo "ocrd all SIF path: $SIF_PATH" echo "ocrd all SIF path node local: $SIF_PATH_IN_NODE" @@ -62,6 +72,13 @@ echo "Use mets server: $USE_METS_SERVER" echo "Used file group: $IN_FILE_GRP" echo "Pages: $PAGES" +module load apptainer +module load nextflow +# module load spack-user; eval "$(spack load --sh curl%gcc@10.2.0)" + +hostname +# /opt/slurm/etc/scripts/misc/slurm_resource + # To submit separate jobs for each process in the NF script # export NXF_EXECUTOR=slurm diff --git a/src/utils/operandi_utils/hpc/batch_scripts/wrapper_submit_workflow_job.sh b/src/utils/operandi_utils/hpc/batch_scripts/wrapper_submit_workflow_job.sh index a3f6255a..32a02ff5 100644 --- a/src/utils/operandi_utils/hpc/batch_scripts/wrapper_submit_workflow_job.sh +++ b/src/utils/operandi_utils/hpc/batch_scripts/wrapper_submit_workflow_job.sh @@ -22,12 +22,30 @@ # $18 - Boolean flag showing whether a mets server is utilized or not # $19 - File groups to be removed from the workspace after the processing -if [ "$6" == "48h" ] ; then +module purge +module load jq + +sbatch_args="$1" +partition=$(echo "$sbatch_args" | jq .partition | tr -d '"') +deadline_time=$(echo "$sbatch_args" | jq .job_deadline_time | tr -d '"') +output=$(echo "$sbatch_args" | jq .output_log | tr -d '"') +cpus_per_task=$(echo "$sbatch_args" | jq .cpus | tr -d '"') +memory=$(echo "$sbatch_args" | jq .ram | tr -d '"') +qos=$(echo "$sbatch_args" | jq .qos | tr -d '"') +batch_script_path=$(echo "$sbatch_args" | jq .batch_script_path | tr -d '"') + +if [ "$qos" == "48h" ] ; then # QOS not set, the default of 48h is used - sbatch --partition="$1" --time="$2" --output="$3" --cpus-per-task="$4" --mem="$5" "$7" "$8" "$9" "${10}" "${11}" "${12}" "${13}" "${14}" "${15}" "${16}" "${17}" "${18}" "${19}" + sbatch --partition="$partition" --time="$deadline_time" --output="$output" --cpus-per-task="$cpus_per_task" --mem="$memory" "$batch_script_path" "$2" else - sbatch --partition="$1" --time="$2" --output="$3" --cpus-per-task="$4" --mem="$5" --qos="$6" "$7" "$8" "$9" "${10}" "${11}" "${12}" "${13}" "${14}" "${15}" "${16}" "${17}" "${18}" "${19}" + sbatch --partition="$partition" --time="$deadline_time" --output="$output" --cpus-per-task="$cpus_per_task" --mem="$memory" --qos="$qos" "$batch_script_path" "$2" fi -echo "0:$0 1:$1 2:$2 3:$3 4:$4 5:$5 6:$6 7:$7 8:$8 9:$9 10:${10}" -echo "11:${11} 12:${12} 13:${13} 14:${14} 15:${15} 16:${16} 17:${17} 18:${18} 19:${19}" +echo "executed wrapper script: $0" +echo "partition: $partition" +echo "deadline_time: $deadline_time" +echo "output: $output" +echo "cpus_per_task: $cpus_per_task" +echo "memory: $memory" +echo "qos: $qos" +echo "batch_script_path: $batch_script_path" diff --git a/src/utils/operandi_utils/hpc/constants.py b/src/utils/operandi_utils/hpc/constants.py index 5e0210d4..1d7ef807 100644 --- a/src/utils/operandi_utils/hpc/constants.py +++ b/src/utils/operandi_utils/hpc/constants.py @@ -61,12 +61,13 @@ HPC_DIR_BATCH_SCRIPTS = "batch_scripts" HPC_DIR_SLURM_WORKSPACES = "slurm_workspaces" # TODO: Fix the constant file name - 
it should be automatically resolved -HPC_BATCH_SUBMIT_WORKFLOW_JOB = f"{HPC_NHR_SCRATCH_EMMY_HDD}/{HPC_DIR_BATCH_SCRIPTS}/batch_submit_workflow_job.sh" -HPC_WRAPPER_SUBMIT_WORKFLOW_JOB = f"{HPC_NHR_SCRATCH_EMMY_HDD}/{HPC_DIR_BATCH_SCRIPTS}/wrapper_submit_workflow_job.sh" +# TODO: Fix the naming when releasing the next Operandi version +HPC_BATCH_SUBMIT_WORKFLOW_JOB = f"{HPC_NHR_SCRATCH_EMMY_HDD}/{HPC_DIR_BATCH_SCRIPTS}/batch_submit_workflow_job2.sh" +HPC_WRAPPER_SUBMIT_WORKFLOW_JOB = f"{HPC_NHR_SCRATCH_EMMY_HDD}/{HPC_DIR_BATCH_SCRIPTS}/wrapper_submit_workflow_job2.sh" HPC_WRAPPER_CHECK_WORKFLOW_JOB_STATUS = f"{HPC_NHR_SCRATCH_EMMY_HDD}/{HPC_DIR_BATCH_SCRIPTS}/wrapper_check_workflow_job_status.sh" HPC_JOB_DEADLINE_TIME_REGULAR = "48:00:00" -HPC_JOB_DEADLINE_TIME_TEST = "0:30:00" +HPC_JOB_DEADLINE_TIME_TEST = "00:30:00" HPC_NHR_JOB_DEFAULT_PARTITION = "standard96s:shared" HPC_NHR_JOB_TEST_PARTITION = "standard96s:shared" diff --git a/src/utils/operandi_utils/hpc/nhr_executor.py b/src/utils/operandi_utils/hpc/nhr_executor.py index 7636f2f2..3f679169 100644 --- a/src/utils/operandi_utils/hpc/nhr_executor.py +++ b/src/utils/operandi_utils/hpc/nhr_executor.py @@ -1,3 +1,4 @@ +from json import dumps from logging import getLogger from pathlib import Path from time import sleep @@ -48,30 +49,31 @@ def trigger_slurm_job( use_mets_server_bash_flag = "true" if use_mets_server else "false" command = f"{HPC_WRAPPER_SUBMIT_WORKFLOW_JOB}" + sbatch_args = { + "partition": partition, + "job_deadline_time": job_deadline_time, + "output_log": f"{self.slurm_workspaces_dir}/{workflow_job_id}/slurm-job-%J.txt", + "cpus": cpus, + "ram": f"{ram}G", + "qos": qos, + "batch_script_path": HPC_BATCH_SUBMIT_WORKFLOW_JOB + } + regular_args = { + "scratch_base_dir": self.slurm_workspaces_dir, + "workflow_job_id": workflow_job_id, + "nextflow_script_id": nextflow_script_id, + "input_file_group": input_file_grp, + "workspace_id": workspace_id, + "mets_basename": mets_basename, + "cpus": cpus, + "ram": ram, + "nf_process_forks": nf_process_forks, + "ws_pages_amount": ws_pages_amount, + "use_mets_server_bash_flag": use_mets_server_bash_flag, + "file_groups_to_remove": file_groups_to_remove + } - # SBATCH arguments passed to the batch script - command += f" {partition}" - command += f" {job_deadline_time}" - command += f" {self.slurm_workspaces_dir}/{workflow_job_id}/slurm-job-%J.txt" - command += f" {cpus}" - command += f" {ram}G" - command += f" {qos}" - - # Regular arguments passed to the batch script - command += f" {HPC_BATCH_SUBMIT_WORKFLOW_JOB}" - command += f" {self.slurm_workspaces_dir}" - command += f" {workflow_job_id}" - command += f" {nextflow_script_id}" - command += f" {input_file_grp}" - command += f" {workspace_id}" - command += f" {mets_basename}" - command += f" {cpus}" - command += f" {ram}" - command += f" {nf_process_forks}" - command += f" {ws_pages_amount}" - command += f" {use_mets_server_bash_flag}" - command += f" {file_groups_to_remove}" - + command += f" '{dumps(sbatch_args)}' '{dumps(regular_args)}'" self.logger.info(f"About to execute a force command: {command}") output, err, return_code = self.execute_blocking(command) self.logger.info(f"Command output: {output}") diff --git a/tests/tests_utils/_test_3_hpc/__init__.py b/tests/tests_utils/test_3_hpc/__init__.py similarity index 100% rename from tests/tests_utils/_test_3_hpc/__init__.py rename to tests/tests_utils/test_3_hpc/__init__.py diff --git a/tests/tests_utils/_test_3_hpc/test_1_nhr_executor.py 
b/tests/tests_utils/test_3_hpc/_test_1_nhr_executor.py similarity index 100% rename from tests/tests_utils/_test_3_hpc/test_1_nhr_executor.py rename to tests/tests_utils/test_3_hpc/_test_1_nhr_executor.py diff --git a/tests/tests_utils/_test_3_hpc/test_2_nhr_transfer.py b/tests/tests_utils/test_3_hpc/_test_2_nhr_transfer.py similarity index 100% rename from tests/tests_utils/_test_3_hpc/test_2_nhr_transfer.py rename to tests/tests_utils/test_3_hpc/_test_2_nhr_transfer.py diff --git a/tests/tests_utils/_test_3_hpc/test_3_nhr_combined.py b/tests/tests_utils/test_3_hpc/test_3_nhr_combined.py similarity index 97% rename from tests/tests_utils/_test_3_hpc/test_3_nhr_combined.py rename to tests/tests_utils/test_3_hpc/test_3_nhr_combined.py index 3309b538..73d7ad09 100644 --- a/tests/tests_utils/_test_3_hpc/test_3_nhr_combined.py +++ b/tests/tests_utils/test_3_hpc/test_3_nhr_combined.py @@ -62,7 +62,7 @@ def test_hpc_connector_run_batch_script( workflow_job_id=ID_WORKFLOW_JOB, nextflow_script_path=Path(template_workflow), input_file_grp=DEFAULT_FILE_GRP, workspace_id=ID_WORKSPACE, mets_basename=DEFAULT_METS_BASENAME, nf_process_forks=2, ws_pages_amount=8, use_mets_server=False, - file_groups_to_remove="", cpus=2, ram=16, job_deadline_time=HPC_JOB_DEADLINE_TIME_TEST, + file_groups_to_remove="", cpus=2, ram=8, job_deadline_time=HPC_JOB_DEADLINE_TIME_TEST, partition=HPC_NHR_JOB_TEST_PARTITION, qos=HPC_JOB_QOS_SHORT) finished_successfully = hpc_nhr_command_executor.poll_till_end_slurm_job_state( slurm_job_id=slurm_job_id, interval=5, timeout=300) @@ -84,7 +84,7 @@ def test_hpc_connector_run_batch_script_with_ms( workflow_job_id=ID_WORKFLOW_JOB_WITH_MS, nextflow_script_path=Path(template_workflow_with_ms), input_file_grp=DEFAULT_FILE_GRP, workspace_id=ID_WORKSPACE_WITH_MS, mets_basename=DEFAULT_METS_BASENAME, nf_process_forks=2, ws_pages_amount=8, - use_mets_server=True, file_groups_to_remove="", cpus=3, ram=16, job_deadline_time=HPC_JOB_DEADLINE_TIME_TEST, + use_mets_server=True, file_groups_to_remove="", cpus=3, ram=8, job_deadline_time=HPC_JOB_DEADLINE_TIME_TEST, partition=HPC_NHR_JOB_TEST_PARTITION, qos=HPC_JOB_QOS_SHORT) finished_successfully = hpc_nhr_command_executor.poll_till_end_slurm_job_state( slurm_job_id=slurm_job_id, interval=5, timeout=300) From 17c15f66d616f69cc2817a684c278d3e4a0fbf77 Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Tue, 12 Nov 2024 17:02:02 +0100 Subject: [PATCH 02/16] clean: remove redundant comments --- .../batch_submit_workflow_job.sh | 70 ++++--------------- .../wrapper_submit_workflow_job.sh | 32 +-------- src/utils/operandi_utils/hpc/nhr_executor.py | 2 +- ...nhr_executor.py => test_1_nhr_executor.py} | 0 ...nhr_transfer.py => test_2_nhr_transfer.py} | 0 .../test_3_hpc/test_3_nhr_combined.py | 4 +- 6 files changed, 17 insertions(+), 91 deletions(-) rename tests/tests_utils/test_3_hpc/{_test_1_nhr_executor.py => test_1_nhr_executor.py} (100%) rename tests/tests_utils/test_3_hpc/{_test_2_nhr_transfer.py => test_2_nhr_transfer.py} (100%) diff --git a/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh b/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh index d5476663..849b6ae3 100755 --- a/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh +++ b/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh @@ -4,28 +4,15 @@ set -e module purge module load jq +module load apptainer +module load nextflow +# module load spack-user; eval "$(spack load --sh curl%gcc@10.2.0)" -# Parameters are as 
follows: -# S0 - This batch script -# S1 - The scratch base for slurm workspaces -# $2 - Workflow job id -# $3 - Nextflow script id -# $4 - Entry input file group -# $5 - Workspace id -# $6 - Mets basename - default "mets.xml" -# $7 - CPUs for the Nextflow processes -# $8 - RAM for the Nextflow processes -# $9 - Amount of forks per OCR-D processor in the NF script -# $10 - Amount of pages in the workspace -# $11 - Boolean flag showing whether a mets server is utilized or not -# $12 - File groups to be removed from the workspace after the processing +hostname +# /opt/slurm/etc/scripts/misc/slurm_resource -SIF_PATH="/mnt/lustre-emmy-hdd/projects/project_pwieder_ocr_nhr/ocrd_all_maximum_image.sif" -SIF_PATH_IN_NODE="${TMP_LOCAL}/ocrd_all_maximum_image.sif" -OCRD_MODELS_DIR="/mnt/lustre-emmy-hdd/projects/project_pwieder_ocr_nhr/ocrd_models" -OCRD_MODELS_DIR_IN_NODE="${TMP_LOCAL}/ocrd_models" -OCRD_MODELS_DIR_IN_DOCKER="/usr/local/share/ocrd-resources" -BIND_OCRD_MODELS="${OCRD_MODELS_DIR_IN_NODE}/ocrd-resources:${OCRD_MODELS_DIR_IN_DOCKER}" +# To submit separate jobs for each process in the NF script +# export NXF_EXECUTOR=slurm json_args="$1" SCRATCH_BASE=$(echo "$json_args" | jq .scratch_base_dir | tr -d '"') @@ -41,18 +28,12 @@ PAGES=$(echo "$json_args" | jq .ws_pages_amount | tr -d '"') USE_METS_SERVER=$(echo "$json_args" | jq .use_mets_server_bash_flag | tr -d '"') FILE_GROUPS_TO_REMOVE=$(echo "$json_args" | jq .file_groups_to_remove | tr -d '"') -echo "SCRATCH_BASE: $SCRATCH_BASE" -echo "WORKFLOW_JOB_ID: $WORKFLOW_JOB_ID" -echo "NEXTFLOW_SCRIPT_ID: $NEXTFLOW_SCRIPT_ID" -echo "IN_FILE_GRP: $IN_FILE_GRP" -echo "WORKSPACE_ID: $WORKSPACE_ID" -echo "METS_BASENAME: $METS_BASENAME" -echo "CPUS: $CPUS" -echo "RAM: $RAM" -echo "FORKS: $FORKS" -echo "PAGES: $PAGES" -echo "USE_METS_SERVER: $USE_METS_SERVER" -echo "FILE_GROUPS_TO_REMOVE: $FILE_GROUPS_TO_REMOVE" +SIF_PATH="/mnt/lustre-emmy-hdd/projects/project_pwieder_ocr_nhr/ocrd_all_maximum_image.sif" +SIF_PATH_IN_NODE="${TMP_LOCAL}/ocrd_all_maximum_image.sif" +OCRD_MODELS_DIR="/mnt/lustre-emmy-hdd/projects/project_pwieder_ocr_nhr/ocrd_models" +OCRD_MODELS_DIR_IN_NODE="${TMP_LOCAL}/ocrd_models" +OCRD_MODELS_DIR_IN_DOCKER="/usr/local/share/ocrd-resources" +BIND_OCRD_MODELS="${OCRD_MODELS_DIR_IN_NODE}/ocrd-resources:${OCRD_MODELS_DIR_IN_DOCKER}" WORKFLOW_JOB_DIR="${SCRATCH_BASE}/${WORKFLOW_JOB_ID}" NF_SCRIPT_PATH="${WORKFLOW_JOB_DIR}/${NEXTFLOW_SCRIPT_ID}" @@ -63,7 +44,6 @@ BIND_METS_FILE_PATH="${WORKSPACE_DIR_IN_DOCKER}/${METS_BASENAME}" METS_SOCKET_BASENAME="mets_server.sock" BIND_METS_SOCKET_PATH="${WORKSPACE_DIR_IN_DOCKER}/${METS_SOCKET_BASENAME}" - echo "ocrd all SIF path: $SIF_PATH" echo "ocrd all SIF path node local: $SIF_PATH_IN_NODE" echo "Workspace dir: $WORKSPACE_DIR" @@ -72,17 +52,6 @@ echo "Use mets server: $USE_METS_SERVER" echo "Used file group: $IN_FILE_GRP" echo "Pages: $PAGES" -module load apptainer -module load nextflow -# module load spack-user; eval "$(spack load --sh curl%gcc@10.2.0)" - -hostname -# /opt/slurm/etc/scripts/misc/slurm_resource - -# To submit separate jobs for each process in the NF script -# export NXF_EXECUTOR=slurm - - # Define functions to be used check_existence_of_paths() { # The SIF file of the OCR-D All docker image must be previously created @@ -159,13 +128,6 @@ unzip_workflow_job_dir() { } start_mets_server() { - # TODO: Would be better to start the mets server as an instance, but this is still broken - # apptainer instance start \ - # --bind "${BIND_WORKSPACE_DIR}" \ - # "${SIF_PATH_IN_NODE}" \ - # 
instance_mets_server \ - # ocrd workspace -U "${BIND_METS_SOCKET_PATH}" -d "${WORKSPACE_DIR_IN_DOCKER}" server start - if [ "$1" == "true" ] ; then echo "Starting the mets server for the specific workspace in the background" apptainer exec \ @@ -178,12 +140,6 @@ start_mets_server() { } stop_mets_server() { - # Not supported in the HPC (the version there is <7.40) - # curl -X DELETE --unix-socket "${WORKSPACE_DIR}/${METS_SOCKET_BASENAME}" "http://localhost/" - - # TODO Stop the instance here - # singularity instance stop instance_mets_server - if [ "$1" == "true" ] ; then echo "Stopping the mets server" apptainer exec \ diff --git a/src/utils/operandi_utils/hpc/batch_scripts/wrapper_submit_workflow_job.sh b/src/utils/operandi_utils/hpc/batch_scripts/wrapper_submit_workflow_job.sh index 32a02ff5..b662459b 100644 --- a/src/utils/operandi_utils/hpc/batch_scripts/wrapper_submit_workflow_job.sh +++ b/src/utils/operandi_utils/hpc/batch_scripts/wrapper_submit_workflow_job.sh @@ -1,27 +1,5 @@ #!/bin/bash -# $0 - This bash script -# $1 - Slurm parameter - partition -# $2 - Slurm parameter - time -# $3 - Slurm parameter - output -# $4 - Slurm parameter - cpus-per-task -# $5 - Slurm parameter - mem -# $6 - Slurm parameter - qos - -# $7 - The batch script path to execute -# $8 - The scratch base for slurm workspaces -# $9 - Workflow job id -# $10 - Nextflow script id -# $11 - Entry input file group -# $12 - Workspace id -# $13 - Mets basename - default "mets.xml" -# $14 - CPUs for the Nextflow processes -# $15 - RAM for the Nextflow processes -# $16 - Amount of forks per OCR-D processor in the NF script -# $17 - Amount of pages in the workspace -# $18 - Boolean flag showing whether a mets server is utilized or not -# $19 - File groups to be removed from the workspace after the processing - module purge module load jq @@ -34,18 +12,10 @@ memory=$(echo "$sbatch_args" | jq .ram | tr -d '"') qos=$(echo "$sbatch_args" | jq .qos | tr -d '"') batch_script_path=$(echo "$sbatch_args" | jq .batch_script_path | tr -d '"') +# $2 is a json of regular arguments used inside the `batch_submit_workflow_job.sh` if [ "$qos" == "48h" ] ; then # QOS not set, the default of 48h is used sbatch --partition="$partition" --time="$deadline_time" --output="$output" --cpus-per-task="$cpus_per_task" --mem="$memory" "$batch_script_path" "$2" else sbatch --partition="$partition" --time="$deadline_time" --output="$output" --cpus-per-task="$cpus_per_task" --mem="$memory" --qos="$qos" "$batch_script_path" "$2" fi - -echo "executed wrapper script: $0" -echo "partition: $partition" -echo "deadline_time: $deadline_time" -echo "output: $output" -echo "cpus_per_task: $cpus_per_task" -echo "memory: $memory" -echo "qos: $qos" -echo "batch_script_path: $batch_script_path" diff --git a/src/utils/operandi_utils/hpc/nhr_executor.py b/src/utils/operandi_utils/hpc/nhr_executor.py index 3f679169..d450cbc6 100644 --- a/src/utils/operandi_utils/hpc/nhr_executor.py +++ b/src/utils/operandi_utils/hpc/nhr_executor.py @@ -72,8 +72,8 @@ def trigger_slurm_job( "use_mets_server_bash_flag": use_mets_server_bash_flag, "file_groups_to_remove": file_groups_to_remove } - command += f" '{dumps(sbatch_args)}' '{dumps(regular_args)}'" + self.logger.info(f"About to execute a force command: {command}") output, err, return_code = self.execute_blocking(command) self.logger.info(f"Command output: {output}") diff --git a/tests/tests_utils/test_3_hpc/_test_1_nhr_executor.py b/tests/tests_utils/test_3_hpc/test_1_nhr_executor.py similarity index 100% rename from 
tests/tests_utils/test_3_hpc/_test_1_nhr_executor.py rename to tests/tests_utils/test_3_hpc/test_1_nhr_executor.py diff --git a/tests/tests_utils/test_3_hpc/_test_2_nhr_transfer.py b/tests/tests_utils/test_3_hpc/test_2_nhr_transfer.py similarity index 100% rename from tests/tests_utils/test_3_hpc/_test_2_nhr_transfer.py rename to tests/tests_utils/test_3_hpc/test_2_nhr_transfer.py diff --git a/tests/tests_utils/test_3_hpc/test_3_nhr_combined.py b/tests/tests_utils/test_3_hpc/test_3_nhr_combined.py index 73d7ad09..3309b538 100644 --- a/tests/tests_utils/test_3_hpc/test_3_nhr_combined.py +++ b/tests/tests_utils/test_3_hpc/test_3_nhr_combined.py @@ -62,7 +62,7 @@ def test_hpc_connector_run_batch_script( workflow_job_id=ID_WORKFLOW_JOB, nextflow_script_path=Path(template_workflow), input_file_grp=DEFAULT_FILE_GRP, workspace_id=ID_WORKSPACE, mets_basename=DEFAULT_METS_BASENAME, nf_process_forks=2, ws_pages_amount=8, use_mets_server=False, - file_groups_to_remove="", cpus=2, ram=8, job_deadline_time=HPC_JOB_DEADLINE_TIME_TEST, + file_groups_to_remove="", cpus=2, ram=16, job_deadline_time=HPC_JOB_DEADLINE_TIME_TEST, partition=HPC_NHR_JOB_TEST_PARTITION, qos=HPC_JOB_QOS_SHORT) finished_successfully = hpc_nhr_command_executor.poll_till_end_slurm_job_state( slurm_job_id=slurm_job_id, interval=5, timeout=300) @@ -84,7 +84,7 @@ def test_hpc_connector_run_batch_script_with_ms( workflow_job_id=ID_WORKFLOW_JOB_WITH_MS, nextflow_script_path=Path(template_workflow_with_ms), input_file_grp=DEFAULT_FILE_GRP, workspace_id=ID_WORKSPACE_WITH_MS, mets_basename=DEFAULT_METS_BASENAME, nf_process_forks=2, ws_pages_amount=8, - use_mets_server=True, file_groups_to_remove="", cpus=3, ram=8, job_deadline_time=HPC_JOB_DEADLINE_TIME_TEST, + use_mets_server=True, file_groups_to_remove="", cpus=3, ram=16, job_deadline_time=HPC_JOB_DEADLINE_TIME_TEST, partition=HPC_NHR_JOB_TEST_PARTITION, qos=HPC_JOB_QOS_SHORT) finished_successfully = hpc_nhr_command_executor.poll_till_end_slurm_job_state( slurm_job_id=slurm_job_id, interval=5, timeout=300) From 5caaa98f2f84125132ee62b0bfae8b220af745ee Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Sun, 24 Nov 2024 21:09:04 +0100 Subject: [PATCH 03/16] another portion of changes to slim containers --- .../batch_check_ocrd_all_version.sh | 94 ++++++- .../batch_create_ocrd_all_maximum_sif.sh | 2 +- .../batch_download_ocrd_all_models.sh | 4 +- .../batch_submit_workflow_job.sh | 238 ++++++++++-------- src/utils/operandi_utils/hpc/constants.py | 5 +- src/utils/operandi_utils/hpc/nhr_connector.py | 7 +- src/utils/operandi_utils/hpc/nhr_executor.py | 86 ++++++- ...hr_executor.py => _test_1_nhr_executor.py} | 0 ...hr_transfer.py => _test_2_nhr_transfer.py} | 0 9 files changed, 316 insertions(+), 120 deletions(-) rename tests/tests_utils/test_3_hpc/{test_1_nhr_executor.py => _test_1_nhr_executor.py} (100%) rename tests/tests_utils/test_3_hpc/{test_2_nhr_transfer.py => _test_2_nhr_transfer.py} (100%) diff --git a/src/utils/operandi_utils/hpc/batch_scripts/batch_check_ocrd_all_version.sh b/src/utils/operandi_utils/hpc/batch_scripts/batch_check_ocrd_all_version.sh index 24466582..35372adc 100644 --- a/src/utils/operandi_utils/hpc/batch_scripts/batch_check_ocrd_all_version.sh +++ b/src/utils/operandi_utils/hpc/batch_scripts/batch_check_ocrd_all_version.sh @@ -1,6 +1,6 @@ #!/bin/bash -#SBATCH --partition standard96:shared -#SBATCH --time 00:05:00 +#SBATCH --partition standard96s:shared +#SBATCH --time 00:20:00 #SBATCH --qos 2h #SBATCH --output check_ocrd_all_version_job-%J.txt #SBATCH 
--cpus-per-task 1 @@ -9,12 +9,96 @@ set -e hostname -/opt/slurm/etc/scripts/misc/slurm_resources +# /opt/slurm/etc/scripts/misc/slurm_resources module purge module load apptainer -SIF_PATH="/mnt/lustre-emmy-hdd/projects/project_pwieder_ocr_nhr/ocrd_all_maximum_image.sif" +SIF_PATH="/mnt/lustre-emmy-hdd/projects/project_pwieder_ocr_nhr/ocrd_processor_sifs/ocrd_all_maximum_image.sif" -apptainer exec "$SIF_PATH" ocrd --version apptainer exec "$SIF_PATH" ocrd-tesserocr-recognize --dump-module-dir apptainer exec "$SIF_PATH" ls -la /models + +ocrd_processors=( +"ocrd-anybaseocr-binarize" +"ocrd-anybaseocr-block-segmentation" +"ocrd-anybaseocr-crop" +"ocrd-anybaseocr-deskew" +"ocrd-anybaseocr-dewarp" +"ocrd-anybaseocr-layout-analysis" +"ocrd-anybaseocr-textline" +"ocrd-anybaseocr-tiseg" +"ocrd-calamari-recognize" +"ocrd-cis-align" +"ocrd-cis-data" +"ocrd-cis-ocropy-binarize" +"ocrd-cis-ocropy-clip" +"ocrd-cis-ocropy-denoise" +"ocrd-cis-ocropy-deskew" +"ocrd-cis-ocropy-dewarp" +"ocrd-cis-ocropy-recognize" +"ocrd-cis-ocropy-resegment" +"ocrd-cis-ocropy-segment" +"ocrd-cis-ocropy-train" +"ocrd-cis-postcorrect" +"ocrd-cor-asv-ann-align" +"ocrd-cor-asv-ann-evaluate" +"ocrd-cor-asv-ann-join" +"ocrd-cor-asv-ann-mark" +"ocrd-cor-asv-ann-process" +"ocrd-detectron2-segment" +"ocrd-dinglehopper" +"ocrd-docstruct" +"ocrd-doxa-binarize" +"ocrd-dummy" +"ocrd-eynollah-segment" +"ocrd-fileformat-transform" +"ocrd-froc-recognize" +"ocrd-im6convert" +"ocrd-import" +"ocrd-keraslm-rate" +"ocrd-kraken-binarize" +"ocrd-kraken-recognize" +"ocrd-kraken-segment" +"ocrd-make" +"ocrd-nmalign-merge" +"ocrd-olahd-client" +"ocrd-olena-binarize" +"ocrd-page2alto-transform" +"ocrd-pagetopdf" +"ocrd-page-transform" +"ocrd-preprocess-image" +"ocrd-repair-inconsistencies" +"ocrd-sbb-binarize" +"ocrd-segment-evaluate" +"ocrd-segment-extract-glyphs" +"ocrd-segment-extract-lines" +"ocrd-segment-extract-pages" +"ocrd-segment-extract-regions" +"ocrd-segment-extract-words" +"ocrd-segment-from-coco" +"ocrd-segment-from-masks" +"ocrd-segment-project" +"ocrd-segment-repair" +"ocrd-segment-replace-original" +"ocrd-segment-replace-page" +"ocrd-segment-replace-text" +"ocrd-skimage-binarize" +"ocrd-skimage-denoise" +"ocrd-skimage-denoise-raw" +"ocrd-skimage-normalize" +"ocrd-tesserocr-binarize" +"ocrd-tesserocr-crop" +"ocrd-tesserocr-deskew" +"ocrd-tesserocr-fontshape" +"ocrd-tesserocr-recognize" +"ocrd-tesserocr-segment" +"ocrd-tesserocr-segment-line" +"ocrd-tesserocr-segment-region" +"ocrd-tesserocr-segment-table" +"ocrd-tesserocr-segment-word" +) + +for ocrd_processor in "${ocrd_processors[@]}" +do + echo -n "$ocrd_processor " & apptainer exec "$SIF_PATH" "$ocrd_processor" --version || true +done diff --git a/src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_all_maximum_sif.sh b/src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_all_maximum_sif.sh index 96500594..7ac3f522 100755 --- a/src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_all_maximum_sif.sh +++ b/src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_all_maximum_sif.sh @@ -1,5 +1,5 @@ #!/bin/bash -#SBATCH --partition standard96:shared +#SBATCH --partition standard96s:shared #SBATCH --time 2:00:00 #SBATCH --output create_ocrd_all_sif_job-%J.txt #SBATCH --cpus-per-task 16 diff --git a/src/utils/operandi_utils/hpc/batch_scripts/batch_download_ocrd_all_models.sh b/src/utils/operandi_utils/hpc/batch_scripts/batch_download_ocrd_all_models.sh index 05e543ab..fffdf2d5 100644 --- 
a/src/utils/operandi_utils/hpc/batch_scripts/batch_download_ocrd_all_models.sh +++ b/src/utils/operandi_utils/hpc/batch_scripts/batch_download_ocrd_all_models.sh @@ -1,5 +1,5 @@ #!/bin/bash -#SBATCH --partition standard96:shared +#SBATCH --partition standard96s:shared #SBATCH --time 6:00:00 #SBATCH --output download_all_ocrd_models_job-%J.txt #SBATCH --cpus-per-task 16 @@ -14,7 +14,7 @@ hostname /opt/slurm/etc/scripts/misc/slurm_resources # This sif file is generated with another batch script -SIF_PATH="/mnt/lustre-emmy-hdd/projects/project_pwieder_ocr_nhr/ocrd_all_maximum_image.sif" +SIF_PATH="/mnt/lustre-emmy-hdd/projects/project_pwieder_ocr_nhr/ocrd_processor_sifs/ocrd_all_maximum_image.sif" OCRD_MODELS_DIR="/mnt/lustre-emmy-hdd/projects/project_pwieder_ocr_nhr/ocrd_models" OCRD_MODELS_DIR_IN_DOCKER="/usr/local/share" diff --git a/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh b/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh index 849b6ae3..8f807194 100755 --- a/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh +++ b/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh @@ -14,92 +14,129 @@ hostname # To submit separate jobs for each process in the NF script # export NXF_EXECUTOR=slurm +# TODO: Use the -r (or --raw-output) option to emit raw strings as output: json_args="$1" + +ocrd_processor_images=() +mapfile -t ocrd_processor_images < <(echo "$json_args" | jq .ocrd_processor_images | tr -d '"' | tr "," "\n") +echo "Ocrd total images in request: ${#ocrd_processor_images[@]}" +echo "Ocrd images: " +for ocrd_image in "${ocrd_processor_images[@]}" +do + echo -n "${ocrd_image} " +done + +PROJECT_BASE_DIR=$(echo "$json_args" | jq .project_base_dir | tr -d '"') SCRATCH_BASE=$(echo "$json_args" | jq .scratch_base_dir | tr -d '"') WORKFLOW_JOB_ID=$(echo "$json_args" | jq .workflow_job_id | tr -d '"') -NEXTFLOW_SCRIPT_ID=$(echo "$json_args" | jq .nextflow_script_id | tr -d '"') -IN_FILE_GRP=$(echo "$json_args" | jq .input_file_group | tr -d '"') WORKSPACE_ID=$(echo "$json_args" | jq .workspace_id | tr -d '"') -METS_BASENAME=$(echo "$json_args" | jq .mets_basename | tr -d '"') -CPUS=$(echo "$json_args" | jq .cpus | tr -d '"') -RAM=$(echo "$json_args" | jq .ram | tr -d '"') -FORKS=$(echo "$json_args" | jq .nf_process_forks | tr -d '"') -PAGES=$(echo "$json_args" | jq .ws_pages_amount | tr -d '"') USE_METS_SERVER=$(echo "$json_args" | jq .use_mets_server_bash_flag | tr -d '"') FILE_GROUPS_TO_REMOVE=$(echo "$json_args" | jq .file_groups_to_remove | tr -d '"') -SIF_PATH="/mnt/lustre-emmy-hdd/projects/project_pwieder_ocr_nhr/ocrd_all_maximum_image.sif" -SIF_PATH_IN_NODE="${TMP_LOCAL}/ocrd_all_maximum_image.sif" -OCRD_MODELS_DIR="/mnt/lustre-emmy-hdd/projects/project_pwieder_ocr_nhr/ocrd_models" -OCRD_MODELS_DIR_IN_NODE="${TMP_LOCAL}/ocrd_models" -OCRD_MODELS_DIR_IN_DOCKER="/usr/local/share/ocrd-resources" -BIND_OCRD_MODELS="${OCRD_MODELS_DIR_IN_NODE}/ocrd-resources:${OCRD_MODELS_DIR_IN_DOCKER}" - -WORKFLOW_JOB_DIR="${SCRATCH_BASE}/${WORKFLOW_JOB_ID}" -NF_SCRIPT_PATH="${WORKFLOW_JOB_DIR}/${NEXTFLOW_SCRIPT_ID}" -WORKSPACE_DIR="${WORKFLOW_JOB_DIR}/${WORKSPACE_ID}" -WORKSPACE_DIR_IN_DOCKER="/ws_data" -BIND_WORKSPACE_DIR="${WORKSPACE_DIR}:${WORKSPACE_DIR_IN_DOCKER}" -BIND_METS_FILE_PATH="${WORKSPACE_DIR_IN_DOCKER}/${METS_BASENAME}" -METS_SOCKET_BASENAME="mets_server.sock" -BIND_METS_SOCKET_PATH="${WORKSPACE_DIR_IN_DOCKER}/${METS_SOCKET_BASENAME}" - -echo "ocrd all SIF path: $SIF_PATH" -echo "ocrd all SIF path node 
local: $SIF_PATH_IN_NODE" +WORKFLOW_JOB_DIR=$(echo "$json_args" | jq .hpc_workflow_job_dir | tr -d '"') +WORKSPACE_DIR=$(echo "$json_args" | jq .hpc_workspace_dir | tr -d '"') +NF_RUN_COMMAND=$(echo "$json_args" | jq .nf_run_command | tr -d '"') +START_METS_SERVER_COMMAND=$(echo "$json_args" | jq .start_mets_server_command | tr -d '"') +STOP_METS_SERVER_COMMAND=$(echo "$json_args" | jq .stop_mets_server_command | tr -d '"') +LIST_FILE_GROUPS_COMMAND=$(echo "$json_args" | jq .list_file_groups_command | tr -d '"') +REMOVE_FILE_GROUP_COMMAND=$(echo "$json_args" | jq .remove_file_group_command | tr -d '"') + +PROJECT_DIR_OCRD_MODELS="${PROJECT_BASE_DIR}/ocrd_models" +PROJECT_DIR_PROCESSOR_SIFS="${PROJECT_BASE_DIR}/ocrd_processor_sifs" +PROJECT_SIF_PATH_OCRD_ALL="${PROJECT_BASE_DIR}/ocrd_processor_sifs/ocrd_all_maximum_image.sif" + +NODE_DIR_OCRD_MODELS="${TMP_LOCAL}/ocrd_models" +NODE_DIR_PROCESSOR_SIFS="${TMP_LOCAL}/ocrd_processor_sifs" +NODE_SIF_PATH_OCRD_ALL="${TMP_LOCAL}/ocrd_processor_sifs/ocrd_all_maximum_image.sif" + +echo "" +echo "Project dir ocrd models: $PROJECT_DIR_OCRD_MODELS" +echo "Project dir processor sifs: $PROJECT_DIR_PROCESSOR_SIFS" +echo "Project sif path ocrd all: $PROJECT_SIF_PATH_OCRD_ALL" +echo "Node dir ocrd models: $NODE_DIR_OCRD_MODELS" +echo "Node dir processor sifs: $NODE_DIR_PROCESSOR_SIFS" +echo "Node sif path ocrd all: $NODE_SIF_PATH_OCRD_ALL" +echo "" + echo "Workspace dir: $WORKSPACE_DIR" -echo "Nextflow script path: $NF_SCRIPT_PATH" echo "Use mets server: $USE_METS_SERVER" -echo "Used file group: $IN_FILE_GRP" -echo "Pages: $PAGES" -# Define functions to be used -check_existence_of_paths() { - # The SIF file of the OCR-D All docker image must be previously created - if [ ! -f "${SIF_PATH}" ]; then - echo "Required ocrd_all_image sif file not found at: ${SIF_PATH}" - exit 1 - fi - echo "Required ocrd_all_image sif file found at: ${SIF_PATH}" +echo "" +echo "Nf run command with Node placeholders: $NF_RUN_COMMAND" +NF_RUN_COMMAND="${NF_RUN_COMMAND//PH_NODE_SIF_PATH_OCRD_ALL/$NODE_SIF_PATH_OCRD_ALL}" +NF_RUN_COMMAND="${NF_RUN_COMMAND//PH_NODE_DIR_OCRD_MODELS/$NODE_DIR_OCRD_MODELS}" +NF_RUN_COMMAND="${NF_RUN_COMMAND//PH_CMD_WRAPPER/\'}" +echo "" +echo "Nf run command without placeholders: $NF_RUN_COMMAND" +echo "" - # Models directory must be previously filled with OCR-D models - if [ ! -d "${OCRD_MODELS_DIR}" ]; then - echo "Ocrd models directory not found at: ${OCRD_MODELS_DIR}" - exit 1 - fi - echo "Ocrd models directory found at: ${OCRD_MODELS_DIR}" +echo "Replacing ocrd core image sif placeholder of commands" +START_METS_SERVER_COMMAND="${START_METS_SERVER_COMMAND//PH_NODE_SIF_PATH_OCRD_ALL/$NODE_SIF_PATH_OCRD_ALL}" +STOP_METS_SERVER_COMMAND="${STOP_METS_SERVER_COMMAND//PH_NODE_SIF_PATH_OCRD_ALL/$NODE_SIF_PATH_OCRD_ALL}" +LIST_FILE_GROUPS_COMMAND="${LIST_FILE_GROUPS_COMMAND//PH_NODE_SIF_PATH_OCRD_ALL/$NODE_SIF_PATH_OCRD_ALL}" +REMOVE_FILE_GROUP_COMMAND="${REMOVE_FILE_GROUP_COMMAND//PH_NODE_SIF_PATH_OCRD_ALL/$NODE_SIF_PATH_OCRD_ALL}" +echo "" +check_existence_of_dir_scratch_base(){ if [ ! -d "${SCRATCH_BASE}" ]; then + echo "Creating non-existing SCRATCH_BASE folder" mkdir -p "${SCRATCH_BASE}" fi - if [ ! 
-d "${SCRATCH_BASE}" ]; then - echo "Required scratch base dir was not created: ${SCRATCH_BASE}" + echo "Required scratch base dir was not found: ${SCRATCH_BASE}" exit 1 fi echo "Scratch base found/created at: ${SCRATCH_BASE}" } -clear_data_from_computing_node() { - echo "If existing, removing the SIF from the computing node, path: ${SIF_PATH_IN_NODE}" - rm -f "${SIF_PATH_IN_NODE}" - echo "If existing, removing the OCR-D models from the computing node, path: ${OCRD_MODELS_DIR_IN_NODE}" - rm -rf "${OCRD_MODELS_DIR_IN_NODE}" +check_existence_of_dir_ocrd_models(){ + # Models directory must be previously filled with OCR-D models + if [ ! -d "${PROJECT_DIR_OCRD_MODELS}" ]; then + echo "Ocrd models directory not found at: ${PROJECT_DIR_OCRD_MODELS}" + exit 1 + fi + echo "Ocrd models directory found at: ${PROJECT_DIR_OCRD_MODELS}" } -transfer_requirements_to_node_storage() { - cp "${SIF_PATH}" "${SIF_PATH_IN_NODE}" - # Check if transfer successful - if [ ! -f "${SIF_PATH_IN_NODE}" ]; then - echo "Required ocrd_all_image sif file not found at node local storage: ${SIF_PATH_IN_NODE}" +check_existence_of_sif_path_ocrd_all(){ + # The SIF file of the OCR-D All docker image must be previously created + if [ ! -f "${PROJECT_SIF_PATH_OCRD_ALL}" ]; then + echo "Required ocrd_all_image sif file not found at: ${PROJECT_SIF_PATH_OCRD_ALL}" exit 1 - else - echo "Successfully transferred SIF to node local storage" - apptainer exec "$SIF_PATH_IN_NODE" ocrd --version fi + echo "Required ocrd_all_image sif file found at: ${PROJECT_SIF_PATH_OCRD_ALL}" +} + +check_existence_of_ocrd_processor_images_to_be_used(){ + for ocrd_image in "${ocrd_processor_images[@]}" + do + image_path="${PROJECT_DIR_PROCESSOR_SIFS}/${ocrd_image}" + if [ ! -f "$image_path" ]; then + echo "Expected ocrd processor image not found at: $image_path" + exit 1 + fi + done +} - cp -R "${OCRD_MODELS_DIR}" "${OCRD_MODELS_DIR_IN_NODE}" - if [ ! -d "${OCRD_MODELS_DIR_IN_NODE}" ]; then - echo "Ocrd models directory not found at node local storage: ${OCRD_MODELS_DIR_IN_NODE}" +check_existence_of_paths() { + check_existence_of_dir_scratch_base + check_existence_of_dir_ocrd_models + check_existence_of_sif_path_ocrd_all + check_existence_of_ocrd_processor_images_to_be_used +} + +clear_data_from_computing_node() { + echo "" + echo "Removing the OCR-D models directory from the computing node, path: ${NODE_DIR_OCRD_MODELS}" + rm -rf "${NODE_DIR_OCRD_MODELS}" + echo "Removing the OCR-D processor images (SIF) directory from the computing node, path: ${NODE_DIR_PROCESSOR_SIFS}" + rm -rf "${NODE_DIR_PROCESSOR_SIFS}" +} + +transfer_to_node_storage_processor_models(){ + cp -R "${PROJECT_DIR_OCRD_MODELS}" "${NODE_DIR_OCRD_MODELS}" + if [ ! -d "${NODE_DIR_OCRD_MODELS}" ]; then + echo "Ocrd models directory not found at node local storage: ${NODE_DIR_OCRD_MODELS}" clear_data_from_computing_node exit 1 else @@ -107,6 +144,38 @@ transfer_requirements_to_node_storage() { fi } +transfer_to_node_storage_processor_images(){ + if [ ! -d "${NODE_DIR_PROCESSOR_SIFS}" ]; then + echo "Creating non-existing processor sif images dir: $NODE_DIR_PROCESSOR_SIFS" + mkdir -p "${NODE_DIR_PROCESSOR_SIFS}" + fi + if [ ! -d "${NODE_DIR_PROCESSOR_SIFS}" ]; then + echo "Required node processor sif images dir was not found: ${NODE_DIR_PROCESSOR_SIFS}" + exit 1 + fi + + for ocrd_image in "${ocrd_processor_images[@]}" + do + ocrd_image_path="${PROJECT_DIR_PROCESSOR_SIFS}/${ocrd_image}" + node_ocrd_image_path="${NODE_DIR_PROCESSOR_SIFS}/${ocrd_image}" + if [ ! 
-f "$ocrd_image_path" ]; then + echo "Expected ocrd processor image not found at: $ocrd_image_path" + exit 1 + else + echo "Transferring ocrd processor image to the compute node: ${ocrd_image}" + cp "${ocrd_image_path}" "${node_ocrd_image_path}" + echo "Ocrd processor image was transferred to: ${node_ocrd_image_path}" + if [ ! -f "${node_ocrd_image_path}" ]; then + echo "Expected ocrd processor image was copied but not found locally at: ${node_ocrd_image_path}" + exit 1 + fi + fi + done + echo "" + apptainer exec "$NODE_SIF_PATH_OCRD_ALL" ocrd --version + echo "" +} + unzip_workflow_job_dir() { if [ ! -f "${WORKFLOW_JOB_DIR}.zip" ]; then echo "Required scratch slurm workspace zip is not available: ${WORKFLOW_JOB_DIR}.zip" @@ -130,55 +199,25 @@ unzip_workflow_job_dir() { start_mets_server() { if [ "$1" == "true" ] ; then echo "Starting the mets server for the specific workspace in the background" - apptainer exec \ - --bind "${BIND_WORKSPACE_DIR}" \ - "${SIF_PATH_IN_NODE}" \ - ocrd workspace -U "${BIND_METS_SOCKET_PATH}" -d "${WORKSPACE_DIR_IN_DOCKER}" server start \ - > "${WORKSPACE_DIR}/mets_server.log" 2>&1 & + eval "$START_METS_SERVER_COMMAND" + sleep 10 fi - sleep 10 } stop_mets_server() { if [ "$1" == "true" ] ; then echo "Stopping the mets server" - apptainer exec \ - --bind "${BIND_WORKSPACE_DIR}" \ - "${SIF_PATH_IN_NODE}" \ - ocrd workspace -U "${BIND_METS_SOCKET_PATH}" -d "${WORKSPACE_DIR_IN_DOCKER}" server stop + eval "$STOP_METS_SERVER_COMMAND" fi } execute_nextflow_workflow() { - local APPTAINER_CMD="apptainer exec --bind ${BIND_WORKSPACE_DIR} --bind ${BIND_OCRD_MODELS} --env OCRD_METS_CACHING=false ${SIF_PATH_IN_NODE}" if [ "$1" == "true" ] ; then echo "Executing the nextflow workflow with mets server" - nextflow run "${NF_SCRIPT_PATH}" \ - -ansi-log false \ - -with-report \ - --input_file_group "${IN_FILE_GRP}" \ - --mets "${BIND_METS_FILE_PATH}" \ - --mets_socket "${BIND_METS_SOCKET_PATH}" \ - --workspace_dir "${WORKSPACE_DIR_IN_DOCKER}" \ - --pages "${PAGES}" \ - --singularity_wrapper "${APPTAINER_CMD}" \ - --cpus "${CPUS}" \ - --ram "${RAM}" \ - --forks "${FORKS}" else echo "Executing the nextflow workflow without mets server" - nextflow run "${NF_SCRIPT_PATH}" \ - -ansi-log false \ - -with-report \ - --input_file_group "${IN_FILE_GRP}" \ - --mets "${BIND_METS_FILE_PATH}" \ - --workspace_dir "${WORKSPACE_DIR_IN_DOCKER}" \ - --pages "${PAGES}" \ - --singularity_wrapper "${APPTAINER_CMD}" \ - --cpus "${CPUS}" \ - --ram "${RAM}" \ - --forks "${FORKS}" fi + eval "$NF_RUN_COMMAND" case $? 
in 0) echo "The nextflow workflow execution has finished successfully" ;; @@ -188,7 +227,7 @@ execute_nextflow_workflow() { list_file_groups_from_workspace() { all_file_groups=() - mapfile -t all_file_groups < <(apptainer exec --bind "${BIND_WORKSPACE_DIR}" "${SIF_PATH_IN_NODE}" ocrd workspace -d "${WORKSPACE_DIR_IN_DOCKER}" list-group) + mapfile -t all_file_groups < <($LIST_FILE_GROUPS_COMMAND) file_groups_length=${#all_file_groups[@]} echo -n "File groups: " for file_group in "${all_file_groups[@]}" @@ -201,9 +240,8 @@ list_file_groups_from_workspace() { remove_file_group_from_workspace() { echo "Removing file group: $1" - apptainer exec --bind "${BIND_WORKSPACE_DIR}" "${SIF_PATH_IN_NODE}" \ - ocrd workspace -d "${WORKSPACE_DIR_IN_DOCKER}" remove-group -r -f "$1" \ - > "${WORKSPACE_DIR}/remove_file_groups.log" 2>&1 + REMOVE_FILE_GROUP_COMMAND="${REMOVE_FILE_GROUP_COMMAND//FILE_GROUP_PLACEHOLDER/$1}" + eval "$REMOVE_FILE_GROUP_COMMAND" } remove_file_groups_from_workspace() { @@ -244,7 +282,9 @@ zip_results() { # Main loop for workflow job execution check_existence_of_paths unzip_workflow_job_dir -transfer_requirements_to_node_storage +echo "" +transfer_to_node_storage_processor_models +transfer_to_node_storage_processor_images start_mets_server "$USE_METS_SERVER" execute_nextflow_workflow "$USE_METS_SERVER" stop_mets_server "$USE_METS_SERVER" diff --git a/src/utils/operandi_utils/hpc/constants.py b/src/utils/operandi_utils/hpc/constants.py index 1d7ef807..156cd45a 100644 --- a/src/utils/operandi_utils/hpc/constants.py +++ b/src/utils/operandi_utils/hpc/constants.py @@ -68,8 +68,9 @@ HPC_JOB_DEADLINE_TIME_REGULAR = "48:00:00" HPC_JOB_DEADLINE_TIME_TEST = "00:30:00" -HPC_NHR_JOB_DEFAULT_PARTITION = "standard96s:shared" -HPC_NHR_JOB_TEST_PARTITION = "standard96s:shared" +# TODO: Use again "standard96s:shared" +HPC_NHR_JOB_DEFAULT_PARTITION = "standard96:shared" +HPC_NHR_JOB_TEST_PARTITION = "standard96:shared" # Check here: https://docs.hpc.gwdg.de/getting_started/transition/index.html HPC_JOB_QOS_SHORT = "2h" diff --git a/src/utils/operandi_utils/hpc/nhr_connector.py b/src/utils/operandi_utils/hpc/nhr_connector.py index 8d549bd4..6ea693bf 100644 --- a/src/utils/operandi_utils/hpc/nhr_connector.py +++ b/src/utils/operandi_utils/hpc/nhr_connector.py @@ -33,9 +33,10 @@ def __init__( self._ssh_reconnect_tries = 5 self._ssh_reconnect_tries_remaining = self._ssh_reconnect_tries # TODO: Make the sub cluster options selectable - self.project_root_dir = join(HPC_NHR_CLUSTERS["EmmyPhase2"]["scratch-emmy-hdd"], project_env) - self.batch_scripts_dir = join(self.project_root_dir, "batch_scripts") - self.slurm_workspaces_dir = join(self.project_root_dir, "slurm_workspaces") + self.project_root_dir: str = HPC_NHR_CLUSTERS["EmmyPhase2"]["scratch-emmy-hdd"] + self.project_root_dir_with_env: str = join(self.project_root_dir, project_env) + self.batch_scripts_dir: str = join(self.project_root_dir, project_env, "batch_scripts") + self.slurm_workspaces_dir: str = join(self.project_root_dir, project_env, "slurm_workspaces") @property def ssh_client(self): diff --git a/src/utils/operandi_utils/hpc/nhr_executor.py b/src/utils/operandi_utils/hpc/nhr_executor.py index d450cbc6..90f4c4c0 100644 --- a/src/utils/operandi_utils/hpc/nhr_executor.py +++ b/src/utils/operandi_utils/hpc/nhr_executor.py @@ -1,5 +1,6 @@ from json import dumps from logging import getLogger +from os.path import join from pathlib import Path from time import sleep @@ -10,6 +11,7 @@ ) from .nhr_connector import NHRConnector + class 
NHRExecutor(NHRConnector): def __init__(self) -> None: logger = getLogger(name=self.__class__.__name__) @@ -58,19 +60,38 @@ def trigger_slurm_job( "qos": qos, "batch_script_path": HPC_BATCH_SUBMIT_WORKFLOW_JOB } + + hpc_workflow_job_dir = join(self.slurm_workspaces_dir, workflow_job_id) + hpc_nf_script_path = join(self.slurm_workspaces_dir, workflow_job_id, nextflow_script_id) + hpc_workspace_dir = join(self.slurm_workspaces_dir, workflow_job_id, workspace_id) + + # NODE_PATH_OCRD_MODELS_PLACEHOLDER and NODE_PATH_SIF_PLACEHOLDER are just placeholders to be replaced + # with actual paths that are dynamically allocated inside the node that runs the HPC slurm job + ph_node_dir_ocrd_models = "PH_NODE_DIR_OCRD_MODELS" + ph_node_sif_path_ocrd_all = "PH_NODE_SIF_PATH_OCRD_ALL" + + nf_run_command = self.cmd_nextflow_run( + hpc_nf_script_path=hpc_nf_script_path, hpc_ws_dir=hpc_workspace_dir, + bind_ocrd_models=f"{ph_node_dir_ocrd_models}/ocrd-resources:/usr/local/share/ocrd-resources", + ph_sif_ocrd_all=ph_node_sif_path_ocrd_all, input_file_grp=input_file_grp, mets_basename=mets_basename, + use_mets_server=use_mets_server, ws_pages_amount=ws_pages_amount, cpus=cpus, ram=ram, forks=nf_process_forks + ) + regular_args = { + "project_base_dir": self.project_root_dir, "scratch_base_dir": self.slurm_workspaces_dir, + "ocrd_processor_images": "ocrd_all_maximum_image.sif,dummy1.sif,dummy2.sif,dummy3.sif", "workflow_job_id": workflow_job_id, - "nextflow_script_id": nextflow_script_id, - "input_file_group": input_file_grp, "workspace_id": workspace_id, - "mets_basename": mets_basename, - "cpus": cpus, - "ram": ram, - "nf_process_forks": nf_process_forks, - "ws_pages_amount": ws_pages_amount, "use_mets_server_bash_flag": use_mets_server_bash_flag, - "file_groups_to_remove": file_groups_to_remove + "file_groups_to_remove": file_groups_to_remove, + "hpc_workflow_job_dir": hpc_workflow_job_dir, + "hpc_workspace_dir": hpc_workspace_dir, + "nf_run_command": nf_run_command, + "start_mets_server_command": self.cmd_core_start_mets_server(hpc_workspace_dir, ph_node_sif_path_ocrd_all), + "stop_mets_server_command": self.cmd_core_stop_mets_server(hpc_workspace_dir, ph_node_sif_path_ocrd_all), + "list_file_groups_command": self.cmd_core_list_file_groups(hpc_workspace_dir, ph_node_sif_path_ocrd_all), + "remove_file_group_command": self.cmd_core_remove_file_group(hpc_workspace_dir, ph_node_sif_path_ocrd_all) } command += f" '{dumps(sbatch_args)}' '{dumps(regular_args)}'" @@ -149,3 +170,52 @@ def poll_till_end_slurm_job_state(self, slurm_job_id: str, interval: int = 5, ti # Timeout reached self.logger.info("Polling slurm job status timeout reached") return False + + @staticmethod + def cmd_nextflow_run( + hpc_nf_script_path: str, hpc_ws_dir: str, bind_ocrd_models: str, ph_sif_ocrd_all: str, input_file_grp: str, + mets_basename: str, use_mets_server: bool, ws_pages_amount: int, cpus: int, ram: int, forks: int + ) -> str: + apptainer_cmd = f"apptainer exec --bind {hpc_ws_dir}:/ws_data --bind {bind_ocrd_models}" + apptainer_cmd += f" --env OCRD_METS_CACHING=false {ph_sif_ocrd_all}" + + nf_run_command = f"nextflow run {hpc_nf_script_path} -ansi-log false -with-report" + nf_run_command += f" --input_file_group {input_file_grp}" + nf_run_command += f" --mets /ws_data/{mets_basename}" + if use_mets_server: + nf_run_command += f" --mets_socket /ws_data/mets_server.sock" + nf_run_command += f" --workspace_dir /ws_data" + nf_run_command += f" --pages {ws_pages_amount}" + # Command wrapper placeholder. 
Each occurrence is replaced with a single quote ' to avoid json parsing errors + ph_cmd_wrapper = "PH_CMD_WRAPPER" + nf_run_command += f" --singularity_wrapper {ph_cmd_wrapper}{apptainer_cmd}{ph_cmd_wrapper}" + nf_run_command += f" --cpus {cpus}" + nf_run_command += f" --ram {ram}" + nf_run_command += f" --forks {forks}" + return nf_run_command + + @staticmethod + def cmd_core_start_mets_server(hpc_ws_dir: str, ph_sif_core: str) -> str: + command = f"apptainer exec --bind {hpc_ws_dir}:/ws_data {ph_sif_core}" + command += f" ocrd workspace -d /ws_data -U /ws_data/mets_server.sock server start" + command += f" > {hpc_ws_dir}/mets_server.log 2>&1 &" + return command + + @staticmethod + def cmd_core_stop_mets_server(hpc_ws_dir: str, ph_sif_core: str) -> str: + command = f"apptainer exec --bind {hpc_ws_dir}:/ws_data {ph_sif_core}" + command += " ocrd workspace -d /ws_data -U /ws_data/mets_server.sock server stop" + return command + + @staticmethod + def cmd_core_list_file_groups(hpc_ws_dir: str, ph_sif_core: str) -> str: + command = f"apptainer exec --bind {hpc_ws_dir}:/ws_data {ph_sif_core}" + command += " ocrd workspace -d /ws_data list-group" + return command + + @staticmethod + def cmd_core_remove_file_group(hpc_ws_dir: str, ph_sif_core: str) -> str: + command = f"apptainer exec --bind {hpc_ws_dir}:/ws_data {ph_sif_core}" + command += " ocrd workspace -d /ws_data remove-group -r -f FILE_GROUP_PLACEHOLDER" + command += f" > {hpc_ws_dir}/remove_file_groups.log 2>&1" + return command diff --git a/tests/tests_utils/test_3_hpc/test_1_nhr_executor.py b/tests/tests_utils/test_3_hpc/_test_1_nhr_executor.py similarity index 100% rename from tests/tests_utils/test_3_hpc/test_1_nhr_executor.py rename to tests/tests_utils/test_3_hpc/_test_1_nhr_executor.py diff --git a/tests/tests_utils/test_3_hpc/test_2_nhr_transfer.py b/tests/tests_utils/test_3_hpc/_test_2_nhr_transfer.py similarity index 100% rename from tests/tests_utils/test_3_hpc/test_2_nhr_transfer.py rename to tests/tests_utils/test_3_hpc/_test_2_nhr_transfer.py From 7317db34026c5d535b905c0ebf6e322dffe0174f Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Tue, 26 Nov 2024 14:58:35 +0100 Subject: [PATCH 04/16] add executable to image mapping --- src/utils/operandi_utils/constants.py | 75 +++++++++++++++++++ .../batch_check_ocrd_all_version.sh | 0 .../batch_create_ocrd_all_maximum_sif.sh | 2 +- .../batch_create_ocrd_slim_sif_images.sh | 64 ++++++++++++++++ .../batch_download_ocrd_all_models.sh | 2 +- .../wrapper_check_workflow_job_status.sh | 0 .../wrapper_submit_workflow_job.sh | 0 7 files changed, 141 insertions(+), 2 deletions(-) mode change 100644 => 100755 src/utils/operandi_utils/hpc/batch_scripts/batch_check_ocrd_all_version.sh create mode 100755 src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_slim_sif_images.sh mode change 100644 => 100755 src/utils/operandi_utils/hpc/batch_scripts/batch_download_ocrd_all_models.sh mode change 100644 => 100755 src/utils/operandi_utils/hpc/batch_scripts/wrapper_check_workflow_job_status.sh mode change 100644 => 100755 src/utils/operandi_utils/hpc/batch_scripts/wrapper_submit_workflow_job.sh diff --git a/src/utils/operandi_utils/constants.py b/src/utils/operandi_utils/constants.py index 542fcdaf..57fe6e17 100644 --- a/src/utils/operandi_utils/constants.py +++ b/src/utils/operandi_utils/constants.py @@ -165,3 +165,78 @@ class StateWorkspace(str, Enum): TRANSFERRING_TO_HPC = "TRANSFERRING_TO_HPC" TRANSFERRING_FROM_HPC = "TRANSFERRING_FROM_HPC" UNSET = "UNSET" + +# TODO: Find a more optimal way 
of achieving this dynamically +OCRD_PROCESSOR_NAME_TO_IMAGE = { + "ocrd": "ocrd_core.sif", + "ocrd-tesserocr-crop": "ocrd_tesserocr.sif", + "ocrd-tesserocr-deskew": "ocrd_tesserocr.sif", + "ocrd-tesserocr-recognize": "ocrd_tesserocr.sif", + "ocrd-tesserocr-segment": "ocrd_tesserocr.sif", + "ocrd-tesserocr-segment-line": "ocrd_tesserocr.sif", + "ocrd-tesserocr-segment-region": "ocrd_tesserocr.sif", + "ocrd-tesserocr-segment-table": "ocrd_tesserocr.sif", + "ocrd-tesserocr-segment-word": "ocrd_tesserocr.sif", + "ocrd-tesserocr-fontshape": "ocrd_tesserocr.sif", + "ocrd-tesserocr-binarize": "ocrd_tesserocr.sif", + "ocrd-cis-ocropy-binarize": "ocrd_cis.sif", + "ocrd-cis-ocropy-denoise": "ocrd_cis.sif", + "ocrd-cis-ocropy-deskew": "ocrd_cis.sif", + "ocrd-cis-ocropy-dewarp": "ocrd_cis.sif", + "ocrd-cis-ocropy-segment": "ocrd_cis.sif", + "ocrd-cis-ocropy-resegment": "ocrd_cis.sif", + "ocrd-cis-ocropy-clip": "ocrd_cis.sif", + "ocrd-cis-ocropy-recognize": "ocrd_cis.sif", + "ocrd-cis-ocropy-train": "ocrd_cis.sif", + "ocrd-cis-align": "ocrd_cis.sif", + "ocrd-cis-postcorrect": "ocrd_cis.sif", + "ocrd-kraken-recognize": "ocrd_kraken.sif", + "ocrd-kraken-segment": "ocrd_kraken.sif", + "ocrd-kraken-binarize": "ocrd_kraken.sif", + "ocrd-preprocess-image": "ocrd_wrap.sif", + "ocrd-skimage-normalize": "ocrd_wrap.sif", + "ocrd-skimage-binarize": "ocrd_wrap.sif", + "ocrd-skimage-denoise": "ocrd_wrap.sif", + "ocrd-skimage-denoise-raw": "ocrd_wrap.sif", + "ocrd-calamari-recognize": "ocrd_calamari.sif", + "ocrd-olena-binarize": "ocrd_olena.sif", + "ocrd-dinglehopper": "ocrd_dinglehopper.sif", + "ocrd-eynollah-segment": "ocrd_eynollah.sif", + "ocrd-fileformat-transform": "ocrd_fileformat.sif", + "ocrd-nmalign-merge": "ocrd_nmalign.sif", + "ocrd-segment-extract-glyphs": "ocrd_segment.sif", + "ocrd-segment-extract-lines": "ocrd_segment.sif", + "ocrd-segment-extract-pages": "ocrd_segment.sif", + "ocrd-segment-extract-regions": "ocrd_segment.sif", + "ocrd-segment-extract-words": "ocrd_segment.sif", + "ocrd-segment-from-coco": "ocrd_segment.sif", + "ocrd-segment-from-masks": "ocrd_segment.sif", + "ocrd-segment-project": "ocrd_segment.sif", + "ocrd-segment-repair": "ocrd_segment.sif", + "ocrd-segment-replace-original": "ocrd_segment.sif", + "ocrd-segment-replace-page": "ocrd_segment.sif", + "ocrd-segment-replace-text": "ocrd_segment.sif", + "ocrd-segment-evaluate": "ocrd_segment.sif", + "ocrd-anybaseocr-dewarp": "ocrd_anybaseocr.sif", + "ocrd-anybaseocr-crop": "ocrd_anybaseocr.sif", + "ocrd-anybaseocr-binarize": "ocrd_anybaseocr.sif", + "ocrd-anybaseocr-layout-analysis": "ocrd_anybaseocr.sif", + "ocrd-anybaseocr-textline": "ocrd_anybaseocr.sif", + "ocrd-anybaseocr-tiseg": "ocrd_anybaseocr.sif", + "ocrd-anybaseocr-block-segmentation": "ocrd_anybaseocr.sif", + "ocrd-anybaseocr-deskew": "ocrd_anybaseocr.sif", + "ocrd-sbb-binarize": "ocrd_sbb_binarization.sif", + "ocrd-detectron2-segment": "ocrd_detectron2.sif", + "ocrd-froc": "ocrd_froc.sif", + "ocrd-pagetopdf": "ocrd_pagetopdf.sif", + "ocrd-keraslm-rate": "ocrd_keraslm.sif", + "ocrd-docstruct": "ocrd_docstruct.sif", + "ocrd-doxa-binarize": "ocrd_doxa.sif", + "ocrd-im6convert": "ocrd_im6convert.sif", + "ocrd-olahd-client": "ocrd_olahd-client.sif", + "ocrd-cor-asv-ann-mark": "ocrd_cor-asv-ann.sif", + "ocrd-cor-asv-ann-align": "ocrd_cor-asv-ann.sif", + "ocrd-cor-asv-ann-evaluate": "ocrd_cor-asv-ann.sif", + "ocrd-cor-asv-ann-join": "ocrd_cor-asv-ann.sif", + "ocrd-cor-asv-ann-process": "ocrd_cor-asv-ann.sif" +} diff --git 
a/src/utils/operandi_utils/hpc/batch_scripts/batch_check_ocrd_all_version.sh b/src/utils/operandi_utils/hpc/batch_scripts/batch_check_ocrd_all_version.sh old mode 100644 new mode 100755 diff --git a/src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_all_maximum_sif.sh b/src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_all_maximum_sif.sh index 7ac3f522..d5e68d31 100755 --- a/src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_all_maximum_sif.sh +++ b/src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_all_maximum_sif.sh @@ -11,7 +11,7 @@ module purge module load apptainer hostname -/opt/slurm/etc/scripts/misc/slurm_resources +# /opt/slurm/etc/scripts/misc/slurm_resources APPTAINER_TMPDIR="$LOCAL_TMPDIR" APPTAINER_CACHE_DIR="/mnt/lustre-emmy-hdd/projects/project_pwieder_ocr_nhr" diff --git a/src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_slim_sif_images.sh b/src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_slim_sif_images.sh new file mode 100755 index 00000000..c750316a --- /dev/null +++ b/src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_slim_sif_images.sh @@ -0,0 +1,64 @@ +#!/bin/bash +#SBATCH --partition standard96s:shared +#SBATCH --time 4:00:00 +#SBATCH --output create_ocrd_slim_sif_images_job-%J.txt +#SBATCH --cpus-per-task 16 +#SBATCH --mem 64G + +set -e + +module purge +module load apptainer + +hostname +# /opt/slurm/etc/scripts/misc/slurm_resources + +APPTAINER_TMPDIR="$LOCAL_TMPDIR" +APPTAINER_CACHE_DIR="/mnt/lustre-emmy-hdd/projects/project_pwieder_ocr_nhr/ocrd_processor_sifs_tmp" + +if [ ! -d "${APPTAINER_CACHE_DIR}" ]; then + echo "Creating non-existing APPTAINER_CACHE_DIR folder" + mkdir -p "${APPTAINER_CACHE_DIR}" +fi + +cd "${APPTAINER_CACHE_DIR}" || exit +# apptainer build --disable-cache "ocrd_all_maximum_image_new.sif" "docker://ocrd/all:latest" +# apptainer exec "ocrd_all_maximum_image_new.sif" ocrd --version + +declare -a images=( +"core" +"tesserocr" +"cis" +"kraken" +"wrap" +"calamari" +"olena" +"dinglehopper" +"eynollah" +"fileformat" +"nmalign" +"segment" +"anybaseocr" +"sbb_binarization" +"detectron2" +"froc" +"pagetopdf" +"keraslm" +"docstruct" +"doxa" +"im6convert" +"olahd-client" +"cor-asv-ann" +) + +for image in "${images[@]}" +do + if [ -f "$APPTAINER_CACHE_DIR/ocrd_$image.sif" ]; then + echo "Already exists, skipping: $APPTAINER_CACHE_DIR/ocrd_$image.sif" + continue + fi + echo "Building SIF of $image" + apptainer build --disable-cache "ocrd_$image.sif" "docker://ocrd/$image:latest" + echo "Building complete: $APPTAINER_CACHE_DIR/ocrd_$image.sif" + echo "" +done diff --git a/src/utils/operandi_utils/hpc/batch_scripts/batch_download_ocrd_all_models.sh b/src/utils/operandi_utils/hpc/batch_scripts/batch_download_ocrd_all_models.sh old mode 100644 new mode 100755 index fffdf2d5..ed2b7a88 --- a/src/utils/operandi_utils/hpc/batch_scripts/batch_download_ocrd_all_models.sh +++ b/src/utils/operandi_utils/hpc/batch_scripts/batch_download_ocrd_all_models.sh @@ -11,7 +11,7 @@ module purge module load apptainer hostname -/opt/slurm/etc/scripts/misc/slurm_resources +# /opt/slurm/etc/scripts/misc/slurm_resources # This sif file is generated with another batch script SIF_PATH="/mnt/lustre-emmy-hdd/projects/project_pwieder_ocr_nhr/ocrd_processor_sifs/ocrd_all_maximum_image.sif" diff --git a/src/utils/operandi_utils/hpc/batch_scripts/wrapper_check_workflow_job_status.sh b/src/utils/operandi_utils/hpc/batch_scripts/wrapper_check_workflow_job_status.sh old mode 100644 new mode 100755 diff --git 
a/src/utils/operandi_utils/hpc/batch_scripts/wrapper_submit_workflow_job.sh b/src/utils/operandi_utils/hpc/batch_scripts/wrapper_submit_workflow_job.sh old mode 100644 new mode 100755 From 26834553266fcb0a02abf3b325f137a9324eb8ab Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Tue, 26 Nov 2024 15:31:01 +0100 Subject: [PATCH 05/16] remove unnecessary dummy images --- .../hpc/batch_scripts/batch_create_ocrd_slim_sif_images.sh | 7 +++++-- src/utils/operandi_utils/hpc/nhr_executor.py | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_slim_sif_images.sh b/src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_slim_sif_images.sh index c750316a..711b264e 100755 --- a/src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_slim_sif_images.sh +++ b/src/utils/operandi_utils/hpc/batch_scripts/batch_create_ocrd_slim_sif_images.sh @@ -40,7 +40,6 @@ declare -a images=( "segment" "anybaseocr" "sbb_binarization" -"detectron2" "froc" "pagetopdf" "keraslm" @@ -48,6 +47,7 @@ declare -a images=( "doxa" "im6convert" "olahd-client" +"detectron2" "cor-asv-ann" ) @@ -59,6 +59,9 @@ do fi echo "Building SIF of $image" apptainer build --disable-cache "ocrd_$image.sif" "docker://ocrd/$image:latest" - echo "Building complete: $APPTAINER_CACHE_DIR/ocrd_$image.sif" + case $? in + 0) echo "Building complete: $APPTAINER_CACHE_DIR/ocrd_$image.sif" ;; + *) echo "Building failed, error code: $?" >&2 ;; + esac echo "" done diff --git a/src/utils/operandi_utils/hpc/nhr_executor.py b/src/utils/operandi_utils/hpc/nhr_executor.py index 90f4c4c0..1684dc9b 100644 --- a/src/utils/operandi_utils/hpc/nhr_executor.py +++ b/src/utils/operandi_utils/hpc/nhr_executor.py @@ -80,7 +80,7 @@ def trigger_slurm_job( regular_args = { "project_base_dir": self.project_root_dir, "scratch_base_dir": self.slurm_workspaces_dir, - "ocrd_processor_images": "ocrd_all_maximum_image.sif,dummy1.sif,dummy2.sif,dummy3.sif", + "ocrd_processor_images": "ocrd_all_maximum_image.sif", "workflow_job_id": workflow_job_id, "workspace_id": workspace_id, "use_mets_server_bash_flag": use_mets_server_bash_flag, From 1f0e1ee1edc6df905f529248d58a22a4ec27e329 Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Tue, 26 Nov 2024 17:05:29 +0100 Subject: [PATCH 06/16] OtoN: assigns different wrappers per step --- src/utils/operandi_utils/oton/constants.py | 35 ++++-------- .../operandi_utils/oton/nf_block_process.py | 7 ++- .../operandi_utils/oton/nf_block_workflow.py | 6 +- .../operandi_utils/oton/nf_file_executable.py | 55 ++++++++++--------- .../oton/process_call_arguments.py | 14 ++--- tests/assets/oton/constants.py | 10 +++- .../oton/test_output_nextflow1_apptainer.nf | 34 +++++++----- ...test_output_nextflow1_apptainer_with_MS.nf | 28 ++++++---- .../oton/test_output_nextflow1_docker.nf | 34 +++++++----- .../test_output_nextflow1_docker_with_MS.nf | 28 ++++++---- .../oton/test_output_nextflow1_local.nf | 8 +++ .../test_output_nextflow1_local_with_MS.nf | 8 +++ tests/assets/oton/test_output_nextflow2.nf | 7 +++ tests/assets/oton/test_output_nextflow3.nf | 3 + tests/assets/oton/test_output_nextflow4.nf | 13 +++++ tests/tests_utils/test_2_oton/assert_utils.py | 12 ++-- ...nhr_executor.py => test_1_nhr_executor.py} | 0 ...nhr_transfer.py => test_2_nhr_transfer.py} | 0 18 files changed, 184 insertions(+), 118 deletions(-) rename tests/tests_utils/test_3_hpc/{_test_1_nhr_executor.py => test_1_nhr_executor.py} (100%) rename tests/tests_utils/test_3_hpc/{_test_2_nhr_transfer.py => 
test_2_nhr_transfer.py} (100%) diff --git a/src/utils/operandi_utils/oton/constants.py b/src/utils/operandi_utils/oton/constants.py index c743bd5d..80750029 100644 --- a/src/utils/operandi_utils/oton/constants.py +++ b/src/utils/operandi_utils/oton/constants.py @@ -3,6 +3,15 @@ from pkg_resources import resource_filename from operandi_utils.constants import OPERANDI_VERSION +BS: str = '{}' +SPACES = ' ' +CONST_DIR_IN: str = 'input_group' +CONST_DIR_OUT: str = 'output_group' +CONST_PAGE_RANGE: str = 'page_range' +CONST_METS_PATH: str = 'mets_path' +CONST_METS_SOCKET_PATH: str = 'mets_socket_path' +CONST_WORKSPACE_DIR: str = 'workspace_dir' + OCRD_ALL_JSON_FILE = resource_filename(__name__, 'ocrd_all_tool.json') with open(OCRD_ALL_JSON_FILE) as f: OCRD_ALL_JSON = load(f) @@ -10,11 +19,12 @@ OTON_LOG_LEVEL = environ.get("OTON_LOG_LEVEL", "INFO") OTON_LOG_FORMAT = '%(asctime)s %(levelname)s %(name)s:%(funcName)s: %(lineno)s: %(message)s' +PARAMS_KEY_ENV_WRAPPER_CMD_CORE: str = 'params.env_wrapper_cmd_core' +PARAMS_KEY_ENV_WRAPPER_CMD_STEP: str = 'params.env_wrapper_cmd_step' PARAMS_KEY_INPUT_FILE_GRP: str = 'params.input_file_group' PARAMS_KEY_METS_PATH: str = 'params.mets_path' PARAMS_KEY_METS_SOCKET_PATH: str = 'params.mets_socket_path' PARAMS_KEY_WORKSPACE_DIR: str = 'params.workspace_dir' -PARAMS_KEY_ENV_WRAPPER_CMD: str = 'params.env_wrapper_cmd' PARAMS_KEY_PAGES: str = 'params.pages' PARAMS_KEY_CPUS: str = 'params.cpus' PARAMS_KEY_RAM: str = 'params.ram' @@ -26,7 +36,7 @@ REPR_METS_PATH: str = f"""{PARAMS_KEY_METS_PATH} = "null\"""" REPR_METS_SOCKET_PATH: str = f"""{PARAMS_KEY_METS_SOCKET_PATH} = "null\"""" REPR_WORKSPACE_DIR: str = f"""{PARAMS_KEY_WORKSPACE_DIR} = "null\"""" -REPR_ENV_WRAPPER_CMD: str = f"""{PARAMS_KEY_ENV_WRAPPER_CMD} = "null\"""" +REPR_ENV_WRAPPER_CMD_CORE: str = f"""{PARAMS_KEY_ENV_WRAPPER_CMD_CORE} = "null\"""" REPR_PAGES: str = f"""{PARAMS_KEY_PAGES} = "null\"""" REPR_CPUS: str = f"""{PARAMS_KEY_CPUS} = "null\"""" REPR_RAM: str = f"""{PARAMS_KEY_RAM} = "null\"""" @@ -35,25 +45,4 @@ REPR_CPUS_PER_FORK: str = f"""{PARAMS_KEY_CPUS_PER_FORK} = ({PARAMS_KEY_CPUS}.toInteger() / {PARAMS_KEY_FORKS}.toInteger()).intValue()""" REPR_RAM_PER_FORK: str = f"""{PARAMS_KEY_RAM_PER_FORK} = sprintf("%dGB", ({PARAMS_KEY_RAM}.toInteger() / {PARAMS_KEY_FORKS}.toInteger()).intValue())""" -DIR_IN: str = 'input_group' -DIR_OUT: str = 'output_group' -PAGE_RANGE: str = 'page_range' -METS_PATH: str = 'mets_path' -METS_SOCKET_PATH: str = 'mets_socket_path' -WORKSPACE_DIR: str = 'workspace_dir' - -# Placeholders -BS: str = '{}' -PH_ENV_WRAPPER_CMD: str = f'${BS[0]}{PARAMS_KEY_ENV_WRAPPER_CMD}{BS[1]}' -PH_DIR_IN: str = f'${BS[0]}{DIR_IN}{BS[1]}' -PH_DIR_OUT: str = f'${BS[0]}{DIR_OUT}{BS[1]}' -PH_PAGE_RANGE: str = f'${BS[0]}{PAGE_RANGE}{BS[1]}' -PH_WORKSPACE_DIR: str = f'${BS[0]}{WORKSPACE_DIR}{BS[1]}' -PH_METS_PATH: str = f'${BS[0]}{METS_PATH}{BS[1]}' -PH_METS_SOCKET_PATH: str = f'${BS[0]}{METS_SOCKET_PATH}{BS[1]}' -PH_PARAMS_FORKS: str = f'${BS[0]}{PARAMS_KEY_FORKS}{BS[1]}' -PH_PARAMS_METS_PATH: str = f'${BS[0]}{PARAMS_KEY_METS_PATH}{BS[1]}' -PH_PARAMS_WORKSPACE_DIR: str = f'${BS[0]}{PARAMS_KEY_WORKSPACE_DIR}{BS[1]}' -SPACES = ' ' - WORKFLOW_COMMENT = f"// This workflow was automatically generated by the v{OPERANDI_VERSION} operandi_utils.oton module" diff --git a/src/utils/operandi_utils/oton/nf_block_process.py b/src/utils/operandi_utils/oton/nf_block_process.py index f1d20991..2f1cb832 100644 --- a/src/utils/operandi_utils/oton/nf_block_process.py +++ 
b/src/utils/operandi_utils/oton/nf_block_process.py @@ -1,16 +1,17 @@ from logging import getLevelName, getLogger from operandi_utils.oton.ocrd_validator import ProcessorCallArguments -from operandi_utils.oton.constants import OTON_LOG_LEVEL, PH_ENV_WRAPPER_CMD, SPACES +from operandi_utils.oton.constants import BS, OTON_LOG_LEVEL, PARAMS_KEY_ENV_WRAPPER_CMD_STEP, SPACES class NextflowBlockProcess: def __init__(self, processor_call_arguments: ProcessorCallArguments, index_pos: int, env_wrapper: bool = False): self.logger = getLogger(__name__) self.logger.setLevel(getLevelName(OTON_LOG_LEVEL)) + self.index_pos = str(index_pos) self.processor_call_arguments: ProcessorCallArguments = processor_call_arguments self.env_wrapper: bool = env_wrapper - self.nf_process_name: str = processor_call_arguments.executable.replace('-', '_') + "_" + str(index_pos) + self.nf_process_name: str = processor_call_arguments.executable.replace('-', '_') + f"_{self.index_pos}" self.directives = {} self.input_params = {} self.output_params = {} @@ -61,7 +62,7 @@ def dump_script(self, local_script: bool = False) -> str: dump += f'{SPACES}{SPACES}"""\n' dump += f'{SPACES}{SPACES}' if self.env_wrapper: - dump += f'{PH_ENV_WRAPPER_CMD} ' + dump += f'${BS[0]}{PARAMS_KEY_ENV_WRAPPER_CMD_STEP}{self.index_pos}{BS[1]} ' dump += f'{self.ocrd_command_bash_placeholders}\n' dump += f'{SPACES}{SPACES}"""\n' return dump diff --git a/src/utils/operandi_utils/oton/nf_block_workflow.py b/src/utils/operandi_utils/oton/nf_block_workflow.py index f73041fd..f6d17374 100644 --- a/src/utils/operandi_utils/oton/nf_block_workflow.py +++ b/src/utils/operandi_utils/oton/nf_block_workflow.py @@ -1,11 +1,7 @@ from logging import getLevelName, getLogger from typing import List from operandi_utils.oton.constants import ( - OTON_LOG_LEVEL, - PARAMS_KEY_WORKSPACE_DIR, - PARAMS_KEY_INPUT_FILE_GRP, - PARAMS_KEY_FORKS, - SPACES + OTON_LOG_LEVEL, PARAMS_KEY_WORKSPACE_DIR, PARAMS_KEY_INPUT_FILE_GRP, PARAMS_KEY_FORKS, SPACES ) from operandi_utils.oton.nf_block_process import NextflowBlockProcess diff --git a/src/utils/operandi_utils/oton/nf_file_executable.py b/src/utils/operandi_utils/oton/nf_file_executable.py index 6007fc0b..43a94744 100644 --- a/src/utils/operandi_utils/oton/nf_file_executable.py +++ b/src/utils/operandi_utils/oton/nf_file_executable.py @@ -3,15 +3,17 @@ from operandi_utils.oton.ocrd_validator import ProcessorCallArguments from operandi_utils.oton.constants import ( - DIR_IN, DIR_OUT, PAGE_RANGE, METS_PATH, WORKSPACE_DIR, + BS, CONST_DIR_IN, CONST_DIR_OUT, CONST_PAGE_RANGE, CONST_METS_PATH, CONST_WORKSPACE_DIR, OTON_LOG_LEVEL, PARAMS_KEY_INPUT_FILE_GRP, - PH_PARAMS_WORKSPACE_DIR, PH_PARAMS_METS_PATH, PH_PARAMS_FORKS, - PH_ENV_WRAPPER_CMD, + PARAMS_KEY_METS_PATH, + PARAMS_KEY_WORKSPACE_DIR, + PARAMS_KEY_ENV_WRAPPER_CMD_CORE, + PARAMS_KEY_ENV_WRAPPER_CMD_STEP, PARAMS_KEY_FORKS, PARAMS_KEY_CPUS_PER_FORK, PARAMS_KEY_RAM_PER_FORK, - REPR_ENV_WRAPPER_CMD, + REPR_ENV_WRAPPER_CMD_CORE, REPR_INPUT_FILE_GRP, REPR_METS_PATH, REPR_METS_SOCKET_PATH, @@ -60,16 +62,14 @@ def build_parameters(self, environment: str, with_mets_server: bool): self.nf_lines_parameters.append(REPR_FORKS_NULL) if environment == "docker": self.nf_lines_parameters.append(REPR_FORKS_NULL) - self.nf_lines_parameters.append(REPR_ENV_WRAPPER_CMD) + self.nf_lines_parameters.append(REPR_ENV_WRAPPER_CMD_CORE) if environment == "apptainer": self.nf_lines_parameters.append(REPR_CPUS) self.nf_lines_parameters.append(REPR_RAM) self.nf_lines_parameters.append(REPR_FORKS) 
self.nf_lines_parameters.append(REPR_CPUS_PER_FORK) self.nf_lines_parameters.append(REPR_RAM_PER_FORK) - self.nf_lines_parameters.append(REPR_ENV_WRAPPER_CMD) - - self.nf_lines_parameters.append('') + self.nf_lines_parameters.append(REPR_ENV_WRAPPER_CMD_CORE) # TODO: Refactor later def build_split_page_ranges_process(self, environment: str, with_mets_server: bool) -> NextflowBlockProcess: @@ -91,24 +91,24 @@ def build_split_page_ranges_process(self, environment: str, with_mets_server: bo PH_RANGE_MULTIPLIER = '${range_multiplier}' bash_cmd_ocrd_ws = ( - f"ocrd workspace -d {PH_PARAMS_WORKSPACE_DIR} list-page -f comma-separated " - f"-D {PH_PARAMS_FORKS} -C {PH_RANGE_MULTIPLIER}" + f"ocrd workspace -d ${BS[0]}{PARAMS_KEY_WORKSPACE_DIR}{BS[1]} list-page -f comma-separated " + f"-D ${BS[0]}{PARAMS_KEY_FORKS}{BS[1]} -C {PH_RANGE_MULTIPLIER}" ) - bash_cmd_copy_mets_chunk = f"cp -p {PH_PARAMS_METS_PATH} \\$mets_file_chunk" + bash_cmd_copy_mets_chunk = f"cp -p ${BS[0]}{PARAMS_KEY_METS_PATH}{BS[1]} \\$mets_file_chunk" script = f'{SPACES}{SPACES}"""\n{SPACES}{SPACES}' script += f"current_range_pages=\\$(" if environment == "apptainer" or environment == "docker": - script += f"{PH_ENV_WRAPPER_CMD} " + script += f"${BS[0]}{PARAMS_KEY_ENV_WRAPPER_CMD_CORE}{BS[1]} " script += f"{bash_cmd_ocrd_ws})\n" script += f'{SPACES}{SPACES}echo "Current range is: \\$current_range_pages"\n' if not with_mets_server: - script += f"{SPACES}{SPACES}mets_file_chunk=\\$(echo {PH_PARAMS_WORKSPACE_DIR}/mets_{PH_RANGE_MULTIPLIER}.xml)\n" + script += f"{SPACES}{SPACES}mets_file_chunk=\\$(echo ${BS[0]}{PARAMS_KEY_WORKSPACE_DIR}{BS[1]}/mets_{PH_RANGE_MULTIPLIER}.xml)\n" script += f'{SPACES}{SPACES}echo "Mets file chunk path: \\$mets_file_chunk"\n' script += f"{SPACES}{SPACES}\\$(" if environment == "apptainer" or environment == "docker": - script += f"{PH_ENV_WRAPPER_CMD} " + script += f"${BS[0]}{PARAMS_KEY_ENV_WRAPPER_CMD_CORE}{BS[1]} " script += f"{bash_cmd_copy_mets_chunk})\n" script += f'{SPACES}{SPACES}"""\n' block.script = script @@ -135,15 +135,16 @@ def build_merge_mets_process(self, environment: str) -> NextflowBlockProcess: PH_METS_FILE_CHUNK = "${mets_file_chunk}" PH_PAGE_RANGE = "${page_range}" bash_cmd_ocrd_ws = ( - f"ocrd workspace -d {PH_PARAMS_WORKSPACE_DIR} merge --force --no-copy-files {PH_METS_FILE_CHUNK} " + f"ocrd workspace -d ${BS[0]}{PARAMS_KEY_WORKSPACE_DIR}{BS[1]} " + f"merge --force --no-copy-files {PH_METS_FILE_CHUNK} " f"--page-id {PH_PAGE_RANGE}" ) script = f'{SPACES}{SPACES}"""\n{SPACES}{SPACES}' if environment == "apptainer" or environment == "docker": - script += f"{PH_ENV_WRAPPER_CMD} " + script += f"${BS[0]}{PARAMS_KEY_ENV_WRAPPER_CMD_CORE}{BS[1]} " script += f"{bash_cmd_ocrd_ws}\n{SPACES}{SPACES}" if environment == "apptainer" or environment == "docker": - script += f"{PH_ENV_WRAPPER_CMD} " + script += f"${BS[0]}{PARAMS_KEY_ENV_WRAPPER_CMD_CORE}{BS[1]} " script += f"rm {PH_METS_FILE_CHUNK}\n" script += f'{SPACES}{SPACES}"""\n' block.script = script @@ -168,15 +169,16 @@ def build_nextflow_processes( nf_process_block.add_directive(directive='memory', value=PARAMS_KEY_RAM_PER_FORK) # Add Nextflow process parameters - nf_process_block.add_parameter_input(parameter=METS_PATH, parameter_type='val') - nf_process_block.add_parameter_input(parameter=PAGE_RANGE, parameter_type='val') - nf_process_block.add_parameter_input(parameter=WORKSPACE_DIR, parameter_type='val') - nf_process_block.add_parameter_input(parameter=DIR_IN, parameter_type='val') - 
nf_process_block.add_parameter_input(parameter=DIR_OUT, parameter_type='val') - - nf_process_block.add_parameter_output(parameter=METS_PATH, parameter_type='val') - nf_process_block.add_parameter_output(parameter=PAGE_RANGE, parameter_type='val') - nf_process_block.add_parameter_output(parameter=WORKSPACE_DIR, parameter_type='val') + nf_process_block.add_parameter_input(parameter=CONST_METS_PATH, parameter_type='val') + nf_process_block.add_parameter_input(parameter=CONST_PAGE_RANGE, parameter_type='val') + nf_process_block.add_parameter_input(parameter=CONST_WORKSPACE_DIR, parameter_type='val') + nf_process_block.add_parameter_input(parameter=CONST_DIR_IN, parameter_type='val') + nf_process_block.add_parameter_input(parameter=CONST_DIR_OUT, parameter_type='val') + + nf_process_block.add_parameter_output(parameter=CONST_METS_PATH, parameter_type='val') + nf_process_block.add_parameter_output(parameter=CONST_PAGE_RANGE, parameter_type='val') + nf_process_block.add_parameter_output(parameter=CONST_WORKSPACE_DIR, parameter_type='val') + self.nf_lines_parameters.append(f'{PARAMS_KEY_ENV_WRAPPER_CMD_STEP}{index} = "null"') self.nf_blocks_process.append(nf_process_block) index += 1 @@ -208,6 +210,7 @@ def produce_nextflow_file(self, output_path: str, environment: str, with_mets_se nextflow_file.write(f"{WORKFLOW_COMMENT}\n") for nextflow_line in self.nf_lines_parameters: nextflow_file.write(f'{nextflow_line}\n') + nextflow_file.write("\n") nextflow_file.write(f'{self.nf_process_split_range.file_representation(local_script=True)}\n') for block in self.nf_blocks_process: nextflow_file.write(f'{block.file_representation(local_script=False)}\n') diff --git a/src/utils/operandi_utils/oton/process_call_arguments.py b/src/utils/operandi_utils/oton/process_call_arguments.py index d9eb8005..e1f7600e 100644 --- a/src/utils/operandi_utils/oton/process_call_arguments.py +++ b/src/utils/operandi_utils/oton/process_call_arguments.py @@ -2,8 +2,8 @@ from logging import getLevelName, getLogger from typing import Optional from operandi_utils.oton.constants import ( - OCRD_ALL_JSON, OTON_LOG_LEVEL, - PH_DIR_IN, PH_DIR_OUT, PH_WORKSPACE_DIR, PH_METS_PATH, PH_METS_SOCKET_PATH, PH_PAGE_RANGE + BS, CONST_DIR_IN, CONST_DIR_OUT, CONST_WORKSPACE_DIR, CONST_METS_PATH, CONST_METS_SOCKET_PATH, + OCRD_ALL_JSON, OTON_LOG_LEVEL ) # This class is based on ocrd.task_sequence.ProcessorTask @@ -55,11 +55,11 @@ def dump_bash_form_with_placeholders(self): dump = '' dump += f'{self.executable}' if self.mets_socket_path: - dump += f' -U {PH_METS_SOCKET_PATH}' - dump += f' -w {PH_WORKSPACE_DIR}' - dump += f' -m {PH_METS_PATH}' - dump += f' -I {PH_DIR_IN}' - dump += f' -O {PH_DIR_OUT}' + dump += f' -U ${BS[0]}{CONST_METS_SOCKET_PATH}{BS[1]}' + dump += f' -w ${BS[0]}{CONST_WORKSPACE_DIR}{BS[1]}' + dump += f' -m ${BS[0]}{CONST_METS_PATH}{BS[1]}' + dump += f' -I ${BS[0]}{CONST_DIR_IN}{BS[1]}' + dump += f' -O ${BS[0]}{CONST_DIR_OUT}{BS[1]}' if self.parameters: dump += f" -p '{json_dumps(self.parameters)}'" return dump diff --git a/tests/assets/oton/constants.py b/tests/assets/oton/constants.py index e336d360..91404cda 100644 --- a/tests/assets/oton/constants.py +++ b/tests/assets/oton/constants.py @@ -118,7 +118,10 @@ PARAMETERS_DOCKER = [ 'params.forks = "4"', - 'params.env_wrapper_cmd = "null"' + 'params.env_wrapper_cmd_core = "null"', + 'params.env_wrapper_cmd_step0 = "null"', + 'params.env_wrapper_cmd_step1 = "null"', + 'params.env_wrapper_cmd_step2 = "null"', ] PARAMETERS_APPTAINER = [ @@ -127,5 +130,8 @@ 'params.forks = params.cpus', 
'params.cpus_per_fork = (params.cpus.toInteger() / params.forks.toInteger()).intValue()', 'params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toInteger()).intValue())', - 'params.env_wrapper_cmd = "null"' + 'params.env_wrapper_cmd_core = "null"', + 'params.env_wrapper_cmd_step0 = "null"', + 'params.env_wrapper_cmd_step1 = "null"', + 'params.env_wrapper_cmd_step2 = "null"', ] diff --git a/tests/assets/oton/test_output_nextflow1_apptainer.nf b/tests/assets/oton/test_output_nextflow1_apptainer.nf index b1f6420e..836b69a0 100644 --- a/tests/assets/oton/test_output_nextflow1_apptainer.nf +++ b/tests/assets/oton/test_output_nextflow1_apptainer.nf @@ -10,7 +10,15 @@ params.ram = "null" params.forks = params.cpus params.cpus_per_fork = (params.cpus.toInteger() / params.forks.toInteger()).intValue() params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toInteger()).intValue()) -params.env_wrapper_cmd = "null" +params.env_wrapper_cmd_core = "null" +params.env_wrapper_cmd_step0 = "null" +params.env_wrapper_cmd_step1 = "null" +params.env_wrapper_cmd_step2 = "null" +params.env_wrapper_cmd_step3 = "null" +params.env_wrapper_cmd_step4 = "null" +params.env_wrapper_cmd_step5 = "null" +params.env_wrapper_cmd_step6 = "null" +params.env_wrapper_cmd_step7 = "null" process split_page_ranges { debug true @@ -27,11 +35,11 @@ process split_page_ranges { script: """ - current_range_pages=\$(${params.env_wrapper_cmd} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) + current_range_pages=\$(${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) echo "Current range is: \$current_range_pages" mets_file_chunk=\$(echo ${params.workspace_dir}/mets_${range_multiplier}.xml) echo "Mets file chunk path: \$mets_file_chunk" - \$(${params.env_wrapper_cmd} cp -p ${params.mets_path} \$mets_file_chunk) + \$(${params.env_wrapper_cmd_core} cp -p ${params.mets_path} \$mets_file_chunk) """ } @@ -55,7 +63,7 @@ process ocrd_cis_ocropy_binarize_0 { script: """ - ${params.env_wrapper_cmd} ocrd-cis-ocropy-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + ${params.env_wrapper_cmd_step0} ocrd-cis-ocropy-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} """ } @@ -79,7 +87,7 @@ process ocrd_anybaseocr_crop_1 { script: """ - ${params.env_wrapper_cmd} ocrd-anybaseocr-crop -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + ${params.env_wrapper_cmd_step1} ocrd-anybaseocr-crop -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} """ } @@ -103,7 +111,7 @@ process ocrd_skimage_binarize_2 { script: """ - ${params.env_wrapper_cmd} ocrd-skimage-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"method": "li"}' + ${params.env_wrapper_cmd_step2} ocrd-skimage-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"method": "li"}' """ } @@ -127,7 +135,7 @@ process ocrd_skimage_denoise_3 { script: """ - ${params.env_wrapper_cmd} ocrd-skimage-denoise -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' + ${params.env_wrapper_cmd_step3} ocrd-skimage-denoise -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' """ } @@ -151,7 +159,7 @@ process ocrd_tesserocr_deskew_4 { script: 
""" - ${params.env_wrapper_cmd} ocrd-tesserocr-deskew -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"operation_level": "page"}' + ${params.env_wrapper_cmd_step4} ocrd-tesserocr-deskew -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"operation_level": "page"}' """ } @@ -175,7 +183,7 @@ process ocrd_cis_ocropy_segment_5 { script: """ - ${params.env_wrapper_cmd} ocrd-cis-ocropy-segment -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' + ${params.env_wrapper_cmd_step5} ocrd-cis-ocropy-segment -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' """ } @@ -199,7 +207,7 @@ process ocrd_cis_ocropy_dewarp_6 { script: """ - ${params.env_wrapper_cmd} ocrd-cis-ocropy-dewarp -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + ${params.env_wrapper_cmd_step6} ocrd-cis-ocropy-dewarp -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} """ } @@ -223,7 +231,7 @@ process ocrd_calamari_recognize_7 { script: """ - ${params.env_wrapper_cmd} ocrd-calamari-recognize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"checkpoint_dir": "qurator-gt4histocr-1.0"}' + ${params.env_wrapper_cmd_step7} ocrd-calamari-recognize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"checkpoint_dir": "qurator-gt4histocr-1.0"}' """ } @@ -239,8 +247,8 @@ process merging_mets { script: """ - ${params.env_wrapper_cmd} ocrd workspace -d ${params.workspace_dir} merge --force --no-copy-files ${mets_file_chunk} --page-id ${page_range} - ${params.env_wrapper_cmd} rm ${mets_file_chunk} + ${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} merge --force --no-copy-files ${mets_file_chunk} --page-id ${page_range} + ${params.env_wrapper_cmd_core} rm ${mets_file_chunk} """ } diff --git a/tests/assets/oton/test_output_nextflow1_apptainer_with_MS.nf b/tests/assets/oton/test_output_nextflow1_apptainer_with_MS.nf index 8161c006..ce5de9c7 100644 --- a/tests/assets/oton/test_output_nextflow1_apptainer_with_MS.nf +++ b/tests/assets/oton/test_output_nextflow1_apptainer_with_MS.nf @@ -11,7 +11,15 @@ params.ram = "null" params.forks = params.cpus params.cpus_per_fork = (params.cpus.toInteger() / params.forks.toInteger()).intValue() params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toInteger()).intValue()) -params.env_wrapper_cmd = "null" +params.env_wrapper_cmd_core = "null" +params.env_wrapper_cmd_step0 = "null" +params.env_wrapper_cmd_step1 = "null" +params.env_wrapper_cmd_step2 = "null" +params.env_wrapper_cmd_step3 = "null" +params.env_wrapper_cmd_step4 = "null" +params.env_wrapper_cmd_step5 = "null" +params.env_wrapper_cmd_step6 = "null" +params.env_wrapper_cmd_step7 = "null" process split_page_ranges { debug true @@ -27,7 +35,7 @@ process split_page_ranges { script: """ - current_range_pages=\$(${params.env_wrapper_cmd} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) + current_range_pages=\$(${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) echo "Current range is: \$current_range_pages" """ } @@ -52,7 +60,7 @@ process ocrd_cis_ocropy_binarize_0 { script: """ - ${params.env_wrapper_cmd} ocrd-cis-ocropy-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O 
${output_group} + ${params.env_wrapper_cmd_step0} ocrd-cis-ocropy-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} """ } @@ -76,7 +84,7 @@ process ocrd_anybaseocr_crop_1 { script: """ - ${params.env_wrapper_cmd} ocrd-anybaseocr-crop -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + ${params.env_wrapper_cmd_step1} ocrd-anybaseocr-crop -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} """ } @@ -100,7 +108,7 @@ process ocrd_skimage_binarize_2 { script: """ - ${params.env_wrapper_cmd} ocrd-skimage-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"method": "li"}' + ${params.env_wrapper_cmd_step2} ocrd-skimage-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"method": "li"}' """ } @@ -124,7 +132,7 @@ process ocrd_skimage_denoise_3 { script: """ - ${params.env_wrapper_cmd} ocrd-skimage-denoise -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' + ${params.env_wrapper_cmd_step3} ocrd-skimage-denoise -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' """ } @@ -148,7 +156,7 @@ process ocrd_tesserocr_deskew_4 { script: """ - ${params.env_wrapper_cmd} ocrd-tesserocr-deskew -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"operation_level": "page"}' + ${params.env_wrapper_cmd_step4} ocrd-tesserocr-deskew -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"operation_level": "page"}' """ } @@ -172,7 +180,7 @@ process ocrd_cis_ocropy_segment_5 { script: """ - ${params.env_wrapper_cmd} ocrd-cis-ocropy-segment -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' + ${params.env_wrapper_cmd_step5} ocrd-cis-ocropy-segment -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' """ } @@ -196,7 +204,7 @@ process ocrd_cis_ocropy_dewarp_6 { script: """ - ${params.env_wrapper_cmd} ocrd-cis-ocropy-dewarp -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + ${params.env_wrapper_cmd_step6} ocrd-cis-ocropy-dewarp -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} """ } @@ -220,7 +228,7 @@ process ocrd_calamari_recognize_7 { script: """ - ${params.env_wrapper_cmd} ocrd-calamari-recognize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"checkpoint_dir": "qurator-gt4histocr-1.0"}' + ${params.env_wrapper_cmd_step7} ocrd-calamari-recognize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"checkpoint_dir": "qurator-gt4histocr-1.0"}' """ } diff --git a/tests/assets/oton/test_output_nextflow1_docker.nf b/tests/assets/oton/test_output_nextflow1_docker.nf index bbe7888d..aa10c21c 100644 --- a/tests/assets/oton/test_output_nextflow1_docker.nf +++ b/tests/assets/oton/test_output_nextflow1_docker.nf @@ -6,7 +6,15 @@ params.mets_path = "null" params.workspace_dir = "null" params.pages = "null" params.forks = "4" -params.env_wrapper_cmd = "null" +params.env_wrapper_cmd_core = "null" +params.env_wrapper_cmd_step0 = "null" +params.env_wrapper_cmd_step1 = "null" +params.env_wrapper_cmd_step2 = "null" +params.env_wrapper_cmd_step3 = "null" +params.env_wrapper_cmd_step4 = "null" +params.env_wrapper_cmd_step5 = "null" +params.env_wrapper_cmd_step6 = "null" +params.env_wrapper_cmd_step7 = "null" process 
split_page_ranges { debug true @@ -21,11 +29,11 @@ process split_page_ranges { script: """ - current_range_pages=\$(${params.env_wrapper_cmd} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) + current_range_pages=\$(${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) echo "Current range is: \$current_range_pages" mets_file_chunk=\$(echo ${params.workspace_dir}/mets_${range_multiplier}.xml) echo "Mets file chunk path: \$mets_file_chunk" - \$(${params.env_wrapper_cmd} cp -p ${params.mets_path} \$mets_file_chunk) + \$(${params.env_wrapper_cmd_core} cp -p ${params.mets_path} \$mets_file_chunk) """ } @@ -47,7 +55,7 @@ process ocrd_cis_ocropy_binarize_0 { script: """ - ${params.env_wrapper_cmd} ocrd-cis-ocropy-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + ${params.env_wrapper_cmd_step0} ocrd-cis-ocropy-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} """ } @@ -69,7 +77,7 @@ process ocrd_anybaseocr_crop_1 { script: """ - ${params.env_wrapper_cmd} ocrd-anybaseocr-crop -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + ${params.env_wrapper_cmd_step1} ocrd-anybaseocr-crop -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} """ } @@ -91,7 +99,7 @@ process ocrd_skimage_binarize_2 { script: """ - ${params.env_wrapper_cmd} ocrd-skimage-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"method": "li"}' + ${params.env_wrapper_cmd_step2} ocrd-skimage-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"method": "li"}' """ } @@ -113,7 +121,7 @@ process ocrd_skimage_denoise_3 { script: """ - ${params.env_wrapper_cmd} ocrd-skimage-denoise -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' + ${params.env_wrapper_cmd_step3} ocrd-skimage-denoise -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' """ } @@ -135,7 +143,7 @@ process ocrd_tesserocr_deskew_4 { script: """ - ${params.env_wrapper_cmd} ocrd-tesserocr-deskew -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"operation_level": "page"}' + ${params.env_wrapper_cmd_step4} ocrd-tesserocr-deskew -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"operation_level": "page"}' """ } @@ -157,7 +165,7 @@ process ocrd_cis_ocropy_segment_5 { script: """ - ${params.env_wrapper_cmd} ocrd-cis-ocropy-segment -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' + ${params.env_wrapper_cmd_step5} ocrd-cis-ocropy-segment -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' """ } @@ -179,7 +187,7 @@ process ocrd_cis_ocropy_dewarp_6 { script: """ - ${params.env_wrapper_cmd} ocrd-cis-ocropy-dewarp -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + ${params.env_wrapper_cmd_step6} ocrd-cis-ocropy-dewarp -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} """ } @@ -201,7 +209,7 @@ process ocrd_calamari_recognize_7 { script: """ - ${params.env_wrapper_cmd} ocrd-calamari-recognize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"checkpoint_dir": "qurator-gt4histocr-1.0"}' + 
${params.env_wrapper_cmd_step7} ocrd-calamari-recognize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"checkpoint_dir": "qurator-gt4histocr-1.0"}' """ } @@ -215,8 +223,8 @@ process merging_mets { script: """ - ${params.env_wrapper_cmd} ocrd workspace -d ${params.workspace_dir} merge --force --no-copy-files ${mets_file_chunk} --page-id ${page_range} - ${params.env_wrapper_cmd} rm ${mets_file_chunk} + ${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} merge --force --no-copy-files ${mets_file_chunk} --page-id ${page_range} + ${params.env_wrapper_cmd_core} rm ${mets_file_chunk} """ } diff --git a/tests/assets/oton/test_output_nextflow1_docker_with_MS.nf b/tests/assets/oton/test_output_nextflow1_docker_with_MS.nf index e864b89a..870ea30c 100644 --- a/tests/assets/oton/test_output_nextflow1_docker_with_MS.nf +++ b/tests/assets/oton/test_output_nextflow1_docker_with_MS.nf @@ -7,7 +7,15 @@ params.workspace_dir = "null" params.pages = "null" params.mets_socket_path = "null" params.forks = "4" -params.env_wrapper_cmd = "null" +params.env_wrapper_cmd_core = "null" +params.env_wrapper_cmd_step0 = "null" +params.env_wrapper_cmd_step1 = "null" +params.env_wrapper_cmd_step2 = "null" +params.env_wrapper_cmd_step3 = "null" +params.env_wrapper_cmd_step4 = "null" +params.env_wrapper_cmd_step5 = "null" +params.env_wrapper_cmd_step6 = "null" +params.env_wrapper_cmd_step7 = "null" process split_page_ranges { debug true @@ -21,7 +29,7 @@ process split_page_ranges { script: """ - current_range_pages=\$(${params.env_wrapper_cmd} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) + current_range_pages=\$(${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) echo "Current range is: \$current_range_pages" """ } @@ -44,7 +52,7 @@ process ocrd_cis_ocropy_binarize_0 { script: """ - ${params.env_wrapper_cmd} ocrd-cis-ocropy-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + ${params.env_wrapper_cmd_step0} ocrd-cis-ocropy-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} """ } @@ -66,7 +74,7 @@ process ocrd_anybaseocr_crop_1 { script: """ - ${params.env_wrapper_cmd} ocrd-anybaseocr-crop -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + ${params.env_wrapper_cmd_step1} ocrd-anybaseocr-crop -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} """ } @@ -88,7 +96,7 @@ process ocrd_skimage_binarize_2 { script: """ - ${params.env_wrapper_cmd} ocrd-skimage-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"method": "li"}' + ${params.env_wrapper_cmd_step2} ocrd-skimage-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"method": "li"}' """ } @@ -110,7 +118,7 @@ process ocrd_skimage_denoise_3 { script: """ - ${params.env_wrapper_cmd} ocrd-skimage-denoise -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' + ${params.env_wrapper_cmd_step3} ocrd-skimage-denoise -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' """ } @@ -132,7 +140,7 @@ process ocrd_tesserocr_deskew_4 { script: """ - ${params.env_wrapper_cmd} ocrd-tesserocr-deskew -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"operation_level": "page"}' + 
${params.env_wrapper_cmd_step4} ocrd-tesserocr-deskew -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"operation_level": "page"}' """ } @@ -154,7 +162,7 @@ process ocrd_cis_ocropy_segment_5 { script: """ - ${params.env_wrapper_cmd} ocrd-cis-ocropy-segment -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' + ${params.env_wrapper_cmd_step5} ocrd-cis-ocropy-segment -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' """ } @@ -176,7 +184,7 @@ process ocrd_cis_ocropy_dewarp_6 { script: """ - ${params.env_wrapper_cmd} ocrd-cis-ocropy-dewarp -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + ${params.env_wrapper_cmd_step6} ocrd-cis-ocropy-dewarp -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} """ } @@ -198,7 +206,7 @@ process ocrd_calamari_recognize_7 { script: """ - ${params.env_wrapper_cmd} ocrd-calamari-recognize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"checkpoint_dir": "qurator-gt4histocr-1.0"}' + ${params.env_wrapper_cmd_step7} ocrd-calamari-recognize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"checkpoint_dir": "qurator-gt4histocr-1.0"}' """ } diff --git a/tests/assets/oton/test_output_nextflow1_local.nf b/tests/assets/oton/test_output_nextflow1_local.nf index d89109a5..0e2cafde 100644 --- a/tests/assets/oton/test_output_nextflow1_local.nf +++ b/tests/assets/oton/test_output_nextflow1_local.nf @@ -6,6 +6,14 @@ params.mets_path = "null" params.workspace_dir = "null" params.pages = "null" params.forks = "4" +params.env_wrapper_cmd_step0 = "null" +params.env_wrapper_cmd_step1 = "null" +params.env_wrapper_cmd_step2 = "null" +params.env_wrapper_cmd_step3 = "null" +params.env_wrapper_cmd_step4 = "null" +params.env_wrapper_cmd_step5 = "null" +params.env_wrapper_cmd_step6 = "null" +params.env_wrapper_cmd_step7 = "null" process split_page_ranges { debug true diff --git a/tests/assets/oton/test_output_nextflow1_local_with_MS.nf b/tests/assets/oton/test_output_nextflow1_local_with_MS.nf index 6b9f4eac..8f2655cf 100644 --- a/tests/assets/oton/test_output_nextflow1_local_with_MS.nf +++ b/tests/assets/oton/test_output_nextflow1_local_with_MS.nf @@ -7,6 +7,14 @@ params.workspace_dir = "null" params.pages = "null" params.mets_socket_path = "null" params.forks = "4" +params.env_wrapper_cmd_step0 = "null" +params.env_wrapper_cmd_step1 = "null" +params.env_wrapper_cmd_step2 = "null" +params.env_wrapper_cmd_step3 = "null" +params.env_wrapper_cmd_step4 = "null" +params.env_wrapper_cmd_step5 = "null" +params.env_wrapper_cmd_step6 = "null" +params.env_wrapper_cmd_step7 = "null" process split_page_ranges { debug true diff --git a/tests/assets/oton/test_output_nextflow2.nf b/tests/assets/oton/test_output_nextflow2.nf index 099883e1..43a633f9 100644 --- a/tests/assets/oton/test_output_nextflow2.nf +++ b/tests/assets/oton/test_output_nextflow2.nf @@ -6,6 +6,13 @@ params.mets_path = "null" params.workspace_dir = "null" params.pages = "null" params.forks = "4" +params.env_wrapper_cmd_step0 = "null" +params.env_wrapper_cmd_step1 = "null" +params.env_wrapper_cmd_step2 = "null" +params.env_wrapper_cmd_step3 = "null" +params.env_wrapper_cmd_step4 = "null" +params.env_wrapper_cmd_step5 = "null" +params.env_wrapper_cmd_step6 = "null" process split_page_ranges { debug true diff --git a/tests/assets/oton/test_output_nextflow3.nf 
b/tests/assets/oton/test_output_nextflow3.nf index 205cb236..eaa7ab06 100644 --- a/tests/assets/oton/test_output_nextflow3.nf +++ b/tests/assets/oton/test_output_nextflow3.nf @@ -6,6 +6,9 @@ params.mets_path = "null" params.workspace_dir = "null" params.pages = "null" params.forks = "4" +params.env_wrapper_cmd_step0 = "null" +params.env_wrapper_cmd_step1 = "null" +params.env_wrapper_cmd_step2 = "null" process split_page_ranges { debug true diff --git a/tests/assets/oton/test_output_nextflow4.nf b/tests/assets/oton/test_output_nextflow4.nf index a5ef0d12..e1138699 100644 --- a/tests/assets/oton/test_output_nextflow4.nf +++ b/tests/assets/oton/test_output_nextflow4.nf @@ -6,6 +6,19 @@ params.mets_path = "null" params.workspace_dir = "null" params.pages = "null" params.forks = "4" +params.env_wrapper_cmd_step0 = "null" +params.env_wrapper_cmd_step1 = "null" +params.env_wrapper_cmd_step2 = "null" +params.env_wrapper_cmd_step3 = "null" +params.env_wrapper_cmd_step4 = "null" +params.env_wrapper_cmd_step5 = "null" +params.env_wrapper_cmd_step6 = "null" +params.env_wrapper_cmd_step7 = "null" +params.env_wrapper_cmd_step8 = "null" +params.env_wrapper_cmd_step9 = "null" +params.env_wrapper_cmd_step10 = "null" +params.env_wrapper_cmd_step11 = "null" +params.env_wrapper_cmd_step12 = "null" process split_page_ranges { debug true diff --git a/tests/tests_utils/test_2_oton/assert_utils.py b/tests/tests_utils/test_2_oton/assert_utils.py index 0b2fbbcf..ed197bea 100644 --- a/tests/tests_utils/test_2_oton/assert_utils.py +++ b/tests/tests_utils/test_2_oton/assert_utils.py @@ -30,8 +30,8 @@ def assert_common_features_local(nextflow_file_class): assert parameter in parameters, f"{parameter} is not in {parameters}" blocks_process = nextflow_file_class.nf_blocks_process for block in blocks_process: - assert '${params.env_wrapper_cmd}' not in block.dump_script(), \ - "${params.env_wrapper_cmd} found but should not exist in " + f"'{block.ocrd_command_bash_placeholders}'" + assert 'params.env_wrapper_cmd_step' not in block.dump_script(), \ + "params.env_wrapper_cmd_step found but should not exist in " + f"'{block.ocrd_command_bash_placeholders}'" def assert_common_features_docker(nextflow_file_class): @@ -40,8 +40,8 @@ def assert_common_features_docker(nextflow_file_class): assert parameter in parameters, f"{parameter} is not in {parameters}" blocks_process = nextflow_file_class.nf_blocks_process for block in blocks_process: - assert '${params.env_wrapper_cmd}' in block.dump_script(), \ - "${params.env_wrapper_cmd} not found but should exist in " + f"'{block.ocrd_command_bash_placeholders}'" + assert 'params.env_wrapper_cmd_step' in block.dump_script(), \ + "params.env_wrapper_cmd_step not found but should exist in " + f"'{block.ocrd_command_bash_placeholders}'" def assert_common_features_apptainer(nextflow_file_class): @@ -50,8 +50,8 @@ def assert_common_features_apptainer(nextflow_file_class): assert parameter in parameters, f"{parameter} is not in {parameters}" blocks_process = nextflow_file_class.nf_blocks_process for block in blocks_process: - assert '${params.env_wrapper_cmd}' in block.dump_script(), \ - "${params.env_wrapper_cmd} not found but should exist in " + f"'{block.ocrd_command_bash_placeholders}'" + assert 'params.env_wrapper_cmd_step' in block.dump_script(), \ + "params.env_wrapper_cmd_step not found but should exist in " + f"'{block.ocrd_command_bash_placeholders}'" def assert_compare_workflow_blocks(output_file_path, expected_wf, clean_files: bool = False): diff --git 
a/tests/tests_utils/test_3_hpc/_test_1_nhr_executor.py b/tests/tests_utils/test_3_hpc/test_1_nhr_executor.py similarity index 100% rename from tests/tests_utils/test_3_hpc/_test_1_nhr_executor.py rename to tests/tests_utils/test_3_hpc/test_1_nhr_executor.py diff --git a/tests/tests_utils/test_3_hpc/_test_2_nhr_transfer.py b/tests/tests_utils/test_3_hpc/test_2_nhr_transfer.py similarity index 100% rename from tests/tests_utils/test_3_hpc/_test_2_nhr_transfer.py rename to tests/tests_utils/test_3_hpc/test_2_nhr_transfer.py From 3762a5f45eaad5a77ea66c93139734c1d16c02fc Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Fri, 29 Nov 2024 13:31:12 +0100 Subject: [PATCH 07/16] fix: oton convert endpoint test --- tests/tests_server/test_endpoint_workflow.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/tests_server/test_endpoint_workflow.py b/tests/tests_server/test_endpoint_workflow.py index 49e68e2f..1b812806 100644 --- a/tests/tests_server/test_endpoint_workflow.py +++ b/tests/tests_server/test_endpoint_workflow.py @@ -134,7 +134,7 @@ def test_convert_txt_to_nextflow_success(operandi, auth): # Verify the status code and content assert_response_status_code(response.status_code, expected_floor=2) assert "params.mets_path" in nf_file_content - assert "params.env_wrapper" not in nf_file_content + assert "params.env_wrapper_cmd_core" not in nf_file_content assert "params.mets_socket_path" not in nf_file_content assert "merging_mets" in nf_file_content @@ -155,7 +155,7 @@ def test_convert_txt_to_nextflow_success_with_mets_server(operandi, auth): # Verify the status code and content assert_response_status_code(response.status_code, expected_floor=2) assert "params.mets_path" in nf_file_content - assert "params.env_wrapper" not in nf_file_content + assert "params.env_wrapper_cmd_core" not in nf_file_content assert "params.mets_socket_path" in nf_file_content assert "merging_mets" not in nf_file_content @@ -207,7 +207,7 @@ def test_convert_txt_to_nextflow_docker_success(operandi, auth): nf_file_content = response.content.decode('utf-8') assert_response_status_code(response.status_code, expected_floor=2) assert "params.mets_path" in nf_file_content - assert "params.env_wrapper" in nf_file_content + assert "params.env_wrapper_cmd_core" in nf_file_content assert "params.mets_socket_path" not in nf_file_content assert "merging_mets" in nf_file_content @@ -226,6 +226,6 @@ def test_convert_txt_to_nextflow_docker_success_with_mets_server(operandi, auth) nf_file_content = response.content.decode('utf-8') assert_response_status_code(response.status_code, expected_floor=2) assert "params.mets_path" in nf_file_content - assert "params.env_wrapper" in nf_file_content + assert "params.env_wrapper_cmd_core" in nf_file_content assert "params.mets_socket_path" in nf_file_content assert "merging_mets" not in nf_file_content From f97918074d4c341762203401a5b1775c25a7ce11 Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Fri, 29 Nov 2024 13:53:48 +0100 Subject: [PATCH 08/16] fix: checking whether a nf script uses mets server --- src/server/operandi_server/routers/workflow_utils.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/server/operandi_server/routers/workflow_utils.py b/src/server/operandi_server/routers/workflow_utils.py index 94c2e6e2..49d2a517 100644 --- a/src/server/operandi_server/routers/workflow_utils.py +++ b/src/server/operandi_server/routers/workflow_utils.py @@ -1,9 +1,11 @@ from fastapi import HTTPException, status from pathlib import Path 
+from typing import List from operandi_utils.database import db_get_workflow, db_get_workflow_job from operandi_utils.database.models import DBWorkflow, DBWorkflowJob from operandi_utils.oton import OTONConverter, OCRDValidator +from operandi_utils.oton.constants import PARAMS_KEY_METS_SOCKET_PATH async def get_db_workflow_with_handling( @@ -41,7 +43,7 @@ async def get_db_workflow_job_with_handling(logger, job_id: str, check_local_exi async def nf_script_uses_mets_server_with_handling( - logger, nf_script_path: str, search_string: str = "params.mets_socket" + logger, nf_script_path: str, search_string: str = PARAMS_KEY_METS_SOCKET_PATH ) -> bool: try: with open(nf_script_path) as nf_file: From c66fcdef92bd196de4c83cbb67fe4fed3ec4f5c1 Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Fri, 29 Nov 2024 16:54:42 +0100 Subject: [PATCH 09/16] another big portion of working changes --- src/broker/operandi_broker/worker.py | 14 +- .../operandi_server/routers/workflow.py | 48 +++- .../operandi_server/routers/workflow_utils.py | 28 +- src/server/operandi_server/server.py | 1 + src/utils/operandi_utils/__init__.py | 6 +- src/utils/operandi_utils/constants.py | 3 +- .../operandi_utils/database/db_workflow.py | 12 +- src/utils/operandi_utils/database/models.py | 2 + .../nextflow_workflows/default_workflow.nf | 222 ++++++++-------- .../default_workflow_with_MS.nf | 192 ++++++++------ .../hpc/nextflow_workflows/odem_workflow.nf | 240 ++++++++++-------- .../odem_workflow_with_MS.nf | 208 +++++++++------ .../hpc/nextflow_workflows/sbb_workflow.nf | 83 +++--- .../sbb_workflow_with_MS.nf | 69 +++-- .../nextflow_workflows/template_workflow.nf | 83 +++--- .../template_workflow_with_MS.nf | 69 +++-- src/utils/operandi_utils/hpc/nhr_executor.py | 27 +- .../default_workflow.txt | 9 + .../ocrd_process_workflows/odem_workflow.txt | 11 + .../ocrd_process_workflows/sbb_workflow.txt | 2 + .../template_workflow.txt | 2 + .../operandi_utils/oton/nf_block_workflow.py | 2 +- .../operandi_utils/oton/nf_file_executable.py | 6 +- src/utils/operandi_utils/utils.py | 4 +- tests/assets/oton/constants.py | 10 +- .../oton/test_output_nextflow1_apptainer.nf | 2 +- ...test_output_nextflow1_apptainer_with_MS.nf | 4 +- .../oton/test_output_nextflow1_docker.nf | 2 +- .../test_output_nextflow1_docker_with_MS.nf | 4 +- .../oton/test_output_nextflow1_local.nf | 2 +- .../test_output_nextflow1_local_with_MS.nf | 4 +- tests/assets/oton/test_output_nextflow2.nf | 2 +- tests/assets/oton/test_output_nextflow3.nf | 2 +- tests/assets/oton/test_output_nextflow4.nf | 2 +- tests/tests_server/test_endpoint_workflow.py | 17 +- .../test_3_hpc/test_1_nhr_executor.py | 8 +- .../test_3_hpc/test_2_nhr_transfer.py | 4 +- .../test_3_hpc/test_3_nhr_combined.py | 8 +- 38 files changed, 816 insertions(+), 598 deletions(-) create mode 100644 src/utils/operandi_utils/hpc/ocrd_process_workflows/default_workflow.txt create mode 100644 src/utils/operandi_utils/hpc/ocrd_process_workflows/odem_workflow.txt create mode 100644 src/utils/operandi_utils/hpc/ocrd_process_workflows/sbb_workflow.txt create mode 100644 src/utils/operandi_utils/hpc/ocrd_process_workflows/template_workflow.txt diff --git a/src/broker/operandi_broker/worker.py b/src/broker/operandi_broker/worker.py index 181614ea..5c709628 100644 --- a/src/broker/operandi_broker/worker.py +++ b/src/broker/operandi_broker/worker.py @@ -5,6 +5,7 @@ from os.path import join from pathlib import Path from sys import exit +from typing import List from operandi_utils import reconfigure_all_loggers, 
get_log_file_path_prefix from operandi_utils.constants import LOG_LEVEL_WORKER, StateJob, StateWorkspace @@ -111,6 +112,7 @@ def __callback(self, ch, method, properties, body): workflow_script_path = Path(workflow_db.workflow_script_path) nf_uses_mets_server = workflow_db.uses_mets_server + nf_executable_steps = workflow_db.executable_steps workspace_dir = Path(workspace_db.workspace_dir) mets_basename = workspace_db.mets_basename ws_pages_amount = workspace_db.pages_amount @@ -132,8 +134,8 @@ def __callback(self, ch, method, properties, body): workspace_dir=workspace_dir, workspace_base_mets=mets_basename, workflow_script_path=workflow_script_path, input_file_grp=input_file_grp, nf_process_forks=nf_process_forks, ws_pages_amount=ws_pages_amount, use_mets_server=nf_uses_mets_server, - file_groups_to_remove=remove_file_grps, cpus=slurm_job_cpus, ram=slurm_job_ram, - partition=slurm_job_partition + nf_executable_steps=nf_executable_steps, file_groups_to_remove=remove_file_grps, cpus=slurm_job_cpus, + ram=slurm_job_ram, partition=slurm_job_partition ) self.log.info(f"The HPC slurm job was successfully submitted") except Exception as error: @@ -200,7 +202,8 @@ def signal_handler(self, sig, frame): def prepare_and_trigger_slurm_job( self, workflow_job_id: str, workspace_id: str, workspace_dir: Path, workspace_base_mets: str, workflow_script_path: Path, input_file_grp: str, nf_process_forks: int, ws_pages_amount: int, - use_mets_server: bool, file_groups_to_remove: str, cpus: int, ram: int, partition: str + use_mets_server: bool, nf_executable_steps: List[str], file_groups_to_remove: str, cpus: int, ram: int, + partition: str ) -> str: if self.test_sbatch: job_deadline_time = HPC_JOB_DEADLINE_TIME_TEST @@ -232,8 +235,9 @@ def prepare_and_trigger_slurm_job( workflow_job_id=workflow_job_id, nextflow_script_path=workflow_script_path, workspace_id=workspace_id, mets_basename=workspace_base_mets, input_file_grp=input_file_grp, nf_process_forks=nf_process_forks, ws_pages_amount=ws_pages_amount, - use_mets_server=use_mets_server, file_groups_to_remove=file_groups_to_remove, cpus=cpus, ram=ram, - job_deadline_time=job_deadline_time, partition=partition, qos=qos) + use_mets_server=use_mets_server, nf_executable_steps=nf_executable_steps, + file_groups_to_remove=file_groups_to_remove, cpus=cpus, ram=ram, job_deadline_time=job_deadline_time, + partition=partition, qos=qos) except Exception as error: db_stats = sync_db_increase_processing_stats( find_user_id=self.current_message_user_id, pages_failed=ws_pages_amount) diff --git a/src/server/operandi_server/routers/workflow.py b/src/server/operandi_server/routers/workflow.py index ab26bf00..5a7a1121 100644 --- a/src/server/operandi_server/routers/workflow.py +++ b/src/server/operandi_server/routers/workflow.py @@ -12,11 +12,12 @@ from fastapi.security import HTTPBasic, HTTPBasicCredentials from starlette.status import HTTP_404_NOT_FOUND -from operandi_utils import get_nf_workflows_dir +from operandi_utils import get_nf_wfs_dir, get_ocrd_process_wfs_dir from operandi_utils.constants import AccountType, ServerApiTag, StateJob, StateWorkspace from operandi_utils.database import ( db_create_workflow, db_create_workflow_job, db_get_hpc_slurm_job, db_get_workflow, db_update_workspace, db_increase_processing_stats_with_handling) +from operandi_utils.oton import OTONConverter from operandi_utils.rabbitmq import ( get_connection_publisher, RABBITMQ_QUEUE_JOB_STATUSES, RABBITMQ_QUEUE_HARVESTER, RABBITMQ_QUEUE_USERS) from operandi_server.constants import ( @@ -30,7 
+31,7 @@ get_db_workflow_job_with_handling, get_db_workflow_with_handling, nf_script_uses_mets_server_with_handling, - validate_oton_with_handling + validate_oton_with_handling, nf_script_executable_steps_with_handling ) from .workspace_utils import check_if_file_group_exists_with_handling, get_db_workspace_with_handling from .user import RouterUser @@ -133,10 +134,32 @@ async def _push_status_request_to_rabbitmq(self, job_id: str): self.logger.error(f"{message}, error: {error}") raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=message) - async def insert_production_workflows(self, production_workflows_dir: Path = get_nf_workflows_dir()): + async def produce_production_workflows( + self, + ocrd_process_wf_dir: Path = get_ocrd_process_wfs_dir(), + production_nf_wfs_dir: Path = get_nf_wfs_dir() + ): + oton_converter = OTONConverter() + for path in ocrd_process_wf_dir.iterdir(): + if not path.is_file(): + self.logger.info(f"Skipping non-file path: {path}") + continue + if path.suffix != '.txt': + self.logger.info(f"Skipping non .txt extension file path: {path}") + continue + # path.stem -> file_name + # path.name -> file_name.ext + output_path = Path(production_nf_wfs_dir, f"{path.stem}.nf") + oton_converter.convert_oton( + input_path=path, output_path=str(output_path), environment="apptainer", with_mets_server=False) + output_path = Path(production_nf_wfs_dir, f"{path.stem}_with_MS.nf") + oton_converter.convert_oton( + input_path=path, output_path=str(output_path), environment="apptainer", with_mets_server=True) + + async def insert_production_workflows(self, production_nf_wfs_dir: Path = get_nf_wfs_dir()): wf_detail = "Workflow provided by the Operandi Server" - self.logger.info(f"Inserting production workflows for Operandi from: {production_workflows_dir}") - for path in production_workflows_dir.iterdir(): + self.logger.info(f"Inserting production workflows for Operandi from: {production_nf_wfs_dir}") + for path in production_nf_wfs_dir.iterdir(): if not path.is_file(): self.logger.info(f"Skipping non-file path: {path}") continue @@ -150,11 +173,14 @@ async def insert_production_workflows(self, production_workflows_dir: Path = get nf_script_dest = join(workflow_dir, path.name) copyfile(src=path, dst=nf_script_dest) uses_mets_server = await nf_script_uses_mets_server_with_handling(self.logger, nf_script_dest) - self.logger.info(f"Inserting: {workflow_id}, uses_mets_server: {uses_mets_server}, script path: {nf_script_dest}") + executable_steps = await nf_script_executable_steps_with_handling(self.logger, nf_script_dest) + self.logger.info( + f"Inserting: {workflow_id}, uses_mets_server: {uses_mets_server}, script path: {nf_script_dest}") await db_create_workflow( user_id="Operandi Server", workflow_id=workflow_id, workflow_dir=workflow_dir, workflow_script_path=nf_script_dest, - workflow_script_base=path.name, uses_mets_server=uses_mets_server, details=wf_detail) + workflow_script_base=path.name, uses_mets_server=uses_mets_server, executable_steps=executable_steps, + details=wf_detail) self.production_workflows.append(workflow_id) async def list_workflows(self, auth: HTTPBasicCredentials = Depends(HTTPBasic())) -> List[WorkflowRsrc]: @@ -204,10 +230,11 @@ async def upload_workflow_script( self.logger.error(f"{message}, error: {error}") raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message) uses_mets_server = await nf_script_uses_mets_server_with_handling(self.logger, nf_script_dest) + executable_steps = await 
nf_script_executable_steps_with_handling(self.logger, nf_script_dest) db_workflow = await db_create_workflow( user_id=py_user_action.user_id, workflow_id=workflow_id, workflow_dir=workflow_dir, workflow_script_path=nf_script_dest, workflow_script_base=nextflow_script.filename, - uses_mets_server=uses_mets_server, details=details) + uses_mets_server=uses_mets_server, executable_steps=executable_steps, details=details) return WorkflowRsrc.from_db_workflow(db_workflow) async def update_workflow_script( @@ -239,10 +266,11 @@ async def update_workflow_script( self.logger.error(f"{message}, error: {error}") raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message) uses_mets_server = await nf_script_uses_mets_server_with_handling(self.logger, nf_script_dest) + executable_steps = await nf_script_executable_steps_with_handling(self.logger, nf_script_dest) db_workflow = await db_create_workflow( user_id=py_user_action.user_id, workflow_id=workflow_id, workflow_dir=workflow_dir, workflow_script_path=nf_script_dest, workflow_script_base=nextflow_script.filename, - uses_mets_server=uses_mets_server, details=details) + uses_mets_server=uses_mets_server, executable_steps=executable_steps, details=details) return WorkflowRsrc.from_db_workflow(db_workflow) async def get_workflow_job_status( @@ -461,4 +489,4 @@ async def convert_txt_to_nextflow( await validate_oton_with_handling(self.logger, ocrd_process_txt) await convert_oton_with_handling(self.logger, ocrd_process_txt, nf_script_dest, environment, with_mets_server) - return FileResponse(nf_script_dest, filename=f'{oton_id}.nf') + return FileResponse(nf_script_dest, filename=f'{oton_id}.nf', media_type="application/txt-file") diff --git a/src/server/operandi_server/routers/workflow_utils.py b/src/server/operandi_server/routers/workflow_utils.py index 49d2a517..39a837fa 100644 --- a/src/server/operandi_server/routers/workflow_utils.py +++ b/src/server/operandi_server/routers/workflow_utils.py @@ -41,7 +41,6 @@ async def get_db_workflow_job_with_handling(logger, job_id: str, check_local_exi raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message) return db_workflow_job - async def nf_script_uses_mets_server_with_handling( logger, nf_script_path: str, search_string: str = PARAMS_KEY_METS_SOCKET_PATH ) -> bool: @@ -58,6 +57,33 @@ async def nf_script_uses_mets_server_with_handling( logger.error(f"{message}, error: {error}") raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=message) +async def nf_script_executable_steps_with_handling(logger, nf_script_path: str) -> List[str]: + processor_executables: List[str] = [] + try: + with open(nf_script_path) as nf_file: + line = nf_file.readline() + for word in line.split(' '): + if "ocrd-" in word: + processor_executables.append(word) + break + except Exception as error: + message = "Failed to identify processor executables in the provided Nextflow workflow." 
+ logger.error(f"{message}, error: {error}") + raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=message) + + """ + apptainer_images: List[str] = [] + try: + for executable in processor_executables: + apptainer_images.append(OCRD_PROCESSOR_EXECUTABLE_TO_IMAGE[executable]) + except Exception as error: + message = "Failed to produce apptainer image names from the processor executables list" + logger.error(f"{message}, error: {error}") + raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=message) + return apptainer_images + """ + + return processor_executables async def validate_oton_with_handling(logger, ocrd_process_txt_path: str): try: diff --git a/src/server/operandi_server/server.py b/src/server/operandi_server/server.py index c413cf2f..6c8eb873 100644 --- a/src/server/operandi_server/server.py +++ b/src/server/operandi_server/server.py @@ -114,6 +114,7 @@ async def include_webapi_routers(self): self.include_router(RouterDiscovery().router) self.include_router(RouterUser().router) workflow_router = RouterWorkflow() + await workflow_router.produce_production_workflows() await workflow_router.insert_production_workflows() self.include_router(workflow_router.router) self.include_router(RouterWorkspace().router) diff --git a/src/utils/operandi_utils/__init__.py b/src/utils/operandi_utils/__init__.py index 6cbc04c8..0fd74c2f 100644 --- a/src/utils/operandi_utils/__init__.py +++ b/src/utils/operandi_utils/__init__.py @@ -4,7 +4,8 @@ "is_url_responsive", "generate_id", "get_log_file_path_prefix", - "get_nf_workflows_dir", + "get_nf_wfs_dir", + "get_ocrd_process_wfs_dir", "make_zip_archive", "receive_file", "reconfigure_all_loggers", @@ -25,7 +26,8 @@ download_mets_file, is_url_responsive, generate_id, - get_nf_workflows_dir, + get_nf_wfs_dir, + get_ocrd_process_wfs_dir, receive_file, make_zip_archive, unpack_zip_archive, diff --git a/src/utils/operandi_utils/constants.py b/src/utils/operandi_utils/constants.py index 57fe6e17..c38f33cb 100644 --- a/src/utils/operandi_utils/constants.py +++ b/src/utils/operandi_utils/constants.py @@ -18,6 +18,7 @@ "LOG_LEVEL_SERVER", "LOG_LEVEL_WORKER", "MODULE_TYPES", + "OCRD_PROCESSOR_EXECUTABLE_TO_IMAGE", "OLA_HD_BAG_ENDPOINT", "OLA_HD_USER", "OLA_HD_PASSWORD", @@ -167,7 +168,7 @@ class StateWorkspace(str, Enum): UNSET = "UNSET" # TODO: Find a more optimal way of achieving this dynamically -OCRD_PROCESSOR_NAME_TO_IMAGE = { +OCRD_PROCESSOR_EXECUTABLE_TO_IMAGE = { "ocrd": "ocrd_core.sif", "ocrd-tesserocr-crop": "ocrd_tesserocr.sif", "ocrd-tesserocr-deskew": "ocrd_tesserocr.sif", diff --git a/src/utils/operandi_utils/database/db_workflow.py b/src/utils/operandi_utils/database/db_workflow.py index 03e0d074..b012c16f 100644 --- a/src/utils/operandi_utils/database/db_workflow.py +++ b/src/utils/operandi_utils/database/db_workflow.py @@ -1,4 +1,5 @@ from datetime import datetime +from typing import List from operandi_utils import call_sync from .models import DBWorkflow @@ -6,7 +7,7 @@ # TODO: This also updates to satisfy the PUT method in the Workflow Manager - fix this async def db_create_workflow( user_id: str, workflow_id: str, workflow_dir: str, workflow_script_base: str, workflow_script_path: str, - uses_mets_server: bool, details: str = "Workflow" + uses_mets_server: bool, executable_steps: List[str], details: str = "Workflow" ) -> DBWorkflow: try: db_workflow = await db_get_workflow(workflow_id) @@ -18,6 +19,7 @@ async def db_create_workflow( workflow_script_base=workflow_script_base, 
workflow_script_path=workflow_script_path, uses_mets_server=uses_mets_server, + executable_steps=executable_steps, datetime=datetime.now(), details=details ) @@ -28,6 +30,7 @@ async def db_create_workflow( db_workflow.workflow_script_base = workflow_script_base db_workflow.workflow_script_path = workflow_script_path db_workflow.uses_mets_server = uses_mets_server + db_workflow.executable_steps = executable_steps db_workflow.details = details await db_workflow.save() return db_workflow @@ -36,10 +39,11 @@ async def db_create_workflow( @call_sync async def sync_db_create_workflow( user_id: str, workflow_id: str, workflow_dir: str, workflow_script_base: str, workflow_script_path: str, - uses_mets_server: bool, details: str = "Workflow" + uses_mets_server: bool, executable_steps: List[str], details: str = "Workflow" ) -> DBWorkflow: return await db_create_workflow( - user_id, workflow_id, workflow_dir, workflow_script_base, workflow_script_path, uses_mets_server, details) + user_id, workflow_id, workflow_dir, workflow_script_base, workflow_script_path, uses_mets_server, + executable_steps, details) async def db_get_workflow(workflow_id: str) -> DBWorkflow: @@ -70,6 +74,8 @@ async def db_update_workflow(find_workflow_id: str, **kwargs) -> DBWorkflow: db_workflow.workflow_script_path = value elif key == "uses_mets_server": db_workflow.uses_mets_server = value + elif key == "executable_steps": + db_workflow.executable_steps = value elif key == "deleted": db_workflow.deleted = value elif key == "details": diff --git a/src/utils/operandi_utils/database/models.py b/src/utils/operandi_utils/database/models.py index d587e589..d885df0e 100644 --- a/src/utils/operandi_utils/database/models.py +++ b/src/utils/operandi_utils/database/models.py @@ -102,6 +102,7 @@ class DBWorkflow(Document): workflow_script_base The name of the nextflow script file workflow_script_path Nextflow workflow file full path on the server uses_mets_server Whether the NF script forwards requests to a workspace mets server + executable_steps A list of ocrd_processor executables deleted Whether the entry has been deleted locally from the server datetime Shows the created date time of the entry details Extra user specified details about this entry @@ -112,6 +113,7 @@ class DBWorkflow(Document): workflow_script_base: str workflow_script_path: str uses_mets_server: bool + executable_steps: List[str] deleted: bool = False datetime = datetime.now() details: Optional[str] diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow.nf b/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow.nf index ca8b2a11..b057341e 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow.nf +++ b/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow.nf @@ -1,250 +1,268 @@ -nextflow.enable.dsl=2 +// This workflow was automatically generated by the v2.17.0 operandi_utils.oton module +nextflow.enable.dsl = 2 -// The values are assigned inside the batch script -// Based on internal values and options provided in the request -params.input_file_group = "null" -params.mets = "null" +params.input_file_group = "OCR-D-IMG" +params.mets_path = "null" params.workspace_dir = "null" -// amount of pages of the workspace params.pages = "null" -params.singularity_wrapper = "null" params.cpus = "null" params.ram = "null" params.forks = params.cpus -// Do not pass these parameters from the caller unless you know what you are doing params.cpus_per_fork = (params.cpus.toInteger() / 
params.forks.toInteger()).intValue() params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toInteger()).intValue()) - -log.info """\ - OPERANDI - HPC - Default Workflow - =========================================== - input_file_group : ${params.input_file_group} - mets : ${params.mets} - workspace_dir : ${params.workspace_dir} - pages : ${params.pages} - singularity_wrapper : ${params.singularity_wrapper} - cpus : ${params.cpus} - ram : ${params.ram} - forks : ${params.forks} - cpus_per_fork : ${params.cpus_per_fork} - ram_per_fork : ${params.ram_per_fork} - """ - .stripIndent() +params.env_wrapper_cmd_core = "null" +params.env_wrapper_cmd_step0 = "null" +params.env_wrapper_cmd_step1 = "null" +params.env_wrapper_cmd_step2 = "null" +params.env_wrapper_cmd_step3 = "null" +params.env_wrapper_cmd_step4 = "null" +params.env_wrapper_cmd_step5 = "null" +params.env_wrapper_cmd_step6 = "null" +params.env_wrapper_cmd_step7 = "null" process split_page_ranges { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: val range_multiplier + output: env mets_file_chunk env current_range_pages + script: - """ - current_range_pages=\$(${params.singularity_wrapper} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) - echo "Current range is: \$current_range_pages" - mets_file_chunk=\$(echo ${params.workspace_dir}/mets_${range_multiplier}.xml) - echo "Mets file chunk path: \$mets_file_chunk" - \$(${params.singularity_wrapper} cp -p ${params.mets} \$mets_file_chunk) - """ + """ + current_range_pages=\$(${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) + echo "Current range is: \$current_range_pages" + mets_file_chunk=\$(echo ${params.workspace_dir}/mets_${range_multiplier}.xml) + echo "Mets file chunk path: \$mets_file_chunk" + \$(${params.env_wrapper_cmd_core} cp -p ${params.mets_path} \$mets_file_chunk) + """ } -process ocrd_cis_ocropy_binarize { +process ocrd_cis_ocropy_binarize_0 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-binarize -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} - """ + """ + ${params.env_wrapper_cmd_step0} ocrd-cis-ocropy-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + """ } -process ocrd_anybaseocr_crop { +process ocrd_anybaseocr_crop_1 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-anybaseocr-crop -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} - """ + """ + ${params.env_wrapper_cmd_step1} ocrd-anybaseocr-crop -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + """ } -process ocrd_skimage_binarize { +process ocrd_skimage_binarize_2 { + debug true maxForks params.forks 
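+    // Concurrency is capped at params.forks; each task gets an equal per-fork share of the requested CPUs and RAM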
cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-skimage-binarize -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} -P method "li" - """ + """ + ${params.env_wrapper_cmd_step2} ocrd-skimage-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"method": "li"}' + """ } -process ocrd_skimage_denoise { +process ocrd_skimage_denoise_3 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-skimage-denoise -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} -P level-of-operation "page" - """ + """ + ${params.env_wrapper_cmd_step3} ocrd-skimage-denoise -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' + """ } -process ocrd_tesserocr_deskew { +process ocrd_tesserocr_deskew_4 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-tesserocr-deskew -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} -P operation_level "page" - """ + """ + ${params.env_wrapper_cmd_step4} ocrd-tesserocr-deskew -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"operation_level": "page"}' + """ } -process ocrd_cis_ocropy_segment { +process ocrd_cis_ocropy_segment_5 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-segment -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} -P level-of-operation "page" - """ + """ + ${params.env_wrapper_cmd_step5} ocrd-cis-ocropy-segment -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' + """ } -process ocrd_cis_ocropy_dewarp { +process ocrd_cis_ocropy_dewarp_6 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-dewarp -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} - """ + """ + ${params.env_wrapper_cmd_step6} ocrd-cis-ocropy-dewarp -w ${workspace_dir} -m ${mets_path} -I 
${input_group} -O ${output_group} + """ } -process ocrd_calamari_recognize { +process ocrd_calamari_recognize_7 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-calamari-recognize -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} -P checkpoint_dir "qurator-gt4histocr-1.0" - """ + """ + ${params.env_wrapper_cmd_step7} ocrd-calamari-recognize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"checkpoint_dir": "qurator-gt4histocr-1.0"}' + """ } process merging_mets { - // Must be a single instance - modifying the main mets file + debug true maxForks 1 + cpus params.cpus_per_fork + memory params.ram_per_fork input: val mets_file_chunk val page_range + script: - """ - ${params.singularity_wrapper} ocrd workspace -d ${params.workspace_dir} merge --force --no-copy-files ${mets_file_chunk} --page-id ${page_range} - ${params.singularity_wrapper} rm ${mets_file_chunk} - """ + """ + ${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} merge --force --no-copy-files ${mets_file_chunk} --page-id ${page_range} + ${params.env_wrapper_cmd_core} rm ${mets_file_chunk} + """ } workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize(split_page_ranges.out[0], split_page_ranges.out[1], params.input_file_group, "OCR-D-BIN") - ocrd_anybaseocr_crop(ocrd_cis_ocropy_binarize.out[0], ocrd_cis_ocropy_binarize.out[1], "OCR-D-BIN", "OCR-D-CROP") - ocrd_skimage_binarize(ocrd_anybaseocr_crop.out[0], ocrd_anybaseocr_crop.out[1], "OCR-D-CROP", "OCR-D-BIN2") - ocrd_skimage_denoise(ocrd_skimage_binarize.out[0], ocrd_skimage_binarize.out[1], "OCR-D-BIN2", "OCR-D-BIN-DENOISE") - ocrd_tesserocr_deskew(ocrd_skimage_denoise.out[0], ocrd_skimage_denoise.out[1], "OCR-D-BIN-DENOISE", "OCR-D-BIN-DENOISE-DESKEW") - ocrd_cis_ocropy_segment(ocrd_tesserocr_deskew.out[0], ocrd_tesserocr_deskew.out[1], "OCR-D-BIN-DENOISE-DESKEW", "OCR-D-SEG") - ocrd_cis_ocropy_dewarp(ocrd_cis_ocropy_segment.out[0], ocrd_cis_ocropy_segment.out[1], "OCR-D-SEG", "OCR-D-SEG-LINE-RESEG-DEWARP") - ocrd_calamari_recognize(ocrd_cis_ocropy_dewarp.out[0], ocrd_cis_ocropy_dewarp.out[1], "OCR-D-SEG-LINE-RESEG-DEWARP", "OCR-D-OCR") - merging_mets(ocrd_calamari_recognize.out[0], ocrd_calamari_recognize.out[1]) + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") + ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1], ocrd_cis_ocropy_binarize_0.out[2], "OCR-D-BIN", "OCR-D-CROP") + ocrd_skimage_binarize_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], ocrd_anybaseocr_crop_1.out[2], "OCR-D-CROP", "OCR-D-BIN2") + ocrd_skimage_denoise_3(ocrd_skimage_binarize_2.out[0], ocrd_skimage_binarize_2.out[1], ocrd_skimage_binarize_2.out[2], "OCR-D-BIN2", "OCR-D-BIN-DENOISE") + ocrd_tesserocr_deskew_4(ocrd_skimage_denoise_3.out[0], ocrd_skimage_denoise_3.out[1], ocrd_skimage_denoise_3.out[2], "OCR-D-BIN-DENOISE", "OCR-D-BIN-DENOISE-DESKEW") + ocrd_cis_ocropy_segment_5(ocrd_tesserocr_deskew_4.out[0], ocrd_tesserocr_deskew_4.out[1], 
ocrd_tesserocr_deskew_4.out[2], "OCR-D-BIN-DENOISE-DESKEW", "OCR-D-SEG") + ocrd_cis_ocropy_dewarp_6(ocrd_cis_ocropy_segment_5.out[0], ocrd_cis_ocropy_segment_5.out[1], ocrd_cis_ocropy_segment_5.out[2], "OCR-D-SEG", "OCR-D-SEG-LINE-RESEG-DEWARP") + ocrd_calamari_recognize_7(ocrd_cis_ocropy_dewarp_6.out[0], ocrd_cis_ocropy_dewarp_6.out[1], ocrd_cis_ocropy_dewarp_6.out[2], "OCR-D-SEG-LINE-RESEG-DEWARP", "OCR-D-OCR") + merging_mets(ocrd_calamari_recognize_7.out[0], ocrd_calamari_recognize_7.out[1]) } diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow_with_MS.nf b/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow_with_MS.nf index 8833a502..590ba3c4 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow_with_MS.nf +++ b/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow_with_MS.nf @@ -1,217 +1,249 @@ -nextflow.enable.dsl=2 +// This workflow was automatically generated by the v2.17.0 operandi_utils.oton module +nextflow.enable.dsl = 2 -// The values are assigned inside the batch script -// Based on internal values and options provided in the request -params.input_file_group = "null" -params.mets = "null" -params.mets_socket = "null" +params.input_file_group = "OCR-D-IMG" +params.mets_path = "null" params.workspace_dir = "null" -// amount of pages of the workspace params.pages = "null" -params.singularity_wrapper = "null" +params.mets_socket_path = "null" params.cpus = "null" params.ram = "null" params.forks = params.cpus -// Do not pass these parameters from the caller unless you know what you are doing params.cpus_per_fork = (params.cpus.toInteger() / params.forks.toInteger()).intValue() params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toInteger()).intValue()) - -log.info """\ - OPERANDI - HPC - Default Workflow with Mets Server - =========================================== - input_file_group : ${params.input_file_group} - mets : ${params.mets} - mets_socket : ${params.mets_socket} - workspace_dir : ${params.workspace_dir} - pages : ${params.pages} - singularity_wrapper : ${params.singularity_wrapper} - cpus : ${params.cpus} - ram : ${params.ram} - forks : ${params.forks} - cpus_per_fork : ${params.cpus_per_fork} - ram_per_fork : ${params.ram_per_fork} - """ - .stripIndent() +params.env_wrapper_cmd_core = "null" +params.env_wrapper_cmd_step0 = "null" +params.env_wrapper_cmd_step1 = "null" +params.env_wrapper_cmd_step2 = "null" +params.env_wrapper_cmd_step3 = "null" +params.env_wrapper_cmd_step4 = "null" +params.env_wrapper_cmd_step5 = "null" +params.env_wrapper_cmd_step6 = "null" +params.env_wrapper_cmd_step7 = "null" process split_page_ranges { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: val range_multiplier + output: + env mets_file_chunk env current_range_pages - shell: - ''' - current_range_pages=$(!{params.singularity_wrapper} ocrd workspace -d !{params.workspace_dir} list-page -f comma-separated -D !{params.forks} -C !{range_multiplier}) - echo "Current range is: $current_range_pages" - ''' + + script: + """ + current_range_pages=\$(${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) + echo "Current range is: \$current_range_pages" + mets_file_chunk=\$(echo ${params.mets_path}) + """ } -process ocrd_cis_ocropy_binarize { +process ocrd_cis_ocropy_binarize_0 { + debug true maxForks params.forks cpus params.cpus_per_fork memory 
params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-binarize -U ${params.mets_socket} -w ${params.workspace_dir} --page-id ${page_range} -m ${params.mets} -I ${input_group} -O ${output_group} - """ + """ + ${params.env_wrapper_cmd_step0} ocrd-cis-ocropy-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + """ } -process ocrd_anybaseocr_crop { +process ocrd_anybaseocr_crop_1 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-anybaseocr-crop -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} - """ + """ + ${params.env_wrapper_cmd_step1} ocrd-anybaseocr-crop -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + """ } -process ocrd_skimage_binarize { +process ocrd_skimage_binarize_2 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-skimage-binarize -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} -P method "li" - """ + """ + ${params.env_wrapper_cmd_step2} ocrd-skimage-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"method": "li"}' + """ } -process ocrd_skimage_denoise { +process ocrd_skimage_denoise_3 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-skimage-denoise -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} -P level-of-operation "page" - """ + """ + ${params.env_wrapper_cmd_step3} ocrd-skimage-denoise -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' + """ } -process ocrd_tesserocr_deskew { +process ocrd_tesserocr_deskew_4 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-tesserocr-deskew -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} -P operation_level "page" - """ + """ + ${params.env_wrapper_cmd_step4} ocrd-tesserocr-deskew -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"operation_level": "page"}' + """ } -process ocrd_cis_ocropy_segment { +process ocrd_cis_ocropy_segment_5 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val 
mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-segment -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} -P level-of-operation "page" - """ + """ + ${params.env_wrapper_cmd_step5} ocrd-cis-ocropy-segment -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' + """ } -process ocrd_cis_ocropy_dewarp { +process ocrd_cis_ocropy_dewarp_6 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-dewarp -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} - """ + """ + ${params.env_wrapper_cmd_step6} ocrd-cis-ocropy-dewarp -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + """ } -process ocrd_calamari_recognize { +process ocrd_calamari_recognize_7 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-calamari-recognize -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} -P checkpoint_dir "qurator-gt4histocr-1.0" - """ + """ + ${params.env_wrapper_cmd_step7} ocrd-calamari-recognize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"checkpoint_dir": "qurator-gt4histocr-1.0"}' + """ } workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize(split_page_ranges.out, params.input_file_group, "OCR-D-BIN") - ocrd_anybaseocr_crop(ocrd_cis_ocropy_binarize.out, "OCR-D-BIN", "OCR-D-CROP") - ocrd_skimage_binarize(ocrd_anybaseocr_crop.out, "OCR-D-CROP", "OCR-D-BIN2") - ocrd_skimage_denoise(ocrd_skimage_binarize.out, "OCR-D-BIN2", "OCR-D-BIN-DENOISE") - ocrd_tesserocr_deskew(ocrd_skimage_denoise.out, "OCR-D-BIN-DENOISE", "OCR-D-BIN-DENOISE-DESKEW") - ocrd_cis_ocropy_segment(ocrd_tesserocr_deskew.out, "OCR-D-BIN-DENOISE-DESKEW", "OCR-D-SEG") - ocrd_cis_ocropy_dewarp(ocrd_cis_ocropy_segment.out, "OCR-D-SEG", "OCR-D-SEG-LINE-RESEG-DEWARP") - ocrd_calamari_recognize(ocrd_cis_ocropy_dewarp.out, "OCR-D-SEG-LINE-RESEG-DEWARP", "OCR-D-OCR") + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") + ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1], ocrd_cis_ocropy_binarize_0.out[2], "OCR-D-BIN", "OCR-D-CROP") + ocrd_skimage_binarize_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], ocrd_anybaseocr_crop_1.out[2], "OCR-D-CROP", "OCR-D-BIN2") + ocrd_skimage_denoise_3(ocrd_skimage_binarize_2.out[0], ocrd_skimage_binarize_2.out[1], ocrd_skimage_binarize_2.out[2], "OCR-D-BIN2", "OCR-D-BIN-DENOISE") + ocrd_tesserocr_deskew_4(ocrd_skimage_denoise_3.out[0], ocrd_skimage_denoise_3.out[1], 
ocrd_skimage_denoise_3.out[2], "OCR-D-BIN-DENOISE", "OCR-D-BIN-DENOISE-DESKEW") + ocrd_cis_ocropy_segment_5(ocrd_tesserocr_deskew_4.out[0], ocrd_tesserocr_deskew_4.out[1], ocrd_tesserocr_deskew_4.out[2], "OCR-D-BIN-DENOISE-DESKEW", "OCR-D-SEG") + ocrd_cis_ocropy_dewarp_6(ocrd_cis_ocropy_segment_5.out[0], ocrd_cis_ocropy_segment_5.out[1], ocrd_cis_ocropy_segment_5.out[2], "OCR-D-SEG", "OCR-D-SEG-LINE-RESEG-DEWARP") + ocrd_calamari_recognize_7(ocrd_cis_ocropy_dewarp_6.out[0], ocrd_cis_ocropy_dewarp_6.out[1], ocrd_cis_ocropy_dewarp_6.out[2], "OCR-D-SEG-LINE-RESEG-DEWARP", "OCR-D-OCR") } diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow.nf b/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow.nf index 57dd0b48..78c6f2c2 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow.nf +++ b/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow.nf @@ -1,294 +1,320 @@ -nextflow.enable.dsl=2 +// This workflow was automatically generated by the v2.17.0 operandi_utils.oton module +nextflow.enable.dsl = 2 -// The values are assigned inside the batch script -// Based on internal values and options provided in the request -params.input_file_group = "null" -params.mets = "null" +params.input_file_group = "OCR-D-IMG" +params.mets_path = "null" params.workspace_dir = "null" -// amount of pages of the workspace params.pages = "null" -params.singularity_wrapper = "null" params.cpus = "null" params.ram = "null" params.forks = params.cpus -// Do not pass these parameters from the caller unless you know what you are doing params.cpus_per_fork = (params.cpus.toInteger() / params.forks.toInteger()).intValue() params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toInteger()).intValue()) - -log.info """\ - OPERANDI - HPC - Odem Workflow - =========================================== - input_file_group : ${params.input_file_group} - mets : ${params.mets} - workspace_dir : ${params.workspace_dir} - pages : ${params.pages} - singularity_wrapper : ${params.singularity_wrapper} - cpus : ${params.cpus} - ram : ${params.ram} - forks : ${params.forks} - cpus_per_fork : ${params.cpus_per_fork} - ram_per_fork : ${params.ram_per_fork} - """ - .stripIndent() +params.env_wrapper_cmd_core = "null" +params.env_wrapper_cmd_step0 = "null" +params.env_wrapper_cmd_step1 = "null" +params.env_wrapper_cmd_step2 = "null" +params.env_wrapper_cmd_step3 = "null" +params.env_wrapper_cmd_step4 = "null" +params.env_wrapper_cmd_step5 = "null" +params.env_wrapper_cmd_step6 = "null" +params.env_wrapper_cmd_step7 = "null" +params.env_wrapper_cmd_step8 = "null" +params.env_wrapper_cmd_step9 = "null" process split_page_ranges { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: val range_multiplier + output: env mets_file_chunk env current_range_pages + script: - """ - current_range_pages=\$(${params.singularity_wrapper} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) - echo "Current range is: \$current_range_pages" - mets_file_chunk=\$(echo ${params.workspace_dir}/mets_${range_multiplier}.xml) - echo "Mets file chunk path: \$mets_file_chunk" - \$(${params.singularity_wrapper} cp -p ${params.mets} \$mets_file_chunk) - """ + """ + current_range_pages=\$(${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) + echo "Current range is: \$current_range_pages" + 
mets_file_chunk=\$(echo ${params.workspace_dir}/mets_${range_multiplier}.xml) + echo "Mets file chunk path: \$mets_file_chunk" + \$(${params.env_wrapper_cmd_core} cp -p ${params.mets_path} \$mets_file_chunk) + """ } process ocrd_cis_ocropy_binarize_0 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-binarize -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} -P dpi 300 - """ + """ + ${params.env_wrapper_cmd_step0} ocrd-cis-ocropy-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"dpi": 300}' + """ } process ocrd_anybaseocr_crop_1 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-anybaseocr-crop -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} -P dpi 300 - """ + """ + ${params.env_wrapper_cmd_step1} ocrd-anybaseocr-crop -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"dpi": 300}' + """ } process ocrd_cis_ocropy_denoise_2 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-denoise -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} -P dpi 300 - """ + """ + ${params.env_wrapper_cmd_step2} ocrd-cis-ocropy-denoise -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"dpi": 300}' + """ } process ocrd_cis_ocropy_deskew_3 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-deskew -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} -P level-of-operation page - """ + """ + ${params.env_wrapper_cmd_step3} ocrd-cis-ocropy-deskew -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' + """ } process ocrd_tesserocr_segment_region_4 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-tesserocr-segment-region -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} -P padding 5.0 
-P find_tables false -P dpi 300 - """ + """ + ${params.env_wrapper_cmd_step4} ocrd-tesserocr-segment-region -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"padding": 5.0, "find_tables": false, "dpi": 300}' + """ } process ocrd_segment_repair_5 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-segment-repair -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} -P plausibilize true -P plausibilize_merge_min_overlap 0.7 - """ + """ + ${params.env_wrapper_cmd_step5} ocrd-segment-repair -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"plausibilize": true, "plausibilize_merge_min_overlap": 0.7}' + """ } process ocrd_cis_ocropy_clip_6 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-clip -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} - """ + """ + ${params.env_wrapper_cmd_step6} ocrd-cis-ocropy-clip -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + """ } process ocrd_cis_ocropy_segment_7 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-segment -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} -P dpi 300 - """ + """ + ${params.env_wrapper_cmd_step7} ocrd-cis-ocropy-segment -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"dpi": 300}' + """ } process ocrd_cis_ocropy_dewarp_8 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-dewarp -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} - """ + """ + ${params.env_wrapper_cmd_step8} ocrd-cis-ocropy-dewarp -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + """ } process ocrd_tesserocr_recognize_9 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-tesserocr-recognize -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} -P 
model Fraktur - """ + """ + ${params.env_wrapper_cmd_step9} ocrd-tesserocr-recognize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"model": "Fraktur"}' + """ } process merging_mets { - // Must be a single instance - modifying the main mets file + debug true maxForks 1 + cpus params.cpus_per_fork + memory params.ram_per_fork input: val mets_file_chunk val page_range + script: - """ - ${params.singularity_wrapper} ocrd workspace -d ${params.workspace_dir} merge --force --no-copy-files ${mets_file_chunk} --page-id ${page_range} - ${params.singularity_wrapper} rm ${mets_file_chunk} - """ + """ + ${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} merge --force --no-copy-files ${mets_file_chunk} --page-id ${page_range} + ${params.env_wrapper_cmd_core} rm ${mets_file_chunk} + """ } workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.input_file_group, "OCR-D-BINPAGE") - ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1], "OCR-D-BINPAGE", "OCR-D-SEG-PAGE-ANYOCR") - ocrd_cis_ocropy_denoise_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], "OCR-D-SEG-PAGE-ANYOCR", "OCR-D-DENOISE-OCROPY") - ocrd_cis_ocropy_deskew_3(ocrd_cis_ocropy_denoise_2.out[0], ocrd_cis_ocropy_denoise_2.out[1], "OCR-D-DENOISE-OCROPY", "OCR-D-DESKEW-OCROPY") - ocrd_tesserocr_segment_region_4(ocrd_cis_ocropy_deskew_3.out[0], ocrd_cis_ocropy_deskew_3.out[1], "OCR-D-DESKEW-OCROPY", "OCR-D-SEG-BLOCK-TESSERACT") - ocrd_segment_repair_5(ocrd_tesserocr_segment_region_4.out[0], ocrd_tesserocr_segment_region_4.out[1], "OCR-D-SEG-BLOCK-TESSERACT", "OCR-D-SEGMENT-REPAIR") - ocrd_cis_ocropy_clip_6(ocrd_segment_repair_5.out[0], ocrd_segment_repair_5.out[1], "OCR-D-SEGMENT-REPAIR", "OCR-D-CLIP") - ocrd_cis_ocropy_segment_7(ocrd_cis_ocropy_clip_6.out[0], ocrd_cis_ocropy_clip_6.out[1], "OCR-D-CLIP", "OCR-D-SEGMENT-OCROPY") - ocrd_cis_ocropy_dewarp_8(ocrd_cis_ocropy_segment_7.out[0], ocrd_cis_ocropy_segment_7.out[1], "OCR-D-SEGMENT-OCROPY", "OCR-D-DEWARP") - ocrd_tesserocr_recognize_9(ocrd_cis_ocropy_dewarp_8.out[0], ocrd_cis_ocropy_dewarp_8.out[1], "OCR-D-DEWARP", "OCR-D-OCR") + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BINPAGE") + ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1], ocrd_cis_ocropy_binarize_0.out[2], "OCR-D-BINPAGE", "OCR-D-SEG-PAGE-ANYOCR") + ocrd_cis_ocropy_denoise_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], ocrd_anybaseocr_crop_1.out[2], "OCR-D-SEG-PAGE-ANYOCR", "OCR-D-DENOISE-OCROPY") + ocrd_cis_ocropy_deskew_3(ocrd_cis_ocropy_denoise_2.out[0], ocrd_cis_ocropy_denoise_2.out[1], ocrd_cis_ocropy_denoise_2.out[2], "OCR-D-DENOISE-OCROPY", "OCR-D-DESKEW-OCROPY") + ocrd_tesserocr_segment_region_4(ocrd_cis_ocropy_deskew_3.out[0], ocrd_cis_ocropy_deskew_3.out[1], ocrd_cis_ocropy_deskew_3.out[2], "OCR-D-DESKEW-OCROPY", "OCR-D-SEG-BLOCK-TESSERACT") + ocrd_segment_repair_5(ocrd_tesserocr_segment_region_4.out[0], ocrd_tesserocr_segment_region_4.out[1], ocrd_tesserocr_segment_region_4.out[2], "OCR-D-SEG-BLOCK-TESSERACT", "OCR-D-SEGMENT-REPAIR") + ocrd_cis_ocropy_clip_6(ocrd_segment_repair_5.out[0], ocrd_segment_repair_5.out[1], ocrd_segment_repair_5.out[2], "OCR-D-SEGMENT-REPAIR", "OCR-D-CLIP") + 
ocrd_cis_ocropy_segment_7(ocrd_cis_ocropy_clip_6.out[0], ocrd_cis_ocropy_clip_6.out[1], ocrd_cis_ocropy_clip_6.out[2], "OCR-D-CLIP", "OCR-D-SEGMENT-OCROPY") + ocrd_cis_ocropy_dewarp_8(ocrd_cis_ocropy_segment_7.out[0], ocrd_cis_ocropy_segment_7.out[1], ocrd_cis_ocropy_segment_7.out[2], "OCR-D-SEGMENT-OCROPY", "OCR-D-DEWARP") + ocrd_tesserocr_recognize_9(ocrd_cis_ocropy_dewarp_8.out[0], ocrd_cis_ocropy_dewarp_8.out[1], ocrd_cis_ocropy_dewarp_8.out[2], "OCR-D-DEWARP", "OCR-D-OCR") merging_mets(ocrd_tesserocr_recognize_9.out[0], ocrd_tesserocr_recognize_9.out[1]) } diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow_with_MS.nf b/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow_with_MS.nf index a3235f59..240ae719 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow_with_MS.nf +++ b/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow_with_MS.nf @@ -1,257 +1,301 @@ -nextflow.enable.dsl=2 +// This workflow was automatically generated by the v2.17.0 operandi_utils.oton module +nextflow.enable.dsl = 2 -// The values are assigned inside the batch script -// Based on internal values and options provided in the request -params.input_file_group = "null" -params.mets = "null" -params.mets_socket = "null" +params.input_file_group = "OCR-D-IMG" +params.mets_path = "null" params.workspace_dir = "null" -// amount of pages of the workspace params.pages = "null" -params.singularity_wrapper = "null" +params.mets_socket_path = "null" params.cpus = "null" params.ram = "null" params.forks = params.cpus -// Do not pass these parameters from the caller unless you know what you are doing params.cpus_per_fork = (params.cpus.toInteger() / params.forks.toInteger()).intValue() params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toInteger()).intValue()) - -log.info """\ - OPERANDI - HPC - Odem Workflow with Mets Server - =========================================== - input_file_group : ${params.input_file_group} - mets : ${params.mets} - mets_socket : ${params.mets_socket} - workspace_dir : ${params.workspace_dir} - pages : ${params.pages} - singularity_wrapper : ${params.singularity_wrapper} - cpus : ${params.cpus} - ram : ${params.ram} - forks : ${params.forks} - cpus_per_fork : ${params.cpus_per_fork} - ram_per_fork : ${params.ram_per_fork} - """ - .stripIndent() +params.env_wrapper_cmd_core = "null" +params.env_wrapper_cmd_step0 = "null" +params.env_wrapper_cmd_step1 = "null" +params.env_wrapper_cmd_step2 = "null" +params.env_wrapper_cmd_step3 = "null" +params.env_wrapper_cmd_step4 = "null" +params.env_wrapper_cmd_step5 = "null" +params.env_wrapper_cmd_step6 = "null" +params.env_wrapper_cmd_step7 = "null" +params.env_wrapper_cmd_step8 = "null" +params.env_wrapper_cmd_step9 = "null" process split_page_ranges { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: val range_multiplier + output: + env mets_file_chunk env current_range_pages - shell: - ''' - current_range_pages=$(!{params.singularity_wrapper} ocrd workspace -d !{params.workspace_dir} list-page -f comma-separated -D !{params.forks} -C !{range_multiplier}) - echo "Current range is: $current_range_pages" - ''' + + script: + """ + current_range_pages=\$(${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) + echo "Current range is: \$current_range_pages" + mets_file_chunk=\$(echo ${params.mets_path}) + """ } process 
ocrd_cis_ocropy_binarize_0 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-binarize -U ${params.mets_socket} -w ${params.workspace_dir} --page-id ${page_range} -m ${params.mets} -I ${input_group} -O ${output_group} -P dpi 300 - """ + """ + ${params.env_wrapper_cmd_step0} ocrd-cis-ocropy-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"dpi": 300}' + """ } process ocrd_anybaseocr_crop_1 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-anybaseocr-crop -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} -P dpi 300 - """ + """ + ${params.env_wrapper_cmd_step1} ocrd-anybaseocr-crop -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"dpi": 300}' + """ } process ocrd_cis_ocropy_denoise_2 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-denoise -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} -P dpi 300 - """ + """ + ${params.env_wrapper_cmd_step2} ocrd-cis-ocropy-denoise -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"dpi": 300}' + """ } process ocrd_cis_ocropy_deskew_3 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-deskew -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} -P level-of-operation page - """ + """ + ${params.env_wrapper_cmd_step3} ocrd-cis-ocropy-deskew -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"level-of-operation": "page"}' + """ } process ocrd_tesserocr_segment_region_4 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-tesserocr-segment-region -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} -P padding 5.0 -P find_tables false -P dpi 300 - """ + """ + ${params.env_wrapper_cmd_step4} ocrd-tesserocr-segment-region -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"padding": 5.0, "find_tables": false, "dpi": 300}' + """ } process ocrd_segment_repair_5 { + debug true maxForks params.forks cpus params.cpus_per_fork 
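+    // memory is the per-fork share: the total requested RAM in GB divided by params.forks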
memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-segment-repair -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} -P plausibilize true -P plausibilize_merge_min_overlap 0.7 - """ + """ + ${params.env_wrapper_cmd_step5} ocrd-segment-repair -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"plausibilize": true, "plausibilize_merge_min_overlap": 0.7}' + """ } process ocrd_cis_ocropy_clip_6 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-clip -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} - """ + """ + ${params.env_wrapper_cmd_step6} ocrd-cis-ocropy-clip -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + """ } process ocrd_cis_ocropy_segment_7 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-segment -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} -P dpi 300 - """ + """ + ${params.env_wrapper_cmd_step7} ocrd-cis-ocropy-segment -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"dpi": 300}' + """ } process ocrd_cis_ocropy_dewarp_8 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-dewarp -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} - """ + """ + ${params.env_wrapper_cmd_step8} ocrd-cis-ocropy-dewarp -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + """ } process ocrd_tesserocr_recognize_9 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + output: + val mets_path val page_range + val workspace_dir script: - """ - ${params.singularity_wrapper} ocrd-tesserocr-recognize -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} -P model Fraktur - """ + """ + ${params.env_wrapper_cmd_step9} ocrd-tesserocr-recognize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"model": "Fraktur"}' + """ } workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize_0(split_page_ranges.out, params.input_file_group, "OCR-D-BINPAGE") - 
ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out, "OCR-D-BINPAGE", "OCR-D-SEG-PAGE-ANYOCR") - ocrd_cis_ocropy_denoise_2(ocrd_anybaseocr_crop_1.out, "OCR-D-SEG-PAGE-ANYOCR", "OCR-D-DENOISE-OCROPY") - ocrd_cis_ocropy_deskew_3(ocrd_cis_ocropy_denoise_2.out, "OCR-D-DENOISE-OCROPY", "OCR-D-DESKEW-OCROPY") - ocrd_tesserocr_segment_region_4(ocrd_cis_ocropy_deskew_3.out, "OCR-D-DESKEW-OCROPY", "OCR-D-SEG-BLOCK-TESSERACT") - ocrd_segment_repair_5(ocrd_tesserocr_segment_region_4.out, "OCR-D-SEG-BLOCK-TESSERACT", "OCR-D-SEGMENT-REPAIR") - ocrd_cis_ocropy_clip_6(ocrd_segment_repair_5.out, "OCR-D-SEGMENT-REPAIR", "OCR-D-CLIP") - ocrd_cis_ocropy_segment_7(ocrd_cis_ocropy_clip_6.out, "OCR-D-CLIP", "OCR-D-SEGMENT-OCROPY") - ocrd_cis_ocropy_dewarp_8(ocrd_cis_ocropy_segment_7.out, "OCR-D-SEGMENT-OCROPY", "OCR-D-DEWARP") - ocrd_tesserocr_recognize_9(ocrd_cis_ocropy_dewarp_8.out, "OCR-D-DEWARP", "OCR-D-OCR") + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BINPAGE") + ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1], ocrd_cis_ocropy_binarize_0.out[2], "OCR-D-BINPAGE", "OCR-D-SEG-PAGE-ANYOCR") + ocrd_cis_ocropy_denoise_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], ocrd_anybaseocr_crop_1.out[2], "OCR-D-SEG-PAGE-ANYOCR", "OCR-D-DENOISE-OCROPY") + ocrd_cis_ocropy_deskew_3(ocrd_cis_ocropy_denoise_2.out[0], ocrd_cis_ocropy_denoise_2.out[1], ocrd_cis_ocropy_denoise_2.out[2], "OCR-D-DENOISE-OCROPY", "OCR-D-DESKEW-OCROPY") + ocrd_tesserocr_segment_region_4(ocrd_cis_ocropy_deskew_3.out[0], ocrd_cis_ocropy_deskew_3.out[1], ocrd_cis_ocropy_deskew_3.out[2], "OCR-D-DESKEW-OCROPY", "OCR-D-SEG-BLOCK-TESSERACT") + ocrd_segment_repair_5(ocrd_tesserocr_segment_region_4.out[0], ocrd_tesserocr_segment_region_4.out[1], ocrd_tesserocr_segment_region_4.out[2], "OCR-D-SEG-BLOCK-TESSERACT", "OCR-D-SEGMENT-REPAIR") + ocrd_cis_ocropy_clip_6(ocrd_segment_repair_5.out[0], ocrd_segment_repair_5.out[1], ocrd_segment_repair_5.out[2], "OCR-D-SEGMENT-REPAIR", "OCR-D-CLIP") + ocrd_cis_ocropy_segment_7(ocrd_cis_ocropy_clip_6.out[0], ocrd_cis_ocropy_clip_6.out[1], ocrd_cis_ocropy_clip_6.out[2], "OCR-D-CLIP", "OCR-D-SEGMENT-OCROPY") + ocrd_cis_ocropy_dewarp_8(ocrd_cis_ocropy_segment_7.out[0], ocrd_cis_ocropy_segment_7.out[1], ocrd_cis_ocropy_segment_7.out[2], "OCR-D-SEGMENT-OCROPY", "OCR-D-DEWARP") + ocrd_tesserocr_recognize_9(ocrd_cis_ocropy_dewarp_8.out[0], ocrd_cis_ocropy_dewarp_8.out[1], ocrd_cis_ocropy_dewarp_8.out[2], "OCR-D-DEWARP", "OCR-D-OCR") } diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow.nf b/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow.nf index 4230534d..b7d3a235 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow.nf +++ b/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow.nf @@ -1,95 +1,86 @@ -nextflow.enable.dsl=2 +// This workflow was automatically generated by the v2.17.0 operandi_utils.oton module +nextflow.enable.dsl = 2 -// The values are assigned inside the batch script -// Based on internal values and options provided in the request -params.input_file_group = "null" -params.mets = "null" +params.input_file_group = "OCR-D-IMG" +params.mets_path = "null" params.workspace_dir = "null" -// amount of pages of the workspace params.pages = "null" -params.singularity_wrapper = "null" params.cpus = "null" params.ram = "null" params.forks = params.cpus -// Do not pass these parameters from the caller unless you 
know what you are doing params.cpus_per_fork = (params.cpus.toInteger() / params.forks.toInteger()).intValue() params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toInteger()).intValue()) - -log.info """\ - OPERANDI - HPC - SBB Workflow - =========================================== - input_file_group : ${params.input_file_group} - mets : ${params.mets} - workspace_dir : ${params.workspace_dir} - pages : ${params.pages} - singularity_wrapper : ${params.singularity_wrapper} - cpus : ${params.cpus} - ram : ${params.ram} - forks : ${params.forks} - cpus_per_fork : ${params.cpus_per_fork} - ram_per_fork : ${params.ram_per_fork} - """ - .stripIndent() +params.env_wrapper_cmd_core = "null" +params.env_wrapper_cmd_step0 = "null" process split_page_ranges { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: val range_multiplier + output: env mets_file_chunk env current_range_pages + script: - """ - current_range_pages=\$(${params.singularity_wrapper} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) - echo "Current range is: \$current_range_pages" - mets_file_chunk=\$(echo ${params.workspace_dir}/mets_${range_multiplier}.xml) - echo "Mets file chunk path: \$mets_file_chunk" - \$(${params.singularity_wrapper} cp -p ${params.mets} \$mets_file_chunk) - """ + """ + current_range_pages=\$(${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) + echo "Current range is: \$current_range_pages" + mets_file_chunk=\$(echo ${params.workspace_dir}/mets_${range_multiplier}.xml) + echo "Mets file chunk path: \$mets_file_chunk" + \$(${params.env_wrapper_cmd_core} cp -p ${params.mets_path} \$mets_file_chunk) + """ } -process ocrd_tesserocr_recognize { +process ocrd_tesserocr_recognize_0 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir + script: - """ - ${params.singularity_wrapper} ocrd-tesserocr-recognize -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} -P segmentation_level region -P textequiv_level word -P find_tables true -P model deu - """ + """ + ${params.env_wrapper_cmd_step0} ocrd-tesserocr-recognize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"segmentation_level": "region", "textequiv_level": "word", "find_tables": true, "model": "deu"}' + """ } process merging_mets { - // Must be a single instance - modifying the main mets file + debug true maxForks 1 + cpus params.cpus_per_fork + memory params.ram_per_fork input: val mets_file_chunk val page_range + script: - """ - ${params.singularity_wrapper} ocrd workspace -d ${params.workspace_dir} merge --force --no-copy-files ${mets_file_chunk} --page-id ${page_range} - ${params.singularity_wrapper} rm ${mets_file_chunk} - """ + """ + ${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} merge --force --no-copy-files ${mets_file_chunk} --page-id ${page_range} + ${params.env_wrapper_cmd_core} rm ${mets_file_chunk} + """ } workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - 
ocrd_tesserocr_recognize(split_page_ranges.out[0], split_page_ranges.out[1], params.input_file_group, "OCR-D-OCR") - merging_mets(ocrd_tesserocr_recognize.out[0], ocrd_tesserocr_recognize.out[1]) + ocrd_tesserocr_recognize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-OCR") + merging_mets(ocrd_tesserocr_recognize_0.out[0], ocrd_tesserocr_recognize_0.out[1]) } diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow_with_MS.nf b/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow_with_MS.nf index 9c24f34b..389772b5 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow_with_MS.nf +++ b/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow_with_MS.nf @@ -1,74 +1,67 @@ -nextflow.enable.dsl=2 +// This workflow was automatically generated by the v2.17.0 operandi_utils.oton module +nextflow.enable.dsl = 2 -// The values are assigned inside the batch script -// Based on internal values and options provided in the request -params.input_file_group = "null" -params.mets = "null" -params.mets_socket = "null" +params.input_file_group = "OCR-D-IMG" +params.mets_path = "null" params.workspace_dir = "null" -// amount of pages of the workspace params.pages = "null" -params.singularity_wrapper = "null" +params.mets_socket_path = "null" params.cpus = "null" params.ram = "null" params.forks = params.cpus -// Do not pass these parameters from the caller unless you know what you are doing params.cpus_per_fork = (params.cpus.toInteger() / params.forks.toInteger()).intValue() params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toInteger()).intValue()) - -log.info """\ - OPERANDI - HPC - SBB Workflow with Mets Server - =========================================== - input_file_group : ${params.input_file_group} - mets : ${params.mets} - mets_socket : ${params.mets_socket} - workspace_dir : ${params.workspace_dir} - pages : ${params.pages} - singularity_wrapper : ${params.singularity_wrapper} - cpus : ${params.cpus} - ram : ${params.ram} - forks : ${params.forks} - cpus_per_fork : ${params.cpus_per_fork} - ram_per_fork : ${params.ram_per_fork} - """ - .stripIndent() +params.env_wrapper_cmd_core = "null" +params.env_wrapper_cmd_step0 = "null" process split_page_ranges { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: val range_multiplier + output: + env mets_file_chunk env current_range_pages - shell: - ''' - current_range_pages=$(!{params.singularity_wrapper} ocrd workspace -d !{params.workspace_dir} list-page -f comma-separated -D !{params.forks} -C !{range_multiplier}) - echo "Current range is: $current_range_pages" - ''' + + script: + """ + current_range_pages=\$(${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) + echo "Current range is: \$current_range_pages" + mets_file_chunk=\$(echo ${params.mets_path}) + """ } -process ocrd_tesserocr_recognize { +process ocrd_tesserocr_recognize_0 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + + output: + val mets_path + val page_range + val workspace_dir + script: - """ - ${params.singularity_wrapper} ocrd-tesserocr-recognize -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O 
${output_group} -P segmentation_level region -P textequiv_level word -P find_tables true -P model deu - """ + """ + ${params.env_wrapper_cmd_step0} ocrd-tesserocr-recognize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} -p '{"segmentation_level": "region", "textequiv_level": "word", "find_tables": true, "model": "deu"}' + """ } workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_tesserocr_recognize(split_page_ranges.out[0], params.input_file_group, "OCR-D-BIN") + ocrd_tesserocr_recognize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-OCR") } diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow.nf b/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow.nf index bb001c6d..fc01fceb 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow.nf +++ b/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow.nf @@ -1,95 +1,86 @@ -nextflow.enable.dsl=2 +// This workflow was automatically generated by the v2.17.0 operandi_utils.oton module +nextflow.enable.dsl = 2 -// The values are assigned inside the batch script -// Based on internal values and options provided in the request -params.input_file_group = "null" -params.mets = "null" +params.input_file_group = "OCR-D-IMG" +params.mets_path = "null" params.workspace_dir = "null" -// amount of pages of the workspace params.pages = "null" -params.singularity_wrapper = "null" params.cpus = "null" params.ram = "null" params.forks = params.cpus -// Do not pass these parameters from the caller unless you know what you are doing params.cpus_per_fork = (params.cpus.toInteger() / params.forks.toInteger()).intValue() params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toInteger()).intValue()) - -log.info """\ - OPERANDI - HPC - Template Workflow - =========================================== - input_file_group : ${params.input_file_group} - mets : ${params.mets} - workspace_dir : ${params.workspace_dir} - pages : ${params.pages} - singularity_wrapper : ${params.singularity_wrapper} - cpus : ${params.cpus} - ram : ${params.ram} - forks : ${params.forks} - cpus_per_fork : ${params.cpus_per_fork} - ram_per_fork : ${params.ram_per_fork} - """ - .stripIndent() +params.env_wrapper_cmd_core = "null" +params.env_wrapper_cmd_step0 = "null" process split_page_ranges { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: val range_multiplier + output: env mets_file_chunk env current_range_pages + script: - """ - current_range_pages=\$(${params.singularity_wrapper} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) - echo "Current range is: \$current_range_pages" - mets_file_chunk=\$(echo ${params.workspace_dir}/mets_${range_multiplier}.xml) - echo "Mets file chunk path: \$mets_file_chunk" - \$(${params.singularity_wrapper} cp -p ${params.mets} \$mets_file_chunk) - """ + """ + current_range_pages=\$(${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) + echo "Current range is: \$current_range_pages" + mets_file_chunk=\$(echo ${params.workspace_dir}/mets_${range_multiplier}.xml) + echo "Mets file chunk path: \$mets_file_chunk" + \$(${params.env_wrapper_cmd_core} cp -p ${params.mets_path} \$mets_file_chunk) + """ } 
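The split/process/merge pattern above is shared by all of the regenerated workflows: split_page_ranges asks "ocrd workspace list-page -D <forks> -C <index>" for the index-th of <forks> comma-separated page ranges, each range is then processed against its own METS chunk (or against the shared METS path when a METS server is used), and merging_mets folds the chunk METS files back into the main one. The following is only a rough Python sketch of the partitioning idea, under the assumption that -D/-C select one of N chunks; the actual splitting is done by ocrd workspace, and all names below are illustrative:

    from typing import List

    def split_pages_into_ranges(page_ids: List[str], forks: int, chunk_index: int) -> str:
        # Deal the page list into forks roughly equal chunks and return chunk chunk_index
        # as a comma-separated range, mirroring how split_page_ranges uses -D/-C.
        if not 0 <= chunk_index < forks:
            raise ValueError("chunk_index must be in [0, forks)")
        chunk_size = -(-len(page_ids) // forks)  # ceiling division
        return ",".join(page_ids[chunk_index * chunk_size:(chunk_index + 1) * chunk_size])

    # 8 pages split across 2 forks, as in the HPC tests (ws_pages_amount=8, nf_process_forks=2)
    pages = [f"PHYS_{i:04d}" for i in range(1, 9)]
    print(split_pages_into_ranges(pages, forks=2, chunk_index=0))  # PHYS_0001,...,PHYS_0004
    print(split_pages_into_ranges(pages, forks=2, chunk_index=1))  # PHYS_0005,...,PHYS_0008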
-process ocrd_cis_ocropy_binarize { +process ocrd_cis_ocropy_binarize_0 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: - val mets_file_chunk + val mets_path val page_range + val workspace_dir val input_group val output_group + output: - val mets_file_chunk + val mets_path val page_range + val workspace_dir + script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-binarize -w ${params.workspace_dir} -m ${mets_file_chunk} --page-id ${page_range} -I ${input_group} -O ${output_group} - """ + """ + ${params.env_wrapper_cmd_step0} ocrd-cis-ocropy-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + """ } process merging_mets { - // Must be a single instance - modifying the main mets file + debug true maxForks 1 + cpus params.cpus_per_fork + memory params.ram_per_fork input: val mets_file_chunk val page_range + script: - """ - ${params.singularity_wrapper} ocrd workspace -d ${params.workspace_dir} merge --force --no-copy-files ${mets_file_chunk} --page-id ${page_range} - ${params.singularity_wrapper} rm ${mets_file_chunk} - """ + """ + ${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} merge --force --no-copy-files ${mets_file_chunk} --page-id ${page_range} + ${params.env_wrapper_cmd_core} rm ${mets_file_chunk} + """ } workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize(split_page_ranges.out[0], split_page_ranges.out[1], params.input_file_group, "OCR-D-BIN") - merging_mets(ocrd_cis_ocropy_binarize.out[0], ocrd_cis_ocropy_binarize.out[1]) + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") + merging_mets(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1]) } diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow_with_MS.nf b/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow_with_MS.nf index d89e8ee3..be041784 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow_with_MS.nf +++ b/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow_with_MS.nf @@ -1,74 +1,67 @@ -nextflow.enable.dsl=2 +// This workflow was automatically generated by the v2.17.0 operandi_utils.oton module +nextflow.enable.dsl = 2 -// The values are assigned inside the batch script -// Based on internal values and options provided in the request -params.input_file_group = "null" -params.mets = "null" -params.mets_socket = "null" +params.input_file_group = "OCR-D-IMG" +params.mets_path = "null" params.workspace_dir = "null" -// amount of pages of the workspace params.pages = "null" -params.singularity_wrapper = "null" +params.mets_socket_path = "null" params.cpus = "null" params.ram = "null" params.forks = params.cpus -// Do not pass these parameters from the caller unless you know what you are doing params.cpus_per_fork = (params.cpus.toInteger() / params.forks.toInteger()).intValue() params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toInteger()).intValue()) - -log.info """\ - OPERANDI - HPC - Template Workflow with Mets Server - =========================================== - input_file_group : ${params.input_file_group} - mets : ${params.mets} - mets_socket : ${params.mets_socket} - workspace_dir : ${params.workspace_dir} - pages : ${params.pages} - singularity_wrapper : ${params.singularity_wrapper} - cpus : 
${params.cpus} - ram : ${params.ram} - forks : ${params.forks} - cpus_per_fork : ${params.cpus_per_fork} - ram_per_fork : ${params.ram_per_fork} - """ - .stripIndent() +params.env_wrapper_cmd_core = "null" +params.env_wrapper_cmd_step0 = "null" process split_page_ranges { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: val range_multiplier + output: + env mets_file_chunk env current_range_pages - shell: - ''' - current_range_pages=$(!{params.singularity_wrapper} ocrd workspace -d !{params.workspace_dir} list-page -f comma-separated -D !{params.forks} -C !{range_multiplier}) - echo "Current range is: $current_range_pages" - ''' + + script: + """ + current_range_pages=\$(${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) + echo "Current range is: \$current_range_pages" + mets_file_chunk=\$(echo ${params.mets_path}) + """ } -process ocrd_cis_ocropy_binarize { +process ocrd_cis_ocropy_binarize_0 { + debug true maxForks params.forks cpus params.cpus_per_fork memory params.ram_per_fork - debug true input: + val mets_path val page_range + val workspace_dir val input_group val output_group + + output: + val mets_path + val page_range + val workspace_dir + script: - """ - ${params.singularity_wrapper} ocrd-cis-ocropy-binarize -U ${params.mets_socket} -w ${params.workspace_dir} -m ${params.mets} --page-id ${page_range} -I ${input_group} -O ${output_group} - """ + """ + ${params.env_wrapper_cmd_step0} ocrd-cis-ocropy-binarize -w ${workspace_dir} -m ${mets_path} -I ${input_group} -O ${output_group} + """ } workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize(split_page_ranges.out[0], params.input_file_group, "OCR-D-BIN") + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") } diff --git a/src/utils/operandi_utils/hpc/nhr_executor.py b/src/utils/operandi_utils/hpc/nhr_executor.py index 1684dc9b..b278d1ef 100644 --- a/src/utils/operandi_utils/hpc/nhr_executor.py +++ b/src/utils/operandi_utils/hpc/nhr_executor.py @@ -3,6 +3,7 @@ from os.path import join from pathlib import Path from time import sleep +from typing import List from operandi_utils.constants import StateJobSlurm from .constants import ( @@ -37,8 +38,9 @@ def execute_blocking(self, command, timeout=None, environment=None): def trigger_slurm_job( self, workflow_job_id: str, nextflow_script_path: Path, input_file_grp: str, workspace_id: str, mets_basename: str, nf_process_forks: int, ws_pages_amount: int, use_mets_server: bool, - file_groups_to_remove: str, cpus: int = 2, ram: int = 8, job_deadline_time: str = HPC_JOB_DEADLINE_TIME_TEST, - partition: str = HPC_NHR_JOB_DEFAULT_PARTITION, qos: str = HPC_JOB_QOS_DEFAULT + nf_executable_steps: List[str], file_groups_to_remove: str, cpus: int = 2, ram: int = 8, + job_deadline_time: str = HPC_JOB_DEADLINE_TIME_TEST, partition: str = HPC_NHR_JOB_DEFAULT_PARTITION, + qos: str = HPC_JOB_QOS_DEFAULT ) -> str: if ws_pages_amount < nf_process_forks: self.logger.warning( @@ -74,7 +76,8 @@ def trigger_slurm_job( hpc_nf_script_path=hpc_nf_script_path, hpc_ws_dir=hpc_workspace_dir, bind_ocrd_models=f"{ph_node_dir_ocrd_models}/ocrd-resources:/usr/local/share/ocrd-resources", ph_sif_ocrd_all=ph_node_sif_path_ocrd_all, input_file_grp=input_file_grp, mets_basename=mets_basename, - 
use_mets_server=use_mets_server, ws_pages_amount=ws_pages_amount, cpus=cpus, ram=ram, forks=nf_process_forks + use_mets_server=use_mets_server, nf_executable_steps=nf_executable_steps, ws_pages_amount=ws_pages_amount, + cpus=cpus, ram=ram, forks=nf_process_forks ) regular_args = { @@ -174,21 +177,27 @@ def poll_till_end_slurm_job_state(self, slurm_job_id: str, interval: int = 5, ti @staticmethod def cmd_nextflow_run( hpc_nf_script_path: str, hpc_ws_dir: str, bind_ocrd_models: str, ph_sif_ocrd_all: str, input_file_grp: str, - mets_basename: str, use_mets_server: bool, ws_pages_amount: int, cpus: int, ram: int, forks: int + mets_basename: str, use_mets_server: bool, nf_executable_steps: List[str], ws_pages_amount: int, cpus: int, + ram: int, forks: int ) -> str: - apptainer_cmd = f"apptainer exec --bind {hpc_ws_dir}:/ws_data --bind {bind_ocrd_models}" - apptainer_cmd += f" --env OCRD_METS_CACHING=false {ph_sif_ocrd_all}" - nf_run_command = f"nextflow run {hpc_nf_script_path} -ansi-log false -with-report" nf_run_command += f" --input_file_group {input_file_grp}" - nf_run_command += f" --mets /ws_data/{mets_basename}" + nf_run_command += f" --mets_path /ws_data/{mets_basename}" if use_mets_server: nf_run_command += f" --mets_socket /ws_data/mets_server.sock" nf_run_command += f" --workspace_dir /ws_data" nf_run_command += f" --pages {ws_pages_amount}" # Command wrapper placeholder. Each occurrence is replaced with a single quote ' to avoid json parsing errors + + # TODO: Send actual slim image apptainer cmds here instead of the sif_ocrd_all ph_cmd_wrapper = "PH_CMD_WRAPPER" - nf_run_command += f" --singularity_wrapper {ph_cmd_wrapper}{apptainer_cmd}{ph_cmd_wrapper}" + index = 0 + apptainer_cmd = f"apptainer exec --bind {hpc_ws_dir}:/ws_data --bind {bind_ocrd_models}" + apptainer_cmd += f" --env OCRD_METS_CACHING=false {ph_sif_ocrd_all}" + nf_run_command += f" --env_wrapper_cmd_core {ph_cmd_wrapper}{apptainer_cmd}{ph_cmd_wrapper}" + for executable_step in nf_executable_steps: + nf_run_command += f" --env_wrapper_cmd_step{index} {ph_cmd_wrapper}{apptainer_cmd}{ph_cmd_wrapper}" + index += 1 nf_run_command += f" --cpus {cpus}" nf_run_command += f" --ram {ram}" nf_run_command += f" --forks {forks}" diff --git a/src/utils/operandi_utils/hpc/ocrd_process_workflows/default_workflow.txt b/src/utils/operandi_utils/hpc/ocrd_process_workflows/default_workflow.txt new file mode 100644 index 00000000..b6374302 --- /dev/null +++ b/src/utils/operandi_utils/hpc/ocrd_process_workflows/default_workflow.txt @@ -0,0 +1,9 @@ +ocrd process \ + "cis-ocropy-binarize -I OCR-D-IMG -O OCR-D-BIN" \ + "anybaseocr-crop -I OCR-D-BIN -O OCR-D-CROP" \ + "skimage-binarize -I OCR-D-CROP -O OCR-D-BIN2 -P method li" \ + "skimage-denoise -I OCR-D-BIN2 -O OCR-D-BIN-DENOISE -P level-of-operation page" \ + "tesserocr-deskew -I OCR-D-BIN-DENOISE -O OCR-D-BIN-DENOISE-DESKEW -P operation_level page" \ + "cis-ocropy-segment -I OCR-D-BIN-DENOISE-DESKEW -O OCR-D-SEG -P level-of-operation page" \ + "cis-ocropy-dewarp -I OCR-D-SEG -O OCR-D-SEG-LINE-RESEG-DEWARP" \ + "calamari-recognize -I OCR-D-SEG-LINE-RESEG-DEWARP -O OCR-D-OCR -P checkpoint_dir qurator-gt4histocr-1.0" diff --git a/src/utils/operandi_utils/hpc/ocrd_process_workflows/odem_workflow.txt b/src/utils/operandi_utils/hpc/ocrd_process_workflows/odem_workflow.txt new file mode 100644 index 00000000..63d8e7bb --- /dev/null +++ b/src/utils/operandi_utils/hpc/ocrd_process_workflows/odem_workflow.txt @@ -0,0 +1,11 @@ +ocrd process \ + "cis-ocropy-binarize -I OCR-D-IMG -O OCR-D-BINPAGE -P 
dpi 300" \ + "anybaseocr-crop -I OCR-D-BINPAGE -O OCR-D-SEG-PAGE-ANYOCR -P dpi 300" \ + "cis-ocropy-denoise -I OCR-D-SEG-PAGE-ANYOCR -O OCR-D-DENOISE-OCROPY -P dpi 300" \ + "cis-ocropy-deskew -I OCR-D-DENOISE-OCROPY -O OCR-D-DESKEW-OCROPY -P level-of-operation page" \ + "tesserocr-segment-region -I OCR-D-DESKEW-OCROPY -O OCR-D-SEG-BLOCK-TESSERACT -P padding 5.0 -P find_tables false -P dpi 300" \ + "segment-repair -I OCR-D-SEG-BLOCK-TESSERACT -O OCR-D-SEGMENT-REPAIR -P plausibilize true -P plausibilize_merge_min_overlap 0.7" \ + "cis-ocropy-clip -I OCR-D-SEGMENT-REPAIR -O OCR-D-CLIP" \ + "cis-ocropy-segment -I OCR-D-CLIP -O OCR-D-SEGMENT-OCROPY -P dpi 300" \ + "cis-ocropy-dewarp -I OCR-D-SEGMENT-OCROPY -O OCR-D-DEWARP" \ + "tesserocr-recognize -I OCR-D-DEWARP -O OCR-D-OCR -P model Fraktur" diff --git a/src/utils/operandi_utils/hpc/ocrd_process_workflows/sbb_workflow.txt b/src/utils/operandi_utils/hpc/ocrd_process_workflows/sbb_workflow.txt new file mode 100644 index 00000000..49cdd8ad --- /dev/null +++ b/src/utils/operandi_utils/hpc/ocrd_process_workflows/sbb_workflow.txt @@ -0,0 +1,2 @@ +ocrd process \ + "tesserocr-recognize -I OCR-D-IMG -O OCR-D-OCR -P segmentation_level region -P textequiv_level word -P find_tables true -P model deu" diff --git a/src/utils/operandi_utils/hpc/ocrd_process_workflows/template_workflow.txt b/src/utils/operandi_utils/hpc/ocrd_process_workflows/template_workflow.txt new file mode 100644 index 00000000..2bbc6cda --- /dev/null +++ b/src/utils/operandi_utils/hpc/ocrd_process_workflows/template_workflow.txt @@ -0,0 +1,2 @@ +ocrd process \ + "cis-ocropy-binarize -I OCR-D-IMG -O OCR-D-BIN" diff --git a/src/utils/operandi_utils/oton/nf_block_workflow.py b/src/utils/operandi_utils/oton/nf_block_workflow.py index f6d17374..e37b64ec 100644 --- a/src/utils/operandi_utils/oton/nf_block_workflow.py +++ b/src/utils/operandi_utils/oton/nf_block_workflow.py @@ -37,7 +37,7 @@ def produce_workflow_calls( if previous_nfp is None: workflow_call += ( f'{nf_split_page_ranges.nf_process_name}.out[0], {nf_split_page_ranges.nf_process_name}.out[1], ' - f'{PARAMS_KEY_WORKSPACE_DIR}, {PARAMS_KEY_INPUT_FILE_GRP} "{out_file_grps}"' + f'{PARAMS_KEY_WORKSPACE_DIR}, {PARAMS_KEY_INPUT_FILE_GRP}, "{out_file_grps}"' ) else: workflow_call += ( diff --git a/src/utils/operandi_utils/oton/nf_file_executable.py b/src/utils/operandi_utils/oton/nf_file_executable.py index 43a94744..ef6b7529 100644 --- a/src/utils/operandi_utils/oton/nf_file_executable.py +++ b/src/utils/operandi_utils/oton/nf_file_executable.py @@ -85,8 +85,7 @@ def build_split_page_ranges_process(self, environment: str, with_mets_server: bo block.add_directive(directive='memory', value=PARAMS_KEY_RAM_PER_FORK) block.add_parameter_input(parameter="range_multiplier", parameter_type="val") - if not with_mets_server: - block.add_parameter_output(parameter="mets_file_chunk", parameter_type="env") + block.add_parameter_output(parameter="mets_file_chunk", parameter_type="env") block.add_parameter_output(parameter="current_range_pages", parameter_type="env") PH_RANGE_MULTIPLIER = '${range_multiplier}' @@ -103,6 +102,9 @@ def build_split_page_ranges_process(self, environment: str, with_mets_server: bo script += f"{bash_cmd_ocrd_ws})\n" script += f'{SPACES}{SPACES}echo "Current range is: \\$current_range_pages"\n' + if with_mets_server: + script += f"{SPACES}{SPACES}mets_file_chunk=\\$(echo ${BS[0]}{PARAMS_KEY_METS_PATH}{BS[1]})\n" + if not with_mets_server: script += f"{SPACES}{SPACES}mets_file_chunk=\\$(echo 
${BS[0]}{PARAMS_KEY_WORKSPACE_DIR}{BS[1]}/mets_{PH_RANGE_MULTIPLIER}.xml)\n" script += f'{SPACES}{SPACES}echo "Mets file chunk path: \\$mets_file_chunk"\n' diff --git a/src/utils/operandi_utils/utils.py b/src/utils/operandi_utils/utils.py index b68bbfc3..c9a3cb1e 100644 --- a/src/utils/operandi_utils/utils.py +++ b/src/utils/operandi_utils/utils.py @@ -83,9 +83,11 @@ def is_url_responsive(url: str) -> bool: return False -def get_nf_workflows_dir() -> Path: +def get_nf_wfs_dir() -> Path: return Path(dirname(__file__), "hpc", "nextflow_workflows") +def get_ocrd_process_wfs_dir() -> Path: + return Path(dirname(__file__), "hpc", "ocrd_process_workflows") def generate_id(file_ext: str = None): generated_id = str(uuid4()) diff --git a/tests/assets/oton/constants.py b/tests/assets/oton/constants.py index 91404cda..b8f3e053 100644 --- a/tests/assets/oton/constants.py +++ b/tests/assets/oton/constants.py @@ -27,7 +27,7 @@ main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group "OCR-D-BIN") + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1], ocrd_cis_ocropy_binarize_0.out[2], "OCR-D-BIN", "OCR-D-CROP") ocrd_skimage_binarize_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], ocrd_anybaseocr_crop_1.out[2], "OCR-D-CROP", "OCR-D-BIN2") ocrd_skimage_denoise_3(ocrd_skimage_binarize_2.out[0], ocrd_skimage_binarize_2.out[1], ocrd_skimage_binarize_2.out[2], "OCR-D-BIN2", "OCR-D-BIN-DENOISE") @@ -44,7 +44,7 @@ main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group "OCR-D-BIN") + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1], ocrd_cis_ocropy_binarize_0.out[2], "OCR-D-BIN", "OCR-D-CROP") ocrd_skimage_binarize_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], ocrd_anybaseocr_crop_1.out[2], "OCR-D-CROP", "OCR-D-BIN2") ocrd_skimage_denoise_3(ocrd_skimage_binarize_2.out[0], ocrd_skimage_binarize_2.out[1], ocrd_skimage_binarize_2.out[2], "OCR-D-BIN2", "OCR-D-BIN-DENOISE") @@ -60,7 +60,7 @@ main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group "OCR-D-BIN") + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1], ocrd_cis_ocropy_binarize_0.out[2], "OCR-D-BIN", "OCR-D-CROP") ocrd_skimage_denoise_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], ocrd_anybaseocr_crop_1.out[2], "OCR-D-CROP", "OCR-D-BIN-DENOISE") ocrd_tesserocr_deskew_3(ocrd_skimage_denoise_2.out[0], ocrd_skimage_denoise_2.out[1], ocrd_skimage_denoise_2.out[2], "OCR-D-BIN-DENOISE", "OCR-D-BIN-DENOISE-DESKEW") @@ -76,7 +76,7 @@ main: 
ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_dinglehopper_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group "OCR-D-EVAL-SEG-BLOCK") + ocrd_dinglehopper_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-EVAL-SEG-BLOCK") ocrd_dinglehopper_1(ocrd_dinglehopper_0.out[0], ocrd_dinglehopper_0.out[1], ocrd_dinglehopper_0.out[2], "OCR-D-GT-SEG-LINE,OCR-D-OCR", "OCR-D-EVAL-SEG-LINE") ocrd_dinglehopper_2(ocrd_dinglehopper_1.out[0], ocrd_dinglehopper_1.out[1], ocrd_dinglehopper_1.out[2], "OCR-D-GT-SEG-PAGE,OCR-D-OCR", "OCR-D-EVAL-SEG-PAGE") merging_mets(ocrd_dinglehopper_2.out[0], ocrd_dinglehopper_2.out[1]) @@ -88,7 +88,7 @@ main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_olena_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group "OCR-D-BIN") + ocrd_olena_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") ocrd_anybaseocr_crop_1(ocrd_olena_binarize_0.out[0], ocrd_olena_binarize_0.out[1], ocrd_olena_binarize_0.out[2], "OCR-D-BIN", "OCR-D-CROP") ocrd_olena_binarize_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], ocrd_anybaseocr_crop_1.out[2], "OCR-D-CROP", "OCR-D-BIN2") ocrd_cis_ocropy_denoise_3(ocrd_olena_binarize_2.out[0], ocrd_olena_binarize_2.out[1], ocrd_olena_binarize_2.out[2], "OCR-D-BIN2", "OCR-D-BIN-DENOISE") diff --git a/tests/assets/oton/test_output_nextflow1_apptainer.nf b/tests/assets/oton/test_output_nextflow1_apptainer.nf index 836b69a0..b057341e 100644 --- a/tests/assets/oton/test_output_nextflow1_apptainer.nf +++ b/tests/assets/oton/test_output_nextflow1_apptainer.nf @@ -256,7 +256,7 @@ workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group "OCR-D-BIN") + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1], ocrd_cis_ocropy_binarize_0.out[2], "OCR-D-BIN", "OCR-D-CROP") ocrd_skimage_binarize_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], ocrd_anybaseocr_crop_1.out[2], "OCR-D-CROP", "OCR-D-BIN2") ocrd_skimage_denoise_3(ocrd_skimage_binarize_2.out[0], ocrd_skimage_binarize_2.out[1], ocrd_skimage_binarize_2.out[2], "OCR-D-BIN2", "OCR-D-BIN-DENOISE") diff --git a/tests/assets/oton/test_output_nextflow1_apptainer_with_MS.nf b/tests/assets/oton/test_output_nextflow1_apptainer_with_MS.nf index ce5de9c7..590ba3c4 100644 --- a/tests/assets/oton/test_output_nextflow1_apptainer_with_MS.nf +++ b/tests/assets/oton/test_output_nextflow1_apptainer_with_MS.nf @@ -31,12 +31,14 @@ process split_page_ranges { val range_multiplier output: + env mets_file_chunk env current_range_pages script: """ current_range_pages=\$(${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) echo "Current range is: \$current_range_pages" + mets_file_chunk=\$(echo ${params.mets_path}) """ } @@ -236,7 +238,7 @@ workflow { main: ch_range_multipliers = 
Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group "OCR-D-BIN") + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1], ocrd_cis_ocropy_binarize_0.out[2], "OCR-D-BIN", "OCR-D-CROP") ocrd_skimage_binarize_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], ocrd_anybaseocr_crop_1.out[2], "OCR-D-CROP", "OCR-D-BIN2") ocrd_skimage_denoise_3(ocrd_skimage_binarize_2.out[0], ocrd_skimage_binarize_2.out[1], ocrd_skimage_binarize_2.out[2], "OCR-D-BIN2", "OCR-D-BIN-DENOISE") diff --git a/tests/assets/oton/test_output_nextflow1_docker.nf b/tests/assets/oton/test_output_nextflow1_docker.nf index aa10c21c..4f59235a 100644 --- a/tests/assets/oton/test_output_nextflow1_docker.nf +++ b/tests/assets/oton/test_output_nextflow1_docker.nf @@ -232,7 +232,7 @@ workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group "OCR-D-BIN") + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1], ocrd_cis_ocropy_binarize_0.out[2], "OCR-D-BIN", "OCR-D-CROP") ocrd_skimage_binarize_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], ocrd_anybaseocr_crop_1.out[2], "OCR-D-CROP", "OCR-D-BIN2") ocrd_skimage_denoise_3(ocrd_skimage_binarize_2.out[0], ocrd_skimage_binarize_2.out[1], ocrd_skimage_binarize_2.out[2], "OCR-D-BIN2", "OCR-D-BIN-DENOISE") diff --git a/tests/assets/oton/test_output_nextflow1_docker_with_MS.nf b/tests/assets/oton/test_output_nextflow1_docker_with_MS.nf index 870ea30c..105bd787 100644 --- a/tests/assets/oton/test_output_nextflow1_docker_with_MS.nf +++ b/tests/assets/oton/test_output_nextflow1_docker_with_MS.nf @@ -25,12 +25,14 @@ process split_page_ranges { val range_multiplier output: + env mets_file_chunk env current_range_pages script: """ current_range_pages=\$(${params.env_wrapper_cmd_core} ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) echo "Current range is: \$current_range_pages" + mets_file_chunk=\$(echo ${params.mets_path}) """ } @@ -214,7 +216,7 @@ workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group "OCR-D-BIN") + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1], ocrd_cis_ocropy_binarize_0.out[2], "OCR-D-BIN", "OCR-D-CROP") ocrd_skimage_binarize_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], ocrd_anybaseocr_crop_1.out[2], "OCR-D-CROP", "OCR-D-BIN2") ocrd_skimage_denoise_3(ocrd_skimage_binarize_2.out[0], ocrd_skimage_binarize_2.out[1], ocrd_skimage_binarize_2.out[2], "OCR-D-BIN2", "OCR-D-BIN-DENOISE") diff --git 
a/tests/assets/oton/test_output_nextflow1_local.nf b/tests/assets/oton/test_output_nextflow1_local.nf index 0e2cafde..812258c8 100644 --- a/tests/assets/oton/test_output_nextflow1_local.nf +++ b/tests/assets/oton/test_output_nextflow1_local.nf @@ -231,7 +231,7 @@ workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group "OCR-D-BIN") + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1], ocrd_cis_ocropy_binarize_0.out[2], "OCR-D-BIN", "OCR-D-CROP") ocrd_skimage_binarize_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], ocrd_anybaseocr_crop_1.out[2], "OCR-D-CROP", "OCR-D-BIN2") ocrd_skimage_denoise_3(ocrd_skimage_binarize_2.out[0], ocrd_skimage_binarize_2.out[1], ocrd_skimage_binarize_2.out[2], "OCR-D-BIN2", "OCR-D-BIN-DENOISE") diff --git a/tests/assets/oton/test_output_nextflow1_local_with_MS.nf b/tests/assets/oton/test_output_nextflow1_local_with_MS.nf index 8f2655cf..7685e74c 100644 --- a/tests/assets/oton/test_output_nextflow1_local_with_MS.nf +++ b/tests/assets/oton/test_output_nextflow1_local_with_MS.nf @@ -24,12 +24,14 @@ process split_page_ranges { val range_multiplier output: + env mets_file_chunk env current_range_pages script: """ current_range_pages=\$(ocrd workspace -d ${params.workspace_dir} list-page -f comma-separated -D ${params.forks} -C ${range_multiplier}) echo "Current range is: \$current_range_pages" + mets_file_chunk=\$(echo ${params.mets_path}) """ } @@ -213,7 +215,7 @@ workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group "OCR-D-BIN") + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1], ocrd_cis_ocropy_binarize_0.out[2], "OCR-D-BIN", "OCR-D-CROP") ocrd_skimage_binarize_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], ocrd_anybaseocr_crop_1.out[2], "OCR-D-CROP", "OCR-D-BIN2") ocrd_skimage_denoise_3(ocrd_skimage_binarize_2.out[0], ocrd_skimage_binarize_2.out[1], ocrd_skimage_binarize_2.out[2], "OCR-D-BIN2", "OCR-D-BIN-DENOISE") diff --git a/tests/assets/oton/test_output_nextflow2.nf b/tests/assets/oton/test_output_nextflow2.nf index 43a633f9..a10ca1ad 100644 --- a/tests/assets/oton/test_output_nextflow2.nf +++ b/tests/assets/oton/test_output_nextflow2.nf @@ -208,7 +208,7 @@ workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group "OCR-D-BIN") + ocrd_cis_ocropy_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") ocrd_anybaseocr_crop_1(ocrd_cis_ocropy_binarize_0.out[0], ocrd_cis_ocropy_binarize_0.out[1], ocrd_cis_ocropy_binarize_0.out[2], "OCR-D-BIN", "OCR-D-CROP") ocrd_skimage_denoise_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], 
ocrd_anybaseocr_crop_1.out[2], "OCR-D-CROP", "OCR-D-BIN-DENOISE") ocrd_tesserocr_deskew_3(ocrd_skimage_denoise_2.out[0], ocrd_skimage_denoise_2.out[1], ocrd_skimage_denoise_2.out[2], "OCR-D-BIN-DENOISE", "OCR-D-BIN-DENOISE-DESKEW") diff --git a/tests/assets/oton/test_output_nextflow3.nf b/tests/assets/oton/test_output_nextflow3.nf index eaa7ab06..4e551f1e 100644 --- a/tests/assets/oton/test_output_nextflow3.nf +++ b/tests/assets/oton/test_output_nextflow3.nf @@ -116,7 +116,7 @@ workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_dinglehopper_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group "OCR-D-EVAL-SEG-BLOCK") + ocrd_dinglehopper_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-EVAL-SEG-BLOCK") ocrd_dinglehopper_1(ocrd_dinglehopper_0.out[0], ocrd_dinglehopper_0.out[1], ocrd_dinglehopper_0.out[2], "OCR-D-GT-SEG-LINE,OCR-D-OCR", "OCR-D-EVAL-SEG-LINE") ocrd_dinglehopper_2(ocrd_dinglehopper_1.out[0], ocrd_dinglehopper_1.out[1], ocrd_dinglehopper_1.out[2], "OCR-D-GT-SEG-PAGE,OCR-D-OCR", "OCR-D-EVAL-SEG-PAGE") merging_mets(ocrd_dinglehopper_2.out[0], ocrd_dinglehopper_2.out[1]) diff --git a/tests/assets/oton/test_output_nextflow4.nf b/tests/assets/oton/test_output_nextflow4.nf index e1138699..1d4f600b 100644 --- a/tests/assets/oton/test_output_nextflow4.nf +++ b/tests/assets/oton/test_output_nextflow4.nf @@ -346,7 +346,7 @@ workflow { main: ch_range_multipliers = Channel.of(0..params.forks.intValue()-1) split_page_ranges(ch_range_multipliers) - ocrd_olena_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group "OCR-D-BIN") + ocrd_olena_binarize_0(split_page_ranges.out[0], split_page_ranges.out[1], params.workspace_dir, params.input_file_group, "OCR-D-BIN") ocrd_anybaseocr_crop_1(ocrd_olena_binarize_0.out[0], ocrd_olena_binarize_0.out[1], ocrd_olena_binarize_0.out[2], "OCR-D-BIN", "OCR-D-CROP") ocrd_olena_binarize_2(ocrd_anybaseocr_crop_1.out[0], ocrd_anybaseocr_crop_1.out[1], ocrd_anybaseocr_crop_1.out[2], "OCR-D-CROP", "OCR-D-BIN2") ocrd_cis_ocropy_denoise_3(ocrd_olena_binarize_2.out[0], ocrd_olena_binarize_2.out[1], ocrd_olena_binarize_2.out[2], "OCR-D-BIN2", "OCR-D-BIN-DENOISE") diff --git a/tests/tests_server/test_endpoint_workflow.py b/tests/tests_server/test_endpoint_workflow.py index 1b812806..2fc107c8 100644 --- a/tests/tests_server/test_endpoint_workflow.py +++ b/tests/tests_server/test_endpoint_workflow.py @@ -3,8 +3,20 @@ from tests.constants import WORKFLOW_DUMMY_TEXT from .helpers_asserts import assert_local_dir_workflow, assert_response_status_code +def test_post_workflow_script(operandi, auth, db_workflows, bytes_template_workflow): + # Post a new workflow script + wf_detail = "Test template workflow with mets server" + response = operandi.post( + url=f"/workflow?details={wf_detail}", files={"nextflow_script": bytes_template_workflow}, auth=auth) + assert_response_status_code(response.status_code, expected_floor=2) + workflow_id = response.json()['resource_id'] + assert_local_dir_workflow(workflow_id) + db_workflow = db_workflows.find_one({"workflow_id": workflow_id}) + assert_exists_db_resource(db_workflow, resource_key="workflow_id", resource_id=workflow_id) + assert db_workflow["details"] == wf_detail + assert db_workflow["uses_mets_server"] == False -def test_post_workflow_script(operandi, auth, db_workflows, 
bytes_template_workflow_with_ms): +def test_post_workflow_script_with_ms(operandi, auth, db_workflows, bytes_template_workflow_with_ms): # Post a new workflow script wf_detail = "Test template workflow with mets server" response = operandi.post( @@ -15,6 +27,7 @@ def test_post_workflow_script(operandi, auth, db_workflows, bytes_template_workf db_workflow = db_workflows.find_one({"workflow_id": workflow_id}) assert_exists_db_resource(db_workflow, resource_key="workflow_id", resource_id=workflow_id) assert db_workflow["details"] == wf_detail + assert db_workflow["uses_mets_server"] == True def test_put_workflow_script( @@ -38,6 +51,7 @@ def test_put_workflow_script( assert workflow_path1, "Failed to extract workflow path 1" assert workflow_details1, "Failed to extract workflow details 1" assert db_workflow["details"] == wf_detail + assert db_workflow["uses_mets_server"] == True # The second put request replaces the previously created workflow files = {"nextflow_script": bytes_default_workflow_with_ms} @@ -56,6 +70,7 @@ def test_put_workflow_script( assert workflow_path2, "Failed to extract workflow path 2" assert workflow_details2, "Failed to extract workflow details 2" assert db_workflow["details"] == wf_detail_put + assert db_workflow["uses_mets_server"] == True assert workflow_dir1 == workflow_dir2, \ f"Workflow dir paths should match, but does not: {workflow_dir1} != {workflow_dir2}" diff --git a/tests/tests_utils/test_3_hpc/test_1_nhr_executor.py b/tests/tests_utils/test_3_hpc/test_1_nhr_executor.py index 2ba80b1d..35d5cc46 100644 --- a/tests/tests_utils/test_3_hpc/test_1_nhr_executor.py +++ b/tests/tests_utils/test_3_hpc/test_1_nhr_executor.py @@ -6,7 +6,7 @@ def test_hpc_connector_executor_mk_dir(hpc_nhr_command_executor): - test_dir_name = join(hpc_nhr_command_executor.project_root_dir, f"test_dir_{current_time}") + test_dir_name = join(hpc_nhr_command_executor.project_root_dir_with_env, f"test_dir_{current_time}") sleep(0.5) output, err, return_code = hpc_nhr_command_executor.execute_blocking(command=f"bash -lc 'mkdir -p {test_dir_name}'") assert return_code == 0, err @@ -15,7 +15,7 @@ def test_hpc_connector_executor_mk_dir(hpc_nhr_command_executor): def test_hpc_connector_executor_rm_dir_negative(hpc_nhr_command_executor): - test_dir_name = join(hpc_nhr_command_executor.project_root_dir, f"test_dir_{current_time}") + test_dir_name = join(hpc_nhr_command_executor.project_root_dir_with_env, f"test_dir_{current_time}") sleep(0.5) output, err, return_code = hpc_nhr_command_executor.execute_blocking(command=f"bash -lc 'rm {test_dir_name}'") assert return_code == 1 @@ -25,7 +25,7 @@ def test_hpc_connector_executor_rm_dir_negative(hpc_nhr_command_executor): def test_hpc_connector_executor_rm_dir_positive(hpc_nhr_command_executor): - test_dir_name = join(hpc_nhr_command_executor.project_root_dir, f"test_dir_{current_time}") + test_dir_name = join(hpc_nhr_command_executor.project_root_dir_with_env, f"test_dir_{current_time}") sleep(0.5) output, err, return_code = hpc_nhr_command_executor.execute_blocking(command=f"bash -lc 'rm -rf {test_dir_name}'") assert return_code == 0 @@ -34,7 +34,7 @@ def test_hpc_connector_executor_rm_dir_positive(hpc_nhr_command_executor): def test_hpc_connector_executor_cd_dir(hpc_nhr_command_executor): - test_dir_name = join(hpc_nhr_command_executor.project_root_dir, f"test_dir_{current_time}") + test_dir_name = join(hpc_nhr_command_executor.project_root_dir_with_env, f"test_dir_{current_time}") sleep(0.5) output, err, return_code = 
hpc_nhr_command_executor.execute_blocking(command=f"bash -lc 'cd {test_dir_name}'") assert return_code == 1 diff --git a/tests/tests_utils/test_3_hpc/test_2_nhr_transfer.py b/tests/tests_utils/test_3_hpc/test_2_nhr_transfer.py index 3a81b884..be740443 100644 --- a/tests/tests_utils/test_3_hpc/test_2_nhr_transfer.py +++ b/tests/tests_utils/test_3_hpc/test_2_nhr_transfer.py @@ -15,7 +15,7 @@ def test_hpc_connector_transfer_file(hpc_nhr_data_transfer, path_batch_script_em """ assert_exists_file(str(path_batch_script_empty)) - test_hpc_file_path = Path(hpc_nhr_data_transfer.project_root_dir, BATCH_SCRIPT_EMPTY) + test_hpc_file_path = Path(hpc_nhr_data_transfer.project_root_dir_with_env, BATCH_SCRIPT_EMPTY) hpc_nhr_data_transfer.put_file(local_src=path_batch_script_empty, remote_dst=str(test_hpc_file_path)) sleep(2) test_local_received_file_path = Path(OPERANDI_SERVER_BASE_DIR, BATCH_SCRIPT_EMPTY) @@ -29,7 +29,7 @@ def test_hpc_connector_transfer_dir(hpc_nhr_data_transfer, path_dummy_workspace_ Testing the put_dir and get_dir functionality of the HPC transfer """ assert_exists_dir(str(path_dummy_workspace_data_dir)) - test_hpc_dir_path = Path(hpc_nhr_data_transfer.project_root_dir, ID_WORKSPACE) + test_hpc_dir_path = Path(hpc_nhr_data_transfer.project_root_dir_with_env, ID_WORKSPACE) hpc_nhr_data_transfer.put_dir(local_src=str(path_dummy_workspace_data_dir), remote_dst=str(test_hpc_dir_path)) sleep(5) test_local_received_dir_path = Path(OPERANDI_SERVER_BASE_DIR, ID_WORKSPACE) diff --git a/tests/tests_utils/test_3_hpc/test_3_nhr_combined.py b/tests/tests_utils/test_3_hpc/test_3_nhr_combined.py index 3309b538..c584386d 100644 --- a/tests/tests_utils/test_3_hpc/test_3_nhr_combined.py +++ b/tests/tests_utils/test_3_hpc/test_3_nhr_combined.py @@ -56,12 +56,13 @@ def test_pack_and_put_slurm_workspace_with_ms( ) -def test_hpc_connector_run_batch_script( +def _test_hpc_connector_run_batch_script( hpc_nhr_command_executor, hpc_nhr_data_transfer, template_workflow): slurm_job_id = hpc_nhr_command_executor.trigger_slurm_job( workflow_job_id=ID_WORKFLOW_JOB, nextflow_script_path=Path(template_workflow), input_file_grp=DEFAULT_FILE_GRP, workspace_id=ID_WORKSPACE, - mets_basename=DEFAULT_METS_BASENAME, nf_process_forks=2, ws_pages_amount=8, use_mets_server=False, + mets_basename=DEFAULT_METS_BASENAME, nf_process_forks=2, ws_pages_amount=8, + use_mets_server=False, nf_executable_steps=["ocrd-cis-ocropy-binarize"], file_groups_to_remove="", cpus=2, ram=16, job_deadline_time=HPC_JOB_DEADLINE_TIME_TEST, partition=HPC_NHR_JOB_TEST_PARTITION, qos=HPC_JOB_QOS_SHORT) finished_successfully = hpc_nhr_command_executor.poll_till_end_slurm_job_state( @@ -84,7 +85,8 @@ def test_hpc_connector_run_batch_script_with_ms( workflow_job_id=ID_WORKFLOW_JOB_WITH_MS, nextflow_script_path=Path(template_workflow_with_ms), input_file_grp=DEFAULT_FILE_GRP, workspace_id=ID_WORKSPACE_WITH_MS, mets_basename=DEFAULT_METS_BASENAME, nf_process_forks=2, ws_pages_amount=8, - use_mets_server=True, file_groups_to_remove="", cpus=3, ram=16, job_deadline_time=HPC_JOB_DEADLINE_TIME_TEST, + use_mets_server=True, nf_executable_steps=["ocrd-cis-ocropy-binarize"], + file_groups_to_remove="", cpus=3, ram=16, job_deadline_time=HPC_JOB_DEADLINE_TIME_TEST, partition=HPC_NHR_JOB_TEST_PARTITION, qos=HPC_JOB_QOS_SHORT) finished_successfully = hpc_nhr_command_executor.poll_till_end_slurm_job_state( slurm_job_id=slurm_job_id, interval=5, timeout=300) From b3e70260ae31e15127b80c40dca96a858a137bf2 Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Fri, 29 Nov 
2024 16:58:28 +0100 Subject: [PATCH 10/16] add: extra logging when oton converting --- src/server/operandi_server/routers/workflow.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/server/operandi_server/routers/workflow.py b/src/server/operandi_server/routers/workflow.py index 5a7a1121..5bf43374 100644 --- a/src/server/operandi_server/routers/workflow.py +++ b/src/server/operandi_server/routers/workflow.py @@ -149,12 +149,15 @@ async def produce_production_workflows( continue # path.stem -> file_name # path.name -> file_name.ext + self.logger.info(f"Converting to Nextflow workflow the ocrd process workflow: {path}") output_path = Path(production_nf_wfs_dir, f"{path.stem}.nf") oton_converter.convert_oton( input_path=path, output_path=str(output_path), environment="apptainer", with_mets_server=False) + self.logger.info(f"Converted to a Nextflow file without a mets server: {output_path}") output_path = Path(production_nf_wfs_dir, f"{path.stem}_with_MS.nf") oton_converter.convert_oton( input_path=path, output_path=str(output_path), environment="apptainer", with_mets_server=True) + self.logger.info(f"Converted to a Nextflow file with a mets server: {output_path}") async def insert_production_workflows(self, production_nf_wfs_dir: Path = get_nf_wfs_dir()): wf_detail = "Workflow provided by the Operandi Server" From 3a5658828a410378c0c67d6009b75156799270a6 Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Fri, 29 Nov 2024 17:13:22 +0100 Subject: [PATCH 11/16] fix: processor executables entry in the DBWorkflow --- src/server/operandi_server/routers/workflow_utils.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/server/operandi_server/routers/workflow_utils.py b/src/server/operandi_server/routers/workflow_utils.py index 39a837fa..591798b3 100644 --- a/src/server/operandi_server/routers/workflow_utils.py +++ b/src/server/operandi_server/routers/workflow_utils.py @@ -62,10 +62,12 @@ async def nf_script_executable_steps_with_handling(logger, nf_script_path: str) try: with open(nf_script_path) as nf_file: line = nf_file.readline() - for word in line.split(' '): - if "ocrd-" in word: - processor_executables.append(word) - break + while line: + for word in line.split(' '): + if "ocrd-" in word: + processor_executables.append(word) + break + line = nf_file.readline() except Exception as error: message = "Failed to identify processor executables in the provided Nextflow workflow." 
logger.error(f"{message}, error: {error}") @@ -82,7 +84,7 @@ async def nf_script_executable_steps_with_handling(logger, nf_script_path: str) raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=message) return apptainer_images """ - + logger.info(f"Found processor executables: {processor_executables}") return processor_executables async def validate_oton_with_handling(logger, ocrd_process_txt_path: str): From 24be118eed8e8243c55021595c72c41096b9a70a Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Fri, 29 Nov 2024 20:00:44 +0100 Subject: [PATCH 12/16] fix: slim containers solution --- .../batch_submit_workflow_job.sh | 30 +++----- src/utils/operandi_utils/hpc/nhr_executor.py | 71 ++++++++++++------- .../test_3_hpc/test_3_nhr_combined.py | 2 +- 3 files changed, 57 insertions(+), 46 deletions(-) diff --git a/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh b/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh index 8f807194..cc2dcafa 100755 --- a/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh +++ b/src/utils/operandi_utils/hpc/batch_scripts/batch_submit_workflow_job.sh @@ -36,6 +36,7 @@ FILE_GROUPS_TO_REMOVE=$(echo "$json_args" | jq .file_groups_to_remove | tr -d '" WORKFLOW_JOB_DIR=$(echo "$json_args" | jq .hpc_workflow_job_dir | tr -d '"') WORKSPACE_DIR=$(echo "$json_args" | jq .hpc_workspace_dir | tr -d '"') NF_RUN_COMMAND=$(echo "$json_args" | jq .nf_run_command | tr -d '"') +PRINT_OCRD_VERSION_COMMAND=$(echo "$json_args" | jq .print_ocrd_version_command | tr -d '"') START_METS_SERVER_COMMAND=$(echo "$json_args" | jq .start_mets_server_command | tr -d '"') STOP_METS_SERVER_COMMAND=$(echo "$json_args" | jq .stop_mets_server_command | tr -d '"') LIST_FILE_GROUPS_COMMAND=$(echo "$json_args" | jq .list_file_groups_command | tr -d '"') @@ -43,19 +44,15 @@ REMOVE_FILE_GROUP_COMMAND=$(echo "$json_args" | jq .remove_file_group_command | PROJECT_DIR_OCRD_MODELS="${PROJECT_BASE_DIR}/ocrd_models" PROJECT_DIR_PROCESSOR_SIFS="${PROJECT_BASE_DIR}/ocrd_processor_sifs" -PROJECT_SIF_PATH_OCRD_ALL="${PROJECT_BASE_DIR}/ocrd_processor_sifs/ocrd_all_maximum_image.sif" NODE_DIR_OCRD_MODELS="${TMP_LOCAL}/ocrd_models" NODE_DIR_PROCESSOR_SIFS="${TMP_LOCAL}/ocrd_processor_sifs" -NODE_SIF_PATH_OCRD_ALL="${TMP_LOCAL}/ocrd_processor_sifs/ocrd_all_maximum_image.sif" echo "" echo "Project dir ocrd models: $PROJECT_DIR_OCRD_MODELS" echo "Project dir processor sifs: $PROJECT_DIR_PROCESSOR_SIFS" -echo "Project sif path ocrd all: $PROJECT_SIF_PATH_OCRD_ALL" echo "Node dir ocrd models: $NODE_DIR_OCRD_MODELS" echo "Node dir processor sifs: $NODE_DIR_PROCESSOR_SIFS" -echo "Node sif path ocrd all: $NODE_SIF_PATH_OCRD_ALL" echo "" echo "Workspace dir: $WORKSPACE_DIR" @@ -63,18 +60,19 @@ echo "Use mets server: $USE_METS_SERVER" echo "" echo "Nf run command with Node placeholders: $NF_RUN_COMMAND" -NF_RUN_COMMAND="${NF_RUN_COMMAND//PH_NODE_SIF_PATH_OCRD_ALL/$NODE_SIF_PATH_OCRD_ALL}" NF_RUN_COMMAND="${NF_RUN_COMMAND//PH_NODE_DIR_OCRD_MODELS/$NODE_DIR_OCRD_MODELS}" NF_RUN_COMMAND="${NF_RUN_COMMAND//PH_CMD_WRAPPER/\'}" +NF_RUN_COMMAND="${NF_RUN_COMMAND//PH_NODE_DIR_PROCESSOR_SIFS/$NODE_DIR_PROCESSOR_SIFS}" echo "" echo "Nf run command without placeholders: $NF_RUN_COMMAND" echo "" -echo "Replacing ocrd core image sif placeholder of commands" -START_METS_SERVER_COMMAND="${START_METS_SERVER_COMMAND//PH_NODE_SIF_PATH_OCRD_ALL/$NODE_SIF_PATH_OCRD_ALL}" 
-STOP_METS_SERVER_COMMAND="${STOP_METS_SERVER_COMMAND//PH_NODE_SIF_PATH_OCRD_ALL/$NODE_SIF_PATH_OCRD_ALL}" -LIST_FILE_GROUPS_COMMAND="${LIST_FILE_GROUPS_COMMAND//PH_NODE_SIF_PATH_OCRD_ALL/$NODE_SIF_PATH_OCRD_ALL}" -REMOVE_FILE_GROUP_COMMAND="${REMOVE_FILE_GROUP_COMMAND//PH_NODE_SIF_PATH_OCRD_ALL/$NODE_SIF_PATH_OCRD_ALL}" +echo "Replacing ocrd core NODE_DIR_PROCESSOR_SIFS" +PRINT_OCRD_VERSION_COMMAND="${PRINT_OCRD_VERSION_COMMAND//PH_NODE_DIR_PROCESSOR_SIFS/$NODE_DIR_PROCESSOR_SIFS}" +START_METS_SERVER_COMMAND="${START_METS_SERVER_COMMAND//PH_NODE_DIR_PROCESSOR_SIFS/$NODE_DIR_PROCESSOR_SIFS}" +STOP_METS_SERVER_COMMAND="${STOP_METS_SERVER_COMMAND//PH_NODE_DIR_PROCESSOR_SIFS/$NODE_DIR_PROCESSOR_SIFS}" +LIST_FILE_GROUPS_COMMAND="${LIST_FILE_GROUPS_COMMAND//PH_NODE_DIR_PROCESSOR_SIFS/$NODE_DIR_PROCESSOR_SIFS}" +REMOVE_FILE_GROUP_COMMAND="${REMOVE_FILE_GROUP_COMMAND//PH_NODE_DIR_PROCESSOR_SIFS/$NODE_DIR_PROCESSOR_SIFS}" echo "" check_existence_of_dir_scratch_base(){ @@ -98,15 +96,6 @@ check_existence_of_dir_ocrd_models(){ echo "Ocrd models directory found at: ${PROJECT_DIR_OCRD_MODELS}" } -check_existence_of_sif_path_ocrd_all(){ - # The SIF file of the OCR-D All docker image must be previously created - if [ ! -f "${PROJECT_SIF_PATH_OCRD_ALL}" ]; then - echo "Required ocrd_all_image sif file not found at: ${PROJECT_SIF_PATH_OCRD_ALL}" - exit 1 - fi - echo "Required ocrd_all_image sif file found at: ${PROJECT_SIF_PATH_OCRD_ALL}" -} - check_existence_of_ocrd_processor_images_to_be_used(){ for ocrd_image in "${ocrd_processor_images[@]}" do @@ -121,7 +110,6 @@ check_existence_of_ocrd_processor_images_to_be_used(){ check_existence_of_paths() { check_existence_of_dir_scratch_base check_existence_of_dir_ocrd_models - check_existence_of_sif_path_ocrd_all check_existence_of_ocrd_processor_images_to_be_used } @@ -172,7 +160,7 @@ transfer_to_node_storage_processor_images(){ fi done echo "" - apptainer exec "$NODE_SIF_PATH_OCRD_ALL" ocrd --version + eval "$PRINT_OCRD_VERSION_COMMAND" echo "" } diff --git a/src/utils/operandi_utils/hpc/nhr_executor.py b/src/utils/operandi_utils/hpc/nhr_executor.py index b278d1ef..c0b7d2fc 100644 --- a/src/utils/operandi_utils/hpc/nhr_executor.py +++ b/src/utils/operandi_utils/hpc/nhr_executor.py @@ -5,13 +5,18 @@ from time import sleep from typing import List -from operandi_utils.constants import StateJobSlurm +from operandi_utils.constants import StateJobSlurm, OCRD_PROCESSOR_EXECUTABLE_TO_IMAGE from .constants import ( HPC_JOB_DEADLINE_TIME_TEST, HPC_JOB_QOS_DEFAULT, HPC_NHR_JOB_DEFAULT_PARTITION, HPC_BATCH_SUBMIT_WORKFLOW_JOB, HPC_WRAPPER_SUBMIT_WORKFLOW_JOB, HPC_WRAPPER_CHECK_WORKFLOW_JOB_STATUS ) from .nhr_connector import NHRConnector +# Just some placeholders to be replaced with actual paths that are +# dynamically allocated inside the node that runs the HPC slurm job +PH_NODE_DIR_OCRD_MODELS = "PH_NODE_DIR_OCRD_MODELS" +PH_NODE_DIR_PROCESSOR_SIFS = "PH_NODE_DIR_PROCESSOR_SIFS" +PH_CMD_WRAPPER = "PH_CMD_WRAPPER" class NHRExecutor(NHRConnector): def __init__(self) -> None: @@ -67,23 +72,33 @@ def trigger_slurm_job( hpc_nf_script_path = join(self.slurm_workspaces_dir, workflow_job_id, nextflow_script_id) hpc_workspace_dir = join(self.slurm_workspaces_dir, workflow_job_id, workspace_id) - # NODE_PATH_OCRD_MODELS_PLACEHOLDER and NODE_PATH_SIF_PLACEHOLDER are just placeholders to be replaced - # with actual paths that are dynamically allocated inside the node that runs the HPC slurm job - ph_node_dir_ocrd_models = "PH_NODE_DIR_OCRD_MODELS" - ph_node_sif_path_ocrd_all = 
"PH_NODE_SIF_PATH_OCRD_ALL" + sif_ocrd_all = "ocrd_all_maximum_image.sif" + sif_ocrd_core = OCRD_PROCESSOR_EXECUTABLE_TO_IMAGE["ocrd"] + # TODO: Refactor the switch for using slim images + use_slim_images = False + if use_slim_images: + ph_sif_core = f"{PH_NODE_DIR_PROCESSOR_SIFS}/{sif_ocrd_core}" + else: + ph_sif_core = f"{PH_NODE_DIR_PROCESSOR_SIFS}/{sif_ocrd_all}" nf_run_command = self.cmd_nextflow_run( hpc_nf_script_path=hpc_nf_script_path, hpc_ws_dir=hpc_workspace_dir, - bind_ocrd_models=f"{ph_node_dir_ocrd_models}/ocrd-resources:/usr/local/share/ocrd-resources", - ph_sif_ocrd_all=ph_node_sif_path_ocrd_all, input_file_grp=input_file_grp, mets_basename=mets_basename, + bind_ocrd_models=f"{PH_NODE_DIR_OCRD_MODELS}/ocrd-resources:/usr/local/share/ocrd-resources", + sif_core=sif_ocrd_core, + sif_ocrd_all=sif_ocrd_all, input_file_grp=input_file_grp, mets_basename=mets_basename, use_mets_server=use_mets_server, nf_executable_steps=nf_executable_steps, ws_pages_amount=ws_pages_amount, - cpus=cpus, ram=ram, forks=nf_process_forks + cpus=cpus, ram=ram, forks=nf_process_forks, use_slim_images=use_slim_images ) + if use_slim_images: + ocrd_processor_images = ",".join([OCRD_PROCESSOR_EXECUTABLE_TO_IMAGE[exe] for exe in nf_executable_steps]) + ocrd_processor_images = f"{sif_ocrd_core},{ocrd_processor_images}" + else: + ocrd_processor_images = sif_ocrd_all regular_args = { "project_base_dir": self.project_root_dir, "scratch_base_dir": self.slurm_workspaces_dir, - "ocrd_processor_images": "ocrd_all_maximum_image.sif", + "ocrd_processor_images": ocrd_processor_images, "workflow_job_id": workflow_job_id, "workspace_id": workspace_id, "use_mets_server_bash_flag": use_mets_server_bash_flag, @@ -91,10 +106,11 @@ def trigger_slurm_job( "hpc_workflow_job_dir": hpc_workflow_job_dir, "hpc_workspace_dir": hpc_workspace_dir, "nf_run_command": nf_run_command, - "start_mets_server_command": self.cmd_core_start_mets_server(hpc_workspace_dir, ph_node_sif_path_ocrd_all), - "stop_mets_server_command": self.cmd_core_stop_mets_server(hpc_workspace_dir, ph_node_sif_path_ocrd_all), - "list_file_groups_command": self.cmd_core_list_file_groups(hpc_workspace_dir, ph_node_sif_path_ocrd_all), - "remove_file_group_command": self.cmd_core_remove_file_group(hpc_workspace_dir, ph_node_sif_path_ocrd_all) + "print_ocrd_version_command": self.cmd_core_print_version(hpc_workspace_dir, ph_sif_core), + "start_mets_server_command": self.cmd_core_start_mets_server(hpc_workspace_dir, ph_sif_core), + "stop_mets_server_command": self.cmd_core_stop_mets_server(hpc_workspace_dir, ph_sif_core), + "list_file_groups_command": self.cmd_core_list_file_groups(hpc_workspace_dir, ph_sif_core), + "remove_file_group_command": self.cmd_core_remove_file_group(hpc_workspace_dir, ph_sif_core) } command += f" '{dumps(sbatch_args)}' '{dumps(regular_args)}'" @@ -176,9 +192,9 @@ def poll_till_end_slurm_job_state(self, slurm_job_id: str, interval: int = 5, ti @staticmethod def cmd_nextflow_run( - hpc_nf_script_path: str, hpc_ws_dir: str, bind_ocrd_models: str, ph_sif_ocrd_all: str, input_file_grp: str, - mets_basename: str, use_mets_server: bool, nf_executable_steps: List[str], ws_pages_amount: int, cpus: int, - ram: int, forks: int + hpc_nf_script_path: str, hpc_ws_dir: str, bind_ocrd_models: str, sif_core: str, sif_ocrd_all: str, + input_file_grp: str, mets_basename: str, use_mets_server: bool, nf_executable_steps: List[str], + ws_pages_amount: int, cpus: int, ram: int, forks: int, use_slim_images: bool ) -> str: nf_run_command = f"nextflow run 
{hpc_nf_script_path} -ansi-log false -with-report" nf_run_command += f" --input_file_group {input_file_grp}" @@ -187,22 +203,29 @@ def cmd_nextflow_run( nf_run_command += f" --mets_socket /ws_data/mets_server.sock" nf_run_command += f" --workspace_dir /ws_data" nf_run_command += f" --pages {ws_pages_amount}" - # Command wrapper placeholder. Each occurrence is replaced with a single quote ' to avoid json parsing errors - # TODO: Send actual slim image apptainer cmds here instead of the sif_ocrd_all - ph_cmd_wrapper = "PH_CMD_WRAPPER" - index = 0 + sif_images = [OCRD_PROCESSOR_EXECUTABLE_TO_IMAGE[exe] for exe in nf_executable_steps] apptainer_cmd = f"apptainer exec --bind {hpc_ws_dir}:/ws_data --bind {bind_ocrd_models}" - apptainer_cmd += f" --env OCRD_METS_CACHING=false {ph_sif_ocrd_all}" - nf_run_command += f" --env_wrapper_cmd_core {ph_cmd_wrapper}{apptainer_cmd}{ph_cmd_wrapper}" - for executable_step in nf_executable_steps: - nf_run_command += f" --env_wrapper_cmd_step{index} {ph_cmd_wrapper}{apptainer_cmd}{ph_cmd_wrapper}" + apptainer_cmd += f" --env OCRD_METS_CACHING=false" + apptainer_image = sif_core if use_slim_images else sif_ocrd_all + core_command = f"{apptainer_cmd} {PH_NODE_DIR_PROCESSOR_SIFS}/{apptainer_image}" + nf_run_command += f" --env_wrapper_cmd_core {PH_CMD_WRAPPER}{core_command}{PH_CMD_WRAPPER}" + + index = 0 + for sif_image in sif_images: + apptainer_image = sif_image if use_slim_images else sif_ocrd_all + step_command = f"{apptainer_cmd} {PH_NODE_DIR_PROCESSOR_SIFS}/{apptainer_image}" + nf_run_command += f" --env_wrapper_cmd_step{index} {PH_CMD_WRAPPER}{step_command}{PH_CMD_WRAPPER}" index += 1 nf_run_command += f" --cpus {cpus}" nf_run_command += f" --ram {ram}" nf_run_command += f" --forks {forks}" return nf_run_command + @staticmethod + def cmd_core_print_version(hpc_ws_dir: str, ph_sif_core: str) -> str: + return f"apptainer exec --bind {hpc_ws_dir}:/ws_data {ph_sif_core} ocrd --version" + @staticmethod def cmd_core_start_mets_server(hpc_ws_dir: str, ph_sif_core: str) -> str: command = f"apptainer exec --bind {hpc_ws_dir}:/ws_data {ph_sif_core}" diff --git a/tests/tests_utils/test_3_hpc/test_3_nhr_combined.py b/tests/tests_utils/test_3_hpc/test_3_nhr_combined.py index c584386d..13c772e1 100644 --- a/tests/tests_utils/test_3_hpc/test_3_nhr_combined.py +++ b/tests/tests_utils/test_3_hpc/test_3_nhr_combined.py @@ -56,7 +56,7 @@ def test_pack_and_put_slurm_workspace_with_ms( ) -def _test_hpc_connector_run_batch_script( +def test_hpc_connector_run_batch_script( hpc_nhr_command_executor, hpc_nhr_data_transfer, template_workflow): slurm_job_id = hpc_nhr_command_executor.trigger_slurm_job( workflow_job_id=ID_WORKFLOW_JOB, nextflow_script_path=Path(template_workflow), From 3a20a9a6f517158cbb97cab4ea6ff21626673256 Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Mon, 2 Dec 2024 14:02:55 +0100 Subject: [PATCH 13/16] refactor: slim images switch --- src/utils/operandi_utils/hpc/constants.py | 4 ++++ src/utils/operandi_utils/hpc/nhr_executor.py | 10 ++++------ 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/src/utils/operandi_utils/hpc/constants.py b/src/utils/operandi_utils/hpc/constants.py index 156cd45a..5ce9f88c 100644 --- a/src/utils/operandi_utils/hpc/constants.py +++ b/src/utils/operandi_utils/hpc/constants.py @@ -13,6 +13,7 @@ "HPC_SSH_CONNECTION_TRY_TIMES", "HPC_NHR_PROJECT", "HPC_NHR_CLUSTERS", + "HPC_USE_SLIM_IMAGES", "HPC_WRAPPER_SUBMIT_WORKFLOW_JOB", "HPC_WRAPPER_CHECK_WORKFLOW_JOB_STATUS" ] @@ -78,3 +79,6 @@ HPC_JOB_QOS_LONG = "7d" 
HPC_JOB_QOS_VERY_LONG = "14d" HPC_SSH_CONNECTION_TRY_TIMES = 30 + +# A switch to decide whether to use the fat ocrd_all image or slim images of the processors +HPC_USE_SLIM_IMAGES: bool = False diff --git a/src/utils/operandi_utils/hpc/nhr_executor.py b/src/utils/operandi_utils/hpc/nhr_executor.py index c0b7d2fc..36a48acc 100644 --- a/src/utils/operandi_utils/hpc/nhr_executor.py +++ b/src/utils/operandi_utils/hpc/nhr_executor.py @@ -8,7 +8,7 @@ from operandi_utils.constants import StateJobSlurm, OCRD_PROCESSOR_EXECUTABLE_TO_IMAGE from .constants import ( HPC_JOB_DEADLINE_TIME_TEST, HPC_JOB_QOS_DEFAULT, HPC_NHR_JOB_DEFAULT_PARTITION, HPC_BATCH_SUBMIT_WORKFLOW_JOB, - HPC_WRAPPER_SUBMIT_WORKFLOW_JOB, HPC_WRAPPER_CHECK_WORKFLOW_JOB_STATUS + HPC_USE_SLIM_IMAGES, HPC_WRAPPER_SUBMIT_WORKFLOW_JOB, HPC_WRAPPER_CHECK_WORKFLOW_JOB_STATUS ) from .nhr_connector import NHRConnector @@ -75,9 +75,7 @@ def trigger_slurm_job( sif_ocrd_all = "ocrd_all_maximum_image.sif" sif_ocrd_core = OCRD_PROCESSOR_EXECUTABLE_TO_IMAGE["ocrd"] - # TODO: Refactor the switch for using slim images - use_slim_images = False - if use_slim_images: + if HPC_USE_SLIM_IMAGES: ph_sif_core = f"{PH_NODE_DIR_PROCESSOR_SIFS}/{sif_ocrd_core}" else: ph_sif_core = f"{PH_NODE_DIR_PROCESSOR_SIFS}/{sif_ocrd_all}" @@ -87,10 +85,10 @@ def trigger_slurm_job( sif_core=sif_ocrd_core, sif_ocrd_all=sif_ocrd_all, input_file_grp=input_file_grp, mets_basename=mets_basename, use_mets_server=use_mets_server, nf_executable_steps=nf_executable_steps, ws_pages_amount=ws_pages_amount, - cpus=cpus, ram=ram, forks=nf_process_forks, use_slim_images=use_slim_images + cpus=cpus, ram=ram, forks=nf_process_forks, use_slim_images=HPC_USE_SLIM_IMAGES ) - if use_slim_images: + if HPC_USE_SLIM_IMAGES: ocrd_processor_images = ",".join([OCRD_PROCESSOR_EXECUTABLE_TO_IMAGE[exe] for exe in nf_executable_steps]) ocrd_processor_images = f"{sif_ocrd_core},{ocrd_processor_images}" else: From 5177e1282e8523f3cead173bf628bcf6e9452208 Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Mon, 2 Dec 2024 15:21:49 +0100 Subject: [PATCH 14/16] add: info print on oton produced wfs --- .../nextflow_workflows/default_workflow.nf | 23 ++++++++++++++++ .../default_workflow_with_MS.nf | 24 +++++++++++++++++ .../hpc/nextflow_workflows/odem_workflow.nf | 25 ++++++++++++++++++ .../odem_workflow_with_MS.nf | 26 +++++++++++++++++++ .../hpc/nextflow_workflows/sbb_workflow.nf | 16 ++++++++++++ .../sbb_workflow_with_MS.nf | 17 ++++++++++++ .../nextflow_workflows/template_workflow.nf | 16 ++++++++++++ .../template_workflow_with_MS.nf | 17 ++++++++++++ .../operandi_utils/oton/nf_file_executable.py | 15 +++++++++++ .../oton/test_output_nextflow1_apptainer.nf | 23 ++++++++++++++++ ...test_output_nextflow1_apptainer_with_MS.nf | 24 +++++++++++++++++ .../oton/test_output_nextflow1_docker.nf | 19 ++++++++++++++ .../test_output_nextflow1_docker_with_MS.nf | 20 ++++++++++++++ .../oton/test_output_nextflow1_local.nf | 18 +++++++++++++ .../test_output_nextflow1_local_with_MS.nf | 19 ++++++++++++++ tests/assets/oton/test_output_nextflow2.nf | 17 ++++++++++++ tests/assets/oton/test_output_nextflow3.nf | 13 ++++++++++ tests/assets/oton/test_output_nextflow4.nf | 23 ++++++++++++++++ 18 files changed, 355 insertions(+) diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow.nf b/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow.nf index b057341e..a748cf8b 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow.nf +++ 
b/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow.nf @@ -20,6 +20,29 @@ params.env_wrapper_cmd_step5 = "null" params.env_wrapper_cmd_step6 = "null" params.env_wrapper_cmd_step7 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + cpus: ${params.cpus} + ram: ${params.ram} + forks: ${params.forks} + cpus_per_fork: ${params.cpus_per_fork} + ram_per_fork: ${params.ram_per_fork} + env_wrapper_cmd_core: ${params.env_wrapper_cmd_core} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + env_wrapper_cmd_step1: ${params.env_wrapper_cmd_step1} + env_wrapper_cmd_step2: ${params.env_wrapper_cmd_step2} + env_wrapper_cmd_step3: ${params.env_wrapper_cmd_step3} + env_wrapper_cmd_step4: ${params.env_wrapper_cmd_step4} + env_wrapper_cmd_step5: ${params.env_wrapper_cmd_step5} + env_wrapper_cmd_step6: ${params.env_wrapper_cmd_step6} + env_wrapper_cmd_step7: ${params.env_wrapper_cmd_step7} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow_with_MS.nf b/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow_with_MS.nf index 590ba3c4..23115cd0 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow_with_MS.nf +++ b/src/utils/operandi_utils/hpc/nextflow_workflows/default_workflow_with_MS.nf @@ -21,6 +21,30 @@ params.env_wrapper_cmd_step5 = "null" params.env_wrapper_cmd_step6 = "null" params.env_wrapper_cmd_step7 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + mets_socket_path: ${params.mets_socket_path} + cpus: ${params.cpus} + ram: ${params.ram} + forks: ${params.forks} + cpus_per_fork: ${params.cpus_per_fork} + ram_per_fork: ${params.ram_per_fork} + env_wrapper_cmd_core: ${params.env_wrapper_cmd_core} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + env_wrapper_cmd_step1: ${params.env_wrapper_cmd_step1} + env_wrapper_cmd_step2: ${params.env_wrapper_cmd_step2} + env_wrapper_cmd_step3: ${params.env_wrapper_cmd_step3} + env_wrapper_cmd_step4: ${params.env_wrapper_cmd_step4} + env_wrapper_cmd_step5: ${params.env_wrapper_cmd_step5} + env_wrapper_cmd_step6: ${params.env_wrapper_cmd_step6} + env_wrapper_cmd_step7: ${params.env_wrapper_cmd_step7} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow.nf b/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow.nf index 78c6f2c2..8d3908c2 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow.nf +++ b/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow.nf @@ -22,6 +22,31 @@ params.env_wrapper_cmd_step7 = "null" params.env_wrapper_cmd_step8 = "null" params.env_wrapper_cmd_step9 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + cpus: ${params.cpus} + ram: ${params.ram} + forks: ${params.forks} + cpus_per_fork: ${params.cpus_per_fork} + 
ram_per_fork: ${params.ram_per_fork} + env_wrapper_cmd_core: ${params.env_wrapper_cmd_core} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + env_wrapper_cmd_step1: ${params.env_wrapper_cmd_step1} + env_wrapper_cmd_step2: ${params.env_wrapper_cmd_step2} + env_wrapper_cmd_step3: ${params.env_wrapper_cmd_step3} + env_wrapper_cmd_step4: ${params.env_wrapper_cmd_step4} + env_wrapper_cmd_step5: ${params.env_wrapper_cmd_step5} + env_wrapper_cmd_step6: ${params.env_wrapper_cmd_step6} + env_wrapper_cmd_step7: ${params.env_wrapper_cmd_step7} + env_wrapper_cmd_step8: ${params.env_wrapper_cmd_step8} + env_wrapper_cmd_step9: ${params.env_wrapper_cmd_step9} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow_with_MS.nf b/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow_with_MS.nf index 240ae719..63fe0753 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow_with_MS.nf +++ b/src/utils/operandi_utils/hpc/nextflow_workflows/odem_workflow_with_MS.nf @@ -23,6 +23,32 @@ params.env_wrapper_cmd_step7 = "null" params.env_wrapper_cmd_step8 = "null" params.env_wrapper_cmd_step9 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + mets_socket_path: ${params.mets_socket_path} + cpus: ${params.cpus} + ram: ${params.ram} + forks: ${params.forks} + cpus_per_fork: ${params.cpus_per_fork} + ram_per_fork: ${params.ram_per_fork} + env_wrapper_cmd_core: ${params.env_wrapper_cmd_core} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + env_wrapper_cmd_step1: ${params.env_wrapper_cmd_step1} + env_wrapper_cmd_step2: ${params.env_wrapper_cmd_step2} + env_wrapper_cmd_step3: ${params.env_wrapper_cmd_step3} + env_wrapper_cmd_step4: ${params.env_wrapper_cmd_step4} + env_wrapper_cmd_step5: ${params.env_wrapper_cmd_step5} + env_wrapper_cmd_step6: ${params.env_wrapper_cmd_step6} + env_wrapper_cmd_step7: ${params.env_wrapper_cmd_step7} + env_wrapper_cmd_step8: ${params.env_wrapper_cmd_step8} + env_wrapper_cmd_step9: ${params.env_wrapper_cmd_step9} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow.nf b/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow.nf index b7d3a235..eb37866b 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow.nf +++ b/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow.nf @@ -13,6 +13,22 @@ params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toI params.env_wrapper_cmd_core = "null" params.env_wrapper_cmd_step0 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + cpus: ${params.cpus} + ram: ${params.ram} + forks: ${params.forks} + cpus_per_fork: ${params.cpus_per_fork} + ram_per_fork: ${params.ram_per_fork} + env_wrapper_cmd_core: ${params.env_wrapper_cmd_core} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow_with_MS.nf 
b/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow_with_MS.nf index 389772b5..e81b6094 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow_with_MS.nf +++ b/src/utils/operandi_utils/hpc/nextflow_workflows/sbb_workflow_with_MS.nf @@ -14,6 +14,23 @@ params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toI params.env_wrapper_cmd_core = "null" params.env_wrapper_cmd_step0 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + mets_socket_path: ${params.mets_socket_path} + cpus: ${params.cpus} + ram: ${params.ram} + forks: ${params.forks} + cpus_per_fork: ${params.cpus_per_fork} + ram_per_fork: ${params.ram_per_fork} + env_wrapper_cmd_core: ${params.env_wrapper_cmd_core} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow.nf b/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow.nf index fc01fceb..0028a978 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow.nf +++ b/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow.nf @@ -13,6 +13,22 @@ params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toI params.env_wrapper_cmd_core = "null" params.env_wrapper_cmd_step0 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + cpus: ${params.cpus} + ram: ${params.ram} + forks: ${params.forks} + cpus_per_fork: ${params.cpus_per_fork} + ram_per_fork: ${params.ram_per_fork} + env_wrapper_cmd_core: ${params.env_wrapper_cmd_core} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow_with_MS.nf b/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow_with_MS.nf index be041784..0daa30ed 100755 --- a/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow_with_MS.nf +++ b/src/utils/operandi_utils/hpc/nextflow_workflows/template_workflow_with_MS.nf @@ -14,6 +14,23 @@ params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toI params.env_wrapper_cmd_core = "null" params.env_wrapper_cmd_step0 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + mets_socket_path: ${params.mets_socket_path} + cpus: ${params.cpus} + ram: ${params.ram} + forks: ${params.forks} + cpus_per_fork: ${params.cpus_per_fork} + ram_per_fork: ${params.ram_per_fork} + env_wrapper_cmd_core: ${params.env_wrapper_cmd_core} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/src/utils/operandi_utils/oton/nf_file_executable.py b/src/utils/operandi_utils/oton/nf_file_executable.py index ef6b7529..61cf247e 100644 --- 
a/src/utils/operandi_utils/oton/nf_file_executable.py +++ b/src/utils/operandi_utils/oton/nf_file_executable.py @@ -193,6 +193,19 @@ def __assign_first_file_grps_param(self): break index += 1 + def build_log_info_prints(self) -> str: + log_info = f'log.info """\\\n' + log_info += f"{SPACES}OPERANDI HPC - Nextflow Workflow\n" + log_info += f"{SPACES}===================================================\n" + for param in self.nf_lines_parameters: + if not param or "params." not in param: + continue + param_key = param[param.find(".") + 1:param.find("=") - 1] + log_info += f"{SPACES}{param_key}: " + log_info += f'${BS[0]}{param[0:param.find("=") - 1]}{BS[1]}\n' + log_info += f'{SPACES}""".stripIndent()\n' + return log_info + # TODO: Refactor later def build_main_workflow(self, with_mets_server: bool): self.__assign_first_file_grps_param() @@ -213,6 +226,8 @@ def produce_nextflow_file(self, output_path: str, environment: str, with_mets_se for nextflow_line in self.nf_lines_parameters: nextflow_file.write(f'{nextflow_line}\n') nextflow_file.write("\n") + nextflow_file.write(self.build_log_info_prints()) + nextflow_file.write("\n") nextflow_file.write(f'{self.nf_process_split_range.file_representation(local_script=True)}\n') for block in self.nf_blocks_process: nextflow_file.write(f'{block.file_representation(local_script=False)}\n') diff --git a/tests/assets/oton/test_output_nextflow1_apptainer.nf b/tests/assets/oton/test_output_nextflow1_apptainer.nf index b057341e..a748cf8b 100644 --- a/tests/assets/oton/test_output_nextflow1_apptainer.nf +++ b/tests/assets/oton/test_output_nextflow1_apptainer.nf @@ -20,6 +20,29 @@ params.env_wrapper_cmd_step5 = "null" params.env_wrapper_cmd_step6 = "null" params.env_wrapper_cmd_step7 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + cpus: ${params.cpus} + ram: ${params.ram} + forks: ${params.forks} + cpus_per_fork: ${params.cpus_per_fork} + ram_per_fork: ${params.ram_per_fork} + env_wrapper_cmd_core: ${params.env_wrapper_cmd_core} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + env_wrapper_cmd_step1: ${params.env_wrapper_cmd_step1} + env_wrapper_cmd_step2: ${params.env_wrapper_cmd_step2} + env_wrapper_cmd_step3: ${params.env_wrapper_cmd_step3} + env_wrapper_cmd_step4: ${params.env_wrapper_cmd_step4} + env_wrapper_cmd_step5: ${params.env_wrapper_cmd_step5} + env_wrapper_cmd_step6: ${params.env_wrapper_cmd_step6} + env_wrapper_cmd_step7: ${params.env_wrapper_cmd_step7} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/tests/assets/oton/test_output_nextflow1_apptainer_with_MS.nf b/tests/assets/oton/test_output_nextflow1_apptainer_with_MS.nf index 590ba3c4..23115cd0 100644 --- a/tests/assets/oton/test_output_nextflow1_apptainer_with_MS.nf +++ b/tests/assets/oton/test_output_nextflow1_apptainer_with_MS.nf @@ -21,6 +21,30 @@ params.env_wrapper_cmd_step5 = "null" params.env_wrapper_cmd_step6 = "null" params.env_wrapper_cmd_step7 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + mets_socket_path: ${params.mets_socket_path} + cpus: ${params.cpus} + ram: ${params.ram} + forks: ${params.forks} + 
cpus_per_fork: ${params.cpus_per_fork} + ram_per_fork: ${params.ram_per_fork} + env_wrapper_cmd_core: ${params.env_wrapper_cmd_core} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + env_wrapper_cmd_step1: ${params.env_wrapper_cmd_step1} + env_wrapper_cmd_step2: ${params.env_wrapper_cmd_step2} + env_wrapper_cmd_step3: ${params.env_wrapper_cmd_step3} + env_wrapper_cmd_step4: ${params.env_wrapper_cmd_step4} + env_wrapper_cmd_step5: ${params.env_wrapper_cmd_step5} + env_wrapper_cmd_step6: ${params.env_wrapper_cmd_step6} + env_wrapper_cmd_step7: ${params.env_wrapper_cmd_step7} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/tests/assets/oton/test_output_nextflow1_docker.nf b/tests/assets/oton/test_output_nextflow1_docker.nf index 4f59235a..638e6190 100644 --- a/tests/assets/oton/test_output_nextflow1_docker.nf +++ b/tests/assets/oton/test_output_nextflow1_docker.nf @@ -16,6 +16,25 @@ params.env_wrapper_cmd_step5 = "null" params.env_wrapper_cmd_step6 = "null" params.env_wrapper_cmd_step7 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + forks: ${params.forks} + env_wrapper_cmd_core: ${params.env_wrapper_cmd_core} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + env_wrapper_cmd_step1: ${params.env_wrapper_cmd_step1} + env_wrapper_cmd_step2: ${params.env_wrapper_cmd_step2} + env_wrapper_cmd_step3: ${params.env_wrapper_cmd_step3} + env_wrapper_cmd_step4: ${params.env_wrapper_cmd_step4} + env_wrapper_cmd_step5: ${params.env_wrapper_cmd_step5} + env_wrapper_cmd_step6: ${params.env_wrapper_cmd_step6} + env_wrapper_cmd_step7: ${params.env_wrapper_cmd_step7} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/tests/assets/oton/test_output_nextflow1_docker_with_MS.nf b/tests/assets/oton/test_output_nextflow1_docker_with_MS.nf index 105bd787..e702bba1 100644 --- a/tests/assets/oton/test_output_nextflow1_docker_with_MS.nf +++ b/tests/assets/oton/test_output_nextflow1_docker_with_MS.nf @@ -17,6 +17,26 @@ params.env_wrapper_cmd_step5 = "null" params.env_wrapper_cmd_step6 = "null" params.env_wrapper_cmd_step7 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + mets_socket_path: ${params.mets_socket_path} + forks: ${params.forks} + env_wrapper_cmd_core: ${params.env_wrapper_cmd_core} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + env_wrapper_cmd_step1: ${params.env_wrapper_cmd_step1} + env_wrapper_cmd_step2: ${params.env_wrapper_cmd_step2} + env_wrapper_cmd_step3: ${params.env_wrapper_cmd_step3} + env_wrapper_cmd_step4: ${params.env_wrapper_cmd_step4} + env_wrapper_cmd_step5: ${params.env_wrapper_cmd_step5} + env_wrapper_cmd_step6: ${params.env_wrapper_cmd_step6} + env_wrapper_cmd_step7: ${params.env_wrapper_cmd_step7} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/tests/assets/oton/test_output_nextflow1_local.nf b/tests/assets/oton/test_output_nextflow1_local.nf index 812258c8..0aad272f 100644 --- a/tests/assets/oton/test_output_nextflow1_local.nf +++ b/tests/assets/oton/test_output_nextflow1_local.nf @@ -15,6 +15,24 @@ 
params.env_wrapper_cmd_step5 = "null" params.env_wrapper_cmd_step6 = "null" params.env_wrapper_cmd_step7 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + forks: ${params.forks} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + env_wrapper_cmd_step1: ${params.env_wrapper_cmd_step1} + env_wrapper_cmd_step2: ${params.env_wrapper_cmd_step2} + env_wrapper_cmd_step3: ${params.env_wrapper_cmd_step3} + env_wrapper_cmd_step4: ${params.env_wrapper_cmd_step4} + env_wrapper_cmd_step5: ${params.env_wrapper_cmd_step5} + env_wrapper_cmd_step6: ${params.env_wrapper_cmd_step6} + env_wrapper_cmd_step7: ${params.env_wrapper_cmd_step7} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/tests/assets/oton/test_output_nextflow1_local_with_MS.nf b/tests/assets/oton/test_output_nextflow1_local_with_MS.nf index 7685e74c..58ed1547 100644 --- a/tests/assets/oton/test_output_nextflow1_local_with_MS.nf +++ b/tests/assets/oton/test_output_nextflow1_local_with_MS.nf @@ -16,6 +16,25 @@ params.env_wrapper_cmd_step5 = "null" params.env_wrapper_cmd_step6 = "null" params.env_wrapper_cmd_step7 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + mets_socket_path: ${params.mets_socket_path} + forks: ${params.forks} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + env_wrapper_cmd_step1: ${params.env_wrapper_cmd_step1} + env_wrapper_cmd_step2: ${params.env_wrapper_cmd_step2} + env_wrapper_cmd_step3: ${params.env_wrapper_cmd_step3} + env_wrapper_cmd_step4: ${params.env_wrapper_cmd_step4} + env_wrapper_cmd_step5: ${params.env_wrapper_cmd_step5} + env_wrapper_cmd_step6: ${params.env_wrapper_cmd_step6} + env_wrapper_cmd_step7: ${params.env_wrapper_cmd_step7} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/tests/assets/oton/test_output_nextflow2.nf b/tests/assets/oton/test_output_nextflow2.nf index a10ca1ad..f6eaf54a 100644 --- a/tests/assets/oton/test_output_nextflow2.nf +++ b/tests/assets/oton/test_output_nextflow2.nf @@ -14,6 +14,23 @@ params.env_wrapper_cmd_step4 = "null" params.env_wrapper_cmd_step5 = "null" params.env_wrapper_cmd_step6 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + forks: ${params.forks} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + env_wrapper_cmd_step1: ${params.env_wrapper_cmd_step1} + env_wrapper_cmd_step2: ${params.env_wrapper_cmd_step2} + env_wrapper_cmd_step3: ${params.env_wrapper_cmd_step3} + env_wrapper_cmd_step4: ${params.env_wrapper_cmd_step4} + env_wrapper_cmd_step5: ${params.env_wrapper_cmd_step5} + env_wrapper_cmd_step6: ${params.env_wrapper_cmd_step6} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/tests/assets/oton/test_output_nextflow3.nf b/tests/assets/oton/test_output_nextflow3.nf index 4e551f1e..aba90b96 100644 --- a/tests/assets/oton/test_output_nextflow3.nf +++ 
b/tests/assets/oton/test_output_nextflow3.nf @@ -10,6 +10,19 @@ params.env_wrapper_cmd_step0 = "null" params.env_wrapper_cmd_step1 = "null" params.env_wrapper_cmd_step2 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + forks: ${params.forks} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + env_wrapper_cmd_step1: ${params.env_wrapper_cmd_step1} + env_wrapper_cmd_step2: ${params.env_wrapper_cmd_step2} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks diff --git a/tests/assets/oton/test_output_nextflow4.nf b/tests/assets/oton/test_output_nextflow4.nf index 1d4f600b..0f8bb59c 100644 --- a/tests/assets/oton/test_output_nextflow4.nf +++ b/tests/assets/oton/test_output_nextflow4.nf @@ -20,6 +20,29 @@ params.env_wrapper_cmd_step10 = "null" params.env_wrapper_cmd_step11 = "null" params.env_wrapper_cmd_step12 = "null" +log.info """\ + OPERANDI HPC - Nextflow Workflow + =================================================== + input_file_group: ${params.input_file_group} + mets_path: ${params.mets_path} + workspace_dir: ${params.workspace_dir} + pages: ${params.pages} + forks: ${params.forks} + env_wrapper_cmd_step0: ${params.env_wrapper_cmd_step0} + env_wrapper_cmd_step1: ${params.env_wrapper_cmd_step1} + env_wrapper_cmd_step2: ${params.env_wrapper_cmd_step2} + env_wrapper_cmd_step3: ${params.env_wrapper_cmd_step3} + env_wrapper_cmd_step4: ${params.env_wrapper_cmd_step4} + env_wrapper_cmd_step5: ${params.env_wrapper_cmd_step5} + env_wrapper_cmd_step6: ${params.env_wrapper_cmd_step6} + env_wrapper_cmd_step7: ${params.env_wrapper_cmd_step7} + env_wrapper_cmd_step8: ${params.env_wrapper_cmd_step8} + env_wrapper_cmd_step9: ${params.env_wrapper_cmd_step9} + env_wrapper_cmd_step10: ${params.env_wrapper_cmd_step10} + env_wrapper_cmd_step11: ${params.env_wrapper_cmd_step11} + env_wrapper_cmd_step12: ${params.env_wrapper_cmd_step12} + """.stripIndent() + process split_page_ranges { debug true maxForks params.forks From 172474d1bca055eab41c419e06bbae8320626896 Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Mon, 2 Dec 2024 16:17:32 +0100 Subject: [PATCH 15/16] used dict for nf_params --- src/utils/operandi_utils/oton/constants.py | 13 --- .../operandi_utils/oton/nf_file_executable.py | 79 +++++++------------ tests/assets/oton/constants.py | 53 ++++++------- tests/tests_utils/test_2_oton/assert_utils.py | 2 +- .../test_2_oton/test_3_converter_1_local.py | 10 +-- 5 files changed, 62 insertions(+), 95 deletions(-) diff --git a/src/utils/operandi_utils/oton/constants.py b/src/utils/operandi_utils/oton/constants.py index 80750029..180fc94b 100644 --- a/src/utils/operandi_utils/oton/constants.py +++ b/src/utils/operandi_utils/oton/constants.py @@ -32,17 +32,4 @@ PARAMS_KEY_CPUS_PER_FORK: str = 'params.cpus_per_fork' PARAMS_KEY_RAM_PER_FORK: str = 'params.ram_per_fork' -REPR_INPUT_FILE_GRP: str = f"""{PARAMS_KEY_INPUT_FILE_GRP} = "null\"""" -REPR_METS_PATH: str = f"""{PARAMS_KEY_METS_PATH} = "null\"""" -REPR_METS_SOCKET_PATH: str = f"""{PARAMS_KEY_METS_SOCKET_PATH} = "null\"""" -REPR_WORKSPACE_DIR: str = f"""{PARAMS_KEY_WORKSPACE_DIR} = "null\"""" -REPR_ENV_WRAPPER_CMD_CORE: str = f"""{PARAMS_KEY_ENV_WRAPPER_CMD_CORE} = "null\"""" -REPR_PAGES: str = f"""{PARAMS_KEY_PAGES} = "null\"""" -REPR_CPUS: str = f"""{PARAMS_KEY_CPUS} = "null\"""" -REPR_RAM: str = 
f"""{PARAMS_KEY_RAM} = "null\"""" -REPR_FORKS: str = f"""{PARAMS_KEY_FORKS} = {PARAMS_KEY_CPUS}""" -REPR_FORKS_NULL: str = f"""{PARAMS_KEY_FORKS} = "4\"""" -REPR_CPUS_PER_FORK: str = f"""{PARAMS_KEY_CPUS_PER_FORK} = ({PARAMS_KEY_CPUS}.toInteger() / {PARAMS_KEY_FORKS}.toInteger()).intValue()""" -REPR_RAM_PER_FORK: str = f"""{PARAMS_KEY_RAM_PER_FORK} = sprintf("%dGB", ({PARAMS_KEY_RAM}.toInteger() / {PARAMS_KEY_FORKS}.toInteger()).intValue())""" - WORKFLOW_COMMENT = f"// This workflow was automatically generated by the v{OPERANDI_VERSION} operandi_utils.oton module" diff --git a/src/utils/operandi_utils/oton/nf_file_executable.py b/src/utils/operandi_utils/oton/nf_file_executable.py index 61cf247e..57d8d31c 100644 --- a/src/utils/operandi_utils/oton/nf_file_executable.py +++ b/src/utils/operandi_utils/oton/nf_file_executable.py @@ -11,20 +11,12 @@ PARAMS_KEY_ENV_WRAPPER_CMD_CORE, PARAMS_KEY_ENV_WRAPPER_CMD_STEP, PARAMS_KEY_FORKS, + PARAMS_KEY_PAGES, + PARAMS_KEY_CPUS, PARAMS_KEY_CPUS_PER_FORK, + PARAMS_KEY_RAM, PARAMS_KEY_RAM_PER_FORK, - REPR_ENV_WRAPPER_CMD_CORE, - REPR_INPUT_FILE_GRP, - REPR_METS_PATH, - REPR_METS_SOCKET_PATH, - REPR_WORKSPACE_DIR, - REPR_PAGES, - REPR_CPUS, - REPR_RAM, - REPR_FORKS, - REPR_FORKS_NULL, - REPR_CPUS_PER_FORK, - REPR_RAM_PER_FORK, + PARAMS_KEY_METS_SOCKET_PATH, SPACES, WORKFLOW_COMMENT ) @@ -38,7 +30,7 @@ def __init__(self): self.logger.setLevel(getLevelName(OTON_LOG_LEVEL)) self.supported_environments = ["local", "docker", "apptainer"] - self.nf_lines_parameters: List[str] = [] + self.nf_lines_parameters = {} self.nf_process_split_range = None self.nf_process_merging_mets = None self.nf_blocks_process: List[NextflowBlockProcess] = [] @@ -48,28 +40,28 @@ def build_parameters(self, environment: str, with_mets_server: bool): if environment not in self.supported_environments: raise ValueError(f"Invalid environment value: {environment}. 
Must be one of: {self.supported_environments}") - self.nf_lines_parameters.append('nextflow.enable.dsl = 2') - self.nf_lines_parameters.append('') - self.nf_lines_parameters.append(REPR_INPUT_FILE_GRP) - self.nf_lines_parameters.append(REPR_METS_PATH) - self.nf_lines_parameters.append(REPR_WORKSPACE_DIR) - self.nf_lines_parameters.append(REPR_PAGES) + self.nf_lines_parameters[PARAMS_KEY_INPUT_FILE_GRP] = '"null"' + self.nf_lines_parameters[PARAMS_KEY_METS_PATH] = '"null"' + self.nf_lines_parameters[PARAMS_KEY_WORKSPACE_DIR] = '"null"' + self.nf_lines_parameters[PARAMS_KEY_PAGES] = '"null"' if with_mets_server: - self.nf_lines_parameters.append(REPR_METS_SOCKET_PATH) + self.nf_lines_parameters[PARAMS_KEY_METS_SOCKET_PATH] = '"null"' if environment == "local": - self.nf_lines_parameters.append(REPR_FORKS_NULL) + self.nf_lines_parameters[PARAMS_KEY_FORKS] = '"4"' if environment == "docker": - self.nf_lines_parameters.append(REPR_FORKS_NULL) - self.nf_lines_parameters.append(REPR_ENV_WRAPPER_CMD_CORE) + self.nf_lines_parameters[PARAMS_KEY_FORKS] = '"4"' + self.nf_lines_parameters[PARAMS_KEY_ENV_WRAPPER_CMD_CORE] = '"null"' if environment == "apptainer": - self.nf_lines_parameters.append(REPR_CPUS) - self.nf_lines_parameters.append(REPR_RAM) - self.nf_lines_parameters.append(REPR_FORKS) - self.nf_lines_parameters.append(REPR_CPUS_PER_FORK) - self.nf_lines_parameters.append(REPR_RAM_PER_FORK) - self.nf_lines_parameters.append(REPR_ENV_WRAPPER_CMD_CORE) + self.nf_lines_parameters[PARAMS_KEY_CPUS] = '"null"' + self.nf_lines_parameters[PARAMS_KEY_RAM] = '"null"' + self.nf_lines_parameters[PARAMS_KEY_FORKS] = f'{PARAMS_KEY_CPUS}' + self.nf_lines_parameters[PARAMS_KEY_CPUS_PER_FORK] = \ + f'({PARAMS_KEY_CPUS}.toInteger() / {PARAMS_KEY_FORKS}.toInteger()).intValue()' + self.nf_lines_parameters[PARAMS_KEY_RAM_PER_FORK] = \ + f'sprintf("%dGB", ({PARAMS_KEY_RAM}.toInteger() / {PARAMS_KEY_FORKS}.toInteger()).intValue())' + self.nf_lines_parameters[PARAMS_KEY_ENV_WRAPPER_CMD_CORE] = '"null"' # TODO: Refactor later def build_split_page_ranges_process(self, environment: str, with_mets_server: bool) -> NextflowBlockProcess: @@ -180,35 +172,22 @@ def build_nextflow_processes( nf_process_block.add_parameter_output(parameter=CONST_METS_PATH, parameter_type='val') nf_process_block.add_parameter_output(parameter=CONST_PAGE_RANGE, parameter_type='val') nf_process_block.add_parameter_output(parameter=CONST_WORKSPACE_DIR, parameter_type='val') - self.nf_lines_parameters.append(f'{PARAMS_KEY_ENV_WRAPPER_CMD_STEP}{index} = "null"') + self.nf_lines_parameters[f'{PARAMS_KEY_ENV_WRAPPER_CMD_STEP}{index}'] = '"null"' self.nf_blocks_process.append(nf_process_block) index += 1 - def __assign_first_file_grps_param(self): - first_file_grps = self.nf_blocks_process[0].processor_call_arguments.input_file_grps - index = 0 - for parameter in self.nf_lines_parameters: - if PARAMS_KEY_INPUT_FILE_GRP in parameter: - self.nf_lines_parameters[index] = parameter.replace("null", first_file_grps) - break - index += 1 - def build_log_info_prints(self) -> str: log_info = f'log.info """\\\n' log_info += f"{SPACES}OPERANDI HPC - Nextflow Workflow\n" log_info += f"{SPACES}===================================================\n" - for param in self.nf_lines_parameters: - if not param or "params." 
not in param: - continue - param_key = param[param.find(".") + 1:param.find("=") - 1] - log_info += f"{SPACES}{param_key}: " - log_info += f'${BS[0]}{param[0:param.find("=") - 1]}{BS[1]}\n' + for key, value in self.nf_lines_parameters.items(): + log_info += f"{SPACES}{key[len('params.'):]}: ${BS[0]}{key}{BS[1]}\n" log_info += f'{SPACES}""".stripIndent()\n' return log_info - # TODO: Refactor later def build_main_workflow(self, with_mets_server: bool): - self.__assign_first_file_grps_param() + first_file_grps = self.nf_blocks_process[0].processor_call_arguments.input_file_grps + self.nf_lines_parameters[PARAMS_KEY_INPUT_FILE_GRP] = f'"{first_file_grps}"' nf_workflow_block = NextflowBlockWorkflow( workflow_name="main", nf_processes=self.nf_blocks_process, @@ -223,8 +202,10 @@ def produce_nextflow_file(self, output_path: str, environment: str, with_mets_se # Write Nextflow line tokens to an output file with open(output_path, mode='w', encoding='utf-8') as nextflow_file: nextflow_file.write(f"{WORKFLOW_COMMENT}\n") - for nextflow_line in self.nf_lines_parameters: - nextflow_file.write(f'{nextflow_line}\n') + nextflow_file.write("nextflow.enable.dsl = 2\n") + nextflow_file.write("\n") + for key, value in self.nf_lines_parameters.items(): + nextflow_file.write(f'{key} = {value}\n') nextflow_file.write("\n") nextflow_file.write(self.build_log_info_prints()) nextflow_file.write("\n") diff --git a/tests/assets/oton/constants.py b/tests/assets/oton/constants.py index b8f3e053..6e540e84 100644 --- a/tests/assets/oton/constants.py +++ b/tests/assets/oton/constants.py @@ -105,33 +105,32 @@ } """ -PARAMETERS_COMMON = [ - 'nextflow.enable.dsl = 2', - 'params.mets_path = "null"', - 'params.workspace_dir = "null"', - 'params.pages = "null"', -] +PARAMETERS_COMMON = { + 'params.mets_path': '"null"', + 'params.workspace_dir': '"null"', + 'params.pages': '"null"', +} -PARAMETERS_LOCAL = [ - 'params.forks = "4"' -] +PARAMETERS_LOCAL = { + 'params.forks': '"4"', +} -PARAMETERS_DOCKER = [ - 'params.forks = "4"', - 'params.env_wrapper_cmd_core = "null"', - 'params.env_wrapper_cmd_step0 = "null"', - 'params.env_wrapper_cmd_step1 = "null"', - 'params.env_wrapper_cmd_step2 = "null"', -] +PARAMETERS_DOCKER = { + 'params.forks': '"4"', + 'params.env_wrapper_cmd_core': '"null"', + 'params.env_wrapper_cmd_step0': '"null"', + 'params.env_wrapper_cmd_step1': '"null"', + 'params.env_wrapper_cmd_step2': '"null"', +} -PARAMETERS_APPTAINER = [ - 'params.cpus = "null"', - 'params.ram = "null"', - 'params.forks = params.cpus', - 'params.cpus_per_fork = (params.cpus.toInteger() / params.forks.toInteger()).intValue()', - 'params.ram_per_fork = sprintf("%dGB", (params.ram.toInteger() / params.forks.toInteger()).intValue())', - 'params.env_wrapper_cmd_core = "null"', - 'params.env_wrapper_cmd_step0 = "null"', - 'params.env_wrapper_cmd_step1 = "null"', - 'params.env_wrapper_cmd_step2 = "null"', -] +PARAMETERS_APPTAINER = { + 'params.cpus': '"null"', + 'params.ram': '"null"', + 'params.forks': 'params.cpus', + 'params.cpus_per_fork': '(params.cpus.toInteger() / params.forks.toInteger()).intValue()', + 'params.ram_per_fork': 'sprintf("%dGB", (params.ram.toInteger() / params.forks.toInteger()).intValue())', + 'params.env_wrapper_cmd_core': '"null"', + 'params.env_wrapper_cmd_step0': '"null"', + 'params.env_wrapper_cmd_step1': '"null"', + 'params.env_wrapper_cmd_step2': '"null"', +} diff --git a/tests/tests_utils/test_2_oton/assert_utils.py b/tests/tests_utils/test_2_oton/assert_utils.py index ed197bea..11d334b7 100644 --- 
a/tests/tests_utils/test_2_oton/assert_utils.py +++ b/tests/tests_utils/test_2_oton/assert_utils.py @@ -13,7 +13,7 @@ def assert_common_features( for parameter in PARAMETERS_COMMON: assert parameter in parameters if with_mets_server: - assert 'params.mets_socket_path = "null"' in parameters, f"params.mets_socket_path is missing in {parameters}" + assert parameters['params.mets_socket_path'] == '"null"', f"params.mets_socket_path is missing in {parameters}" blocks_process = nextflow_file_class.nf_blocks_process assert len(blocks_process) == num_blocks_process for block in blocks_process: diff --git a/tests/tests_utils/test_2_oton/test_3_converter_1_local.py b/tests/tests_utils/test_2_oton/test_3_converter_1_local.py index e113bcaa..1232a756 100644 --- a/tests/tests_utils/test_2_oton/test_3_converter_1_local.py +++ b/tests/tests_utils/test_2_oton/test_3_converter_1_local.py @@ -9,20 +9,20 @@ def test_convert_wf1_with_env_local(oton_converter): nextflow_file_class = oton_converter.convert_oton(IN_TXT_WF1, OUT_NF_WF1_LOCAL, "local", False) - assert 'params.input_file_group = "OCR-D-IMG"' in nextflow_file_class.nf_lines_parameters + assert nextflow_file_class.nf_lines_parameters['params.input_file_group'] == '"OCR-D-IMG"' assert_common_features(nextflow_file_class, 8, 1, False) assert_compare_workflow_blocks(OUT_NF_WF1_LOCAL, EXPECTED_WF1) def test_convert_wf1_with_env_local_with_mets_server(oton_converter): nextflow_file_class = oton_converter.convert_oton(IN_TXT_WF1, OUT_NF_WF1_LOCAL_WITH_MS, "local", True) - assert 'params.input_file_group = "OCR-D-IMG"' in nextflow_file_class.nf_lines_parameters + assert nextflow_file_class.nf_lines_parameters['params.input_file_group'] == '"OCR-D-IMG"' assert_common_features(nextflow_file_class, 8, 1, True) assert_compare_workflow_blocks(OUT_NF_WF1_LOCAL_WITH_MS, EXPECTED_WF1_WITH_MS) def test_convert_wf2_with_env_local(oton_converter): nextflow_file_class = oton_converter.convert_oton(IN_TXT_WF2, OUT_NF_WF2_LOCAL, "local", False) - assert 'params.input_file_group = "OCR-D-IMG"' in nextflow_file_class.nf_lines_parameters + assert nextflow_file_class.nf_lines_parameters['params.input_file_group'] == '"OCR-D-IMG"' assert_common_features(nextflow_file_class, 7, 1, False) assert_common_features_local(nextflow_file_class) assert_compare_workflow_blocks(OUT_NF_WF2_LOCAL, EXPECTED_WF2) @@ -30,7 +30,7 @@ def test_convert_wf2_with_env_local(oton_converter): def test_convert_wf3_with_env_local(oton_converter): nextflow_file_class = oton_converter.convert_oton(IN_TXT_WF3, OUT_NF_WF3_LOCAL, "local", False) - assert 'params.input_file_group = "OCR-D-GT-SEG-BLOCK,OCR-D-OCR"' in nextflow_file_class.nf_lines_parameters + assert nextflow_file_class.nf_lines_parameters['params.input_file_group'] == '"OCR-D-GT-SEG-BLOCK,OCR-D-OCR"' assert_common_features(nextflow_file_class, 3, 1, False) assert_common_features_local(nextflow_file_class) assert_compare_workflow_blocks(OUT_NF_WF3_LOCAL, EXPECTED_WF3) @@ -38,7 +38,7 @@ def test_convert_wf3_with_env_local(oton_converter): def test_convert_wf4_with_env_local(oton_converter): nextflow_file_class = oton_converter.convert_oton(IN_TXT_WF4, OUT_NF_WF4_LOCAL, "local", False) - assert 'params.input_file_group = "OCR-D-IMG"' in nextflow_file_class.nf_lines_parameters + assert nextflow_file_class.nf_lines_parameters['params.input_file_group'] == '"OCR-D-IMG"' assert_common_features(nextflow_file_class, 13, 1, False) assert_common_features_local(nextflow_file_class) assert_compare_workflow_blocks(OUT_NF_WF4_LOCAL, EXPECTED_WF4) From 
602636edc5f484e3430f15d5ad6a7711992424bc Mon Sep 17 00:00:00 2001 From: Mehmed Mustafa Date: Mon, 2 Dec 2024 16:20:20 +0100 Subject: [PATCH 16/16] fix: operandi_utils setup --- src/utils/setup.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/utils/setup.py b/src/utils/setup.py index 9a79e4db..77b08639 100644 --- a/src/utils/setup.py +++ b/src/utils/setup.py @@ -20,7 +20,9 @@ 'operandi_utils.oton', 'operandi_utils.rabbitmq' ], - package_data={'': ['batch_scripts/*.sh', 'nextflow_workflows/*.nf', 'ocrd_all_tool.json']}, + package_data={ + '': ['batch_scripts/*.sh', 'nextflow_workflows/*.nf', 'ocrd_process_workflows/*.txt', 'ocrd_all_tool.json'] + }, install_requires=install_requires, entry_points={ 'console_scripts': [
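
The image-selection and command-wrapping logic introduced in patches 12-14 above, condensed into a minimal standalone sketch. The executable-to-image mapping and the slim SIF file names below are assumed placeholder values, not the real OCRD_PROCESSOR_EXECUTABLE_TO_IMAGE contents; only ocrd_all_maximum_image.sif is taken from the patches themselves.

# Minimal sketch (not part of the patch series) of how HPC_USE_SLIM_IMAGES and
# the PH_* placeholders are meant to interact. Mapping values are assumptions.
PH_NODE_DIR_PROCESSOR_SIFS = "PH_NODE_DIR_PROCESSOR_SIFS"  # replaced on the compute node
PH_CMD_WRAPPER = "PH_CMD_WRAPPER"  # replaced with a single quote by the batch script

EXECUTABLE_TO_IMAGE = {                      # assumed example mapping
    "ocrd": "ocrd_core_image.sif",
    "ocrd-cis-ocropy-binarize": "ocrd_cis_image.sif",
}
SIF_OCRD_ALL = "ocrd_all_maximum_image.sif"  # name taken from the patch

def select_images(nf_executable_steps: list, use_slim_images: bool) -> str:
    """Return the comma-separated SIF list handed to the batch script."""
    if not use_slim_images:
        return SIF_OCRD_ALL
    step_images = [EXECUTABLE_TO_IMAGE[exe] for exe in nf_executable_steps]
    return ",".join([EXECUTABLE_TO_IMAGE["ocrd"]] + step_images)

def wrap_step_command(hpc_ws_dir: str, sif_image: str) -> str:
    """Build one --env_wrapper_cmd_stepN value for the Nextflow run command."""
    apptainer_cmd = (
        f"apptainer exec --bind {hpc_ws_dir}:/ws_data "
        f"--env OCRD_METS_CACHING=false {PH_NODE_DIR_PROCESSOR_SIFS}/{sif_image}"
    )
    return f"{PH_CMD_WRAPPER}{apptainer_cmd}{PH_CMD_WRAPPER}"

if __name__ == "__main__":
    print(select_images(["ocrd-cis-ocropy-binarize"], use_slim_images=True))
    print(wrap_step_command("/scratch/slurm_workspaces/wf_job/ws_id", SIF_OCRD_ALL))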
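
Likewise, a minimal standalone sketch of how the dict-based nf_lines_parameters from patch 15 is rendered into the Nextflow preamble and the new log.info banner; SPACES and the example parameter values are assumptions, not the project's constants.

# Standalone sketch mirroring build_log_info_prints()/produce_nextflow_file().
SPACES = "    "  # assumed indentation constant

def render_preamble(nf_lines_parameters: dict) -> str:
    lines = ["nextflow.enable.dsl = 2", ""]
    lines += [f"{key} = {value}" for key, value in nf_lines_parameters.items()]
    banner = [
        'log.info """\\',
        f"{SPACES}OPERANDI HPC - Nextflow Workflow",
        f"{SPACES}===================================================",
    ]
    banner += [f"{SPACES}{key[len('params.'):]}: ${{{key}}}" for key in nf_lines_parameters]
    banner.append(f'{SPACES}""".stripIndent()')
    return "\n".join(lines + [""] + banner) + "\n"

if __name__ == "__main__":
    print(render_preamble({
        "params.input_file_group": '"OCR-D-IMG"',
        "params.mets_path": '"null"',
        "params.forks": '"4"',
    }))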