Move from print to logger in all scripts

TorecLuik committed Apr 4, 2024
1 parent 2803478 commit 1786982
Showing 5 changed files with 48 additions and 57 deletions.
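Note: the hunks below swap print() for a module-level logger, but the logger setup itself sits outside the changed lines and is not shown in this commit. A minimal sketch of the setup these scripts presumably rely on (the exact handler/level configuration is an assumption):

import logging

logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)  # assumed default; the real scripts may configure handlers elsewhere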
12 changes: 6 additions & 6 deletions data/SLURM_Get_Update.py
@@ -82,7 +82,7 @@ def runScript():
scriptParams = client.getInputs(unwrap=True)

message = ""
print(f"Request: {scriptParams}\n")
logger.info(f"Get Update: {scriptParams}\n")

# Job id
slurm_job_id = unwrap(client.getInput(SLURM_JOB_ID))
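A side note on the new style: f-strings passed to logger calls are formatted eagerly, even when the log level is disabled. The logging module's lazy %-style form defers formatting until a handler actually emits the record (an alternative idiom, not what this commit uses):

logger.info("Get Update: %s", scriptParams)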
@@ -109,7 +109,7 @@ def runScript():
tup = slurmClient.get_logfile_from_slurm(
slurm_job_id_old)
(dir, export_file, result) = tup
print(f"Pulled logfile {result.__dict__}")
logger.debug(f"Pulled logfile {result.__dict__}")
# Upload logfile to Omero as Original File
output_display_name = f"Job logfile '{result.local}'"
namespace = NSCREATED + "/SLURM/SLURM_GET_UPDATE"
@@ -121,9 +121,9 @@ def runScript():
# Attach logfile (OriginalFile) to Project
conn = BlitzGateway(client_obj=client)
project_ids = unwrap(client.getInput("Project"))
- print(project_ids)
+ logger.debug(project_ids)
project_id = project_ids[0].split(":")[0]
- print(project_id)
+ logger.debug(project_id)
project = conn.getObject("Project", project_id)
tup = script_utils.create_link_file_annotation(
conn, export_file, project, output=output_display_name,
@@ -153,9 +153,9 @@ def check_job(slurmClient, message, slurm_job_id):
try:
job_status_dict, poll_result = slurmClient.check_job_status(
slurm_job_ids=[slurm_job_id])
- print(job_status_dict, poll_result.stdout)
+ logger.debug(f"{job_status_dict}, {poll_result.stdout}")
if not poll_result.ok:
print("Error checking job status:", poll_result.stderr)
logger.warning(f"Error checking job status: {poll_result.stderr}")
message += f"\nError checking job status: {poll_result.stderr}"
else:
message += f"\n{job_status_dict}"
10 changes: 5 additions & 5 deletions data/_SLURM_Image_Transfer.py
@@ -67,6 +67,7 @@ def log(text):
except UnicodeEncodeError:
pass
log_strings.append(str(text))
+ logger.debug(str(text))


def compress(target, base):
@@ -219,7 +220,7 @@ def save_as_zarr(conn, suuid, image, folder_name=None):
cmd1 = 'export JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:/bin/java::")'
command = f'omero zarr -s "{conn.host}" -k "{suuid}" --output {exp_dir} export Image:{image.getId()}'
cmd = cmd1 + " && " + command
- print(cmd)
+ logger.debug(cmd)
process = subprocess.Popen(
cmd,
stdout=subprocess.PIPE,
@@ -228,11 +229,10 @@ def save_as_zarr(conn, suuid, image, folder_name=None):
)
stdout, stderr = process.communicate()
if stderr:
- print(stderr.decode("utf-8"))
+ logger.warning(stderr.decode("utf-8"))
if process.returncode == 0:
print(f"OME ZARR CLI: {stdout}")
log(f"OME ZARR CLI: {stdout}")
- print(img_name)
+ logger.debug(img_name)
os.rename(f"{exp_dir}/{image.getId()}.zarr", img_name)
return # shortcut
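Aside: the Popen/communicate sequence above can be written more compactly with subprocess.run; a sketch assuming cmd is the shell string built above (not a change this commit makes):

proc = subprocess.run(cmd, shell=True, capture_output=True, text=True)
if proc.stderr:
    logger.warning(proc.stderr)  # CLI wrote to stderr; surface it as a warning
if proc.returncode == 0:
    log(f"OME ZARR CLI: {proc.stdout}")
    logger.debug(img_name)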

@@ -541,7 +541,7 @@ def get_t_range(size_t, script_params):
# Copy to SLURM
try:
r = slurmClient.transfer_data(Path(export_file))
- print(r)
+ logger.debug(r)
message += f"'{folder_name}' succesfully copied to SLURM!\n"
except Exception as e:
message += f"Copying to SLURM failed: {e}\n"
26 changes: 13 additions & 13 deletions workflows/SLURM_CellPose_Segmentation.py
@@ -60,7 +60,7 @@ def runScript():
_versions, _datafiles = slurmClient.get_image_versions_and_data_files(
'cellpose')
_workflow_params = slurmClient.get_workflow_parameters('cellpose')
- print(_workflow_params)
+ logger.debug(_workflow_params)
name_descr = f"Name of folder where images are stored, as provided\
with {_IMAGE_EXPORT_SCRIPT}"
dur_descr = "Maximum time the script should run for. \
@@ -95,7 +95,7 @@ def runScript():
values=versions)
input_list.append(wf_v)
for i, (k, param) in enumerate(wfparams.items()):
- print(i, k, param)
+ logger.debug(f"{i}, {k}, {param}")
logging.info(param)
p = slurmClient.convert_cytype_to_omtype(
param["cytype"],
@@ -124,22 +124,21 @@ def runScript():
kwargs = {}
for i, k in enumerate(_workflow_params):
kwargs[k] = unwrap(client.getInput(k)) # kwarg dict
- print(kwargs)
+ logger.debug(kwargs)

try:
# 3. Call SLURM (segmentation)
unpack_result = slurmClient.unpack_data(zipfile)
- print(unpack_result.stdout)
+ logger.debug(unpack_result.stdout)
if not unpack_result.ok:
print("Error unpacking data:", unpack_result.stderr)
logger.warning(f"Error unpacking data:{unpack_result.stderr}")
else:
# Quick git pull on Slurm for latest version of job scripts
try:
update_result = slurmClient.update_slurm_scripts()
- print(update_result.__dict__)
+ logger.debug(update_result.__dict__)
except Exception as e:
print("Error updating SLURM scripts:",
e)
logger.warning(f"Error updating SLURM scripts:{e}")

cp_result, slurm_job_id = slurmClient.run_workflow(
workflow_name='cellpose',
@@ -150,7 +149,7 @@
**kwargs
)
if not cp_result.ok:
print("Error running CellPose job:", cp_result.stderr)
logger.warning(f"Error running CellPose job: {cp_result.stderr}")
else:
print_result = f"Submitted to Slurm as\
batch job {slurm_job_id}."
@@ -159,17 +158,18 @@
tup = slurmClient.check_job_status(
[slurm_job_id])
(job_status_dict, poll_result) = tup
- print(poll_result.stdout, job_status_dict)
+ logger.debug(f"{poll_result.stdout}, {job_status_dict}")
if not poll_result.ok:
print("Error checking job status:",
poll_result.stderr)
logger.warning("Error checking job status:",
poll_result.stderr)
else:
print_result += f"\n{job_status_dict}"
except Exception as e:
print_result += f" ERROR WITH JOB: {e}"
- print(print_result)
+ logger.warning(print_result)

# 7. Script output
+ logger.info(print_result)
client.setOutput("Message", rstring(print_result))
finally:
client.closeSession()
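For orientation, the runScript structure common to these scripts (a sketch based on the fragments above; constructor arguments omitted, names as in the diff):

def runScript():
    client = omscripts.client("...")  # script definition omitted
    try:
        print_result = ""
        # ... submit job, poll status, append to print_result ...
        logger.info(print_result)
        client.setOutput("Message", rstring(print_result))
    finally:
        client.closeSession()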
54 changes: 23 additions & 31 deletions workflows/SLURM_Run_Workflow.py
@@ -165,7 +165,7 @@ def runScript():
# Create a script parameter for all workflow parameters
for param_incr, (k, param) in enumerate(_workflow_params[
wf].items()):
- print(param_incr, k, param)
+ logger.debug(f"{param_incr}, {k}, {param}")
logger.info(param)
# Convert the parameter from cy(tomine)type to om(ero)type
omtype_param = slurmClient.convert_cytype_to_omtype(
@@ -212,7 +212,7 @@ def runScript():
version_errors = ""
for wf, selected in selected_workflows.items():
selected_version = unwrap(client.getInput(f"{wf}_Version"))
- print(wf, selected, selected_version)
+ logger.debug(f"{wf}, {selected}, {selected_version}")
if selected and not selected_version:
version_errors += f"ERROR: No version for '{wf}'! \n"
if version_errors:
@@ -227,13 +227,12 @@
selected_output[output_option] = False
else:
selected_output[output_option] = True
print(f"Selected: {output_option} >> [{selected_op}]")
logger.debug(f"Selected: {output_option} >> [{selected_op}]")
if not any(selected_output.values()):
errormsg = "ERROR: Please select at least 1 output method!"
client.setOutput("Message", rstring(errormsg))
raise ValueError(errormsg)
else:
print(f"Output options chosen: {selected_output}")
logger.info(f"Output options chosen: {selected_output}")

# Connect to Omero
@@ -264,7 +263,7 @@ def runScript():
unpack_result = slurmClient.unpack_data(zipfile)
logger.debug(unpack_result.stdout)
if not unpack_result.ok:
logger.warning(f"Error unpacking data:{unpack_result.stderr}")
logger.warning(f"Error unpacking data: {unpack_result.stderr}")
else:
slurm_job_ids = {}
# Quick git pull on Slurm for latest version of job scripts
@@ -277,9 +276,9 @@
# --------------------------------------------
''')
slurmJob = slurmClient.run_conversion_workflow_job(zipfile, 'zarr', 'tiff')
- logger.debug(slurmJob)
+ logger.info(f"Conversion job: {slurmJob}")
if not slurmJob.ok:
logger.warning(f"Error converting data:{slurmJob.get_error()}")
logger.warning(f"Error converting data: {slurmJob.get_error()}")
else:
try:
slurmJob.wait_for_completion(slurmClient, conn)
@@ -328,7 +327,6 @@ def runScript():
# slurm_job_id)
# log_msg = f"Job {slurm_job_id} has been
# resubmitted ({new_job_id})."
- print(log_msg)
logger.warning(log_msg)
# log_string += log_msg
slurm_job_id_list.remove(slurm_job_id)
@@ -361,7 +359,6 @@
else:
log_msg = "Attempted to import images to\
Omero."
- print(log_msg)
logger.info(log_msg)
UI_messages += log_msg
slurm_job_id_list.remove(slurm_job_id)
@@ -370,21 +367,18 @@
# Remove from future checks
log_msg = f"Job {slurm_job_id} is {job_state}."
log_msg += f"You can get the logfile using `Slurm Get Update` on job {slurm_job_id}"
- print(log_msg)
logger.warning(log_msg)
UI_messages += log_msg
slurm_job_id_list.remove(slurm_job_id)
elif (job_state == "PENDING"
or job_state == "RUNNING"):
# expected
log_msg = f"Job {slurm_job_id} is busy..."
- print(log_msg)
logger.debug(log_msg)
continue
else:
log_msg = f"Oops! State of job {slurm_job_id}\
is unknown: {job_state}. Stop tracking."
- print(log_msg)
logger.warning(log_msg)
UI_messages += log_msg
slurm_job_id_list.remove(slurm_job_id)
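The polling loop above applies one logging level per Slurm job state (a sketch of the pattern; the exact set of state strings beyond PENDING/RUNNING is an assumption here):

if job_state in ("PENDING", "RUNNING"):
    logger.debug(f"Job {slurm_job_id} is busy...")       # expected, keep polling
elif job_state == "COMPLETED":
    logger.info("Importing results...")                  # import, stop tracking
elif job_state in ("FAILED", "TIMEOUT"):
    logger.warning("Job failed; fetch the logfile")      # stop tracking
else:
    logger.warning(f"Unknown state: {job_state}")        # stop tracking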
@@ -423,8 +417,7 @@ def run_workflow(slurmClient: SlurmClient,
**kwargs)
logger.debug(cp_result.stdout)
if not cp_result.ok:
logger.warning(f"Error running {name} job:",
cp_result.stderr)
logger.warning(f"Error running {name} job: {cp_result.stderr}")
else:
UI_messages += f"Submitted {name} to Slurm\
as batch job {slurm_job_id}."
@@ -434,14 +427,13 @@
logger.debug(
f"{job_status_dict[slurm_job_id]}, {poll_result.stdout}")
if not poll_result.ok:
logger.warning(f"Error checking job status:{poll_result.stderr}")
logger.warning(f"Error checking job status: {poll_result.stderr}")
else:
log_msg = f"\n{job_status_dict[slurm_job_id]}"
logger.info(log_msg)
- print(log_msg)
except Exception as e:
UI_messages += f" ERROR WITH JOB: {e}"
- print(UI_messages)
logger.warning(UI_messages)
raise SSHException(UI_messages)
return UI_messages, slurm_job_id

Expand All @@ -453,14 +445,14 @@ def getOmeroEmail(client, conn):
user = conn.getUser()
use_email = user.getEmail()
if use_email == "None":
print("No email given for this user")
logger.debug("No email given for this user")
use_email = None
except omero.gateway.OMEROError as e:
print(f"Error retrieving email {e}")
logger.warning(f"Error retrieving email {e}")
use_email = None
else:
use_email = None
print(f"Using email {use_email}")
logger.info(f"Using email {use_email}")
return use_email


@@ -488,7 +480,7 @@ def exportImageToSLURM(client: omscripts.client,
"Format": rstring('ZARR'),
"Folder_Name": rstring(zipfile)
}
- print(inputs, script_ids)
+ logger.debug(f"{inputs}, {script_ids}")
rv = runOMEROScript(client, svc, script_ids, inputs)
return rv

@@ -528,7 +520,7 @@ def importResultsToOmero(client: omscripts.client,
for s in scripts if unwrap(s.getName()) in IMPORT_SCRIPTS]
first_id = unwrap(client.getInput("IDs"))[0]
data_type = unwrap(client.getInput("Data_Type"))
- print(script_ids, first_id, data_type)
+ logger.debug(f"{script_ids}, {first_id}, {data_type}")
opts = {}
inputs = {
constants.RESULTS_OUTPUT_COMPLETED_JOB: rbool(True),
@@ -543,26 +535,26 @@
'Dataset', opts={'image': first_id})]
plates = [d.id for d in conn.getObjects(
'Plate', opts={'image': first_id})]
print(f"Datasets:{datasets} Plates:{plates}")
logger.debug(f"Datasets:{datasets} Plates:{plates}")
if len(plates) > len(datasets):
parent_id = plates[0]
parent_data_type = 'Plate'
else:
parent_id = datasets[0]
parent_data_type = 'Dataset'

print(f"Determined parent to be {parent_data_type}:{parent_id}")
logger.debug(f"Determined parent to be {parent_data_type}:{parent_id}")

if selected_output[OUTPUT_PARENT]:
# For now, there is no attaching to Dataset or Screen...
# If we need that, build it ;) (in Get_Result script)
if parent_data_type == 'Dataset' or parent_data_type == 'Project':
print(f"Adding to dataset {parent_id}")
logger.debug(f"Adding to dataset {parent_id}")
projects = get_project_name_ids(conn, parent_id)
inputs[constants.RESULTS_OUTPUT_ATTACH_PROJECT_ID] = rlist(
projects)
elif parent_data_type == 'Plate':
print(f"Adding to plate {parent_id}")
logger.debug(f"Adding to plate {parent_id}")
plates = get_plate_name_ids(conn, parent_id)
inputs[constants.RESULTS_OUTPUT_ATTACH_PROJECT] = rbool(False)
inputs[constants.RESULTS_OUTPUT_ATTACH_PLATE] = rbool(True)
@@ -647,7 +639,7 @@ def importResultsToOmero(client: omscripts.client,
constants.RESULTS_OUTPUT_ATTACH_TABLE
] = rbool(False)

print(f"Running import script {script_ids} with inputs: {inputs}")
logger.info(f"Running import script {script_ids} with inputs: {inputs}")
rv = runOMEROScript(client, svc, script_ids, inputs)
return rv

Expand All @@ -658,21 +650,21 @@ def get_project_name_ids(conn, parent_id):
projects = [rstring('%d: %s' % (d.id, d.getName()))
for d in conn.getObjects('Project',
opts={'dataset': parent_id})]
- print(projects)
+ logger.debug(projects)
return projects


def get_dataset_name_ids(conn, parent_id):
dataset = [rstring('%d: %s' % (d.id, d.getName()))
for d in conn.getObjects('Dataset', [parent_id])]
- print(dataset)
+ logger.debug(dataset)
return dataset


def get_plate_name_ids(conn, parent_id):
plates = [rstring('%d: %s' % (d.id, d.getName()))
for d in conn.getObjects('Plate', [parent_id])]
- print(plates)
+ logger.debug(plates)
return plates


Expand All @@ -698,7 +690,7 @@ def createFileName(client: omscripts.client, conn: BlitzGateway) -> str:
timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
filename = "_".join(objparams)
full_filename = f"{filename}_{timestamp}"
print("Filename: " + full_filename)
logger.debug("Filename: " + full_filename)
return full_filename

