Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Ruff and flake8 fixes #16884

Merged
merged 3 commits
Oct 19, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/lint.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ jobs:
name: Test
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.7', '3.11']
env:
Expand Down
6 changes: 3 additions & 3 deletions lib/galaxy/authnz/managers.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ def _parse_oidc_config(self, config_file):
if root.tag != "OIDC":
raise etree.ParseError(
"The root element in OIDC_Config xml file is expected to be `OIDC`, "
"found `{}` instead -- unable to continue.".format(root.tag)
f"found `{root.tag}` instead -- unable to continue."
)
for child in root:
if child.tag != "Setter":
Expand All @@ -79,7 +79,7 @@ def _parse_oidc_config(self, config_file):
if "Property" not in child.attrib or "Value" not in child.attrib or "Type" not in child.attrib:
log.error(
"Could not find the node attributes `Property` and/or `Value` and/or `Type`;"
" found these attributes: `{}`; skipping this node.".format(child.attrib)
f" found these attributes: `{child.attrib}`; skipping this node."
)
continue
try:
Expand Down Expand Up @@ -110,7 +110,7 @@ def _parse_oidc_backends_config(self, config_file):
if root.tag != "OIDC":
raise etree.ParseError(
"The root element in OIDC config xml file is expected to be `OIDC`, "
"found `{}` instead -- unable to continue.".format(root.tag)
f"found `{root.tag}` instead -- unable to continue."
)
for child in root:
if child.tag != "provider":
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/jobs/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1674,7 +1674,7 @@ def _set_object_store_ids_full(self, job):
# the outputs and set them accordingly
object_store_id_overrides = {o: preferred_outputs_object_store_id for o in output_names}

def split_object_stores(output_name):
def split_object_stores(output_name): # noqa: F811 https://github.com/PyCQA/pyflakes/issues/783
if "|__part__|" in output_name:
output_name = output_name.split("|__part__|", 1)[0]
if output_name in output_names:
Expand Down
7 changes: 3 additions & 4 deletions lib/galaxy/managers/workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -1435,13 +1435,12 @@ def _workflow_to_dict_export(self, trans, stored=None, workflow=None, internal=F
if name:
input_dicts.append({"name": name, "description": annotation_str})
for name, val in step_state.items():
input_type = type(val)
if input_type == RuntimeValue:
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This looks so intentional to me - like we did this at some point to avoid ConnectedValue? I can't find evidence of that in the blame logs though. I think it is fine to keep this as is and just revert if there are problems. If this is actually the check we want - I think ideally we should be using is_runtime_value, again not a necessary thing though.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks for the insight!
By looking at the implementation of is_runtime_value:

def is_runtime_value(value):
    return isinstance(value, RuntimeValue) or (
        isinstance(value, dict) and value.get("__class__") in ["RuntimeValue", "ConnectedValue"]
    )

it's clear that it would not avoid ConnectedValue either.
I don't see a reason to avoid it here, but if there is one, then we would need to revert this change or use if isinstance(val, RuntimeValue) and not isinstance(val, ConnectedValue):

if isinstance(val, RuntimeValue):
input_dicts.append({"name": name, "description": f"runtime parameter for tool {module.get_name()}"})
elif input_type == dict:
elif isinstance(val, dict):
# Input type is described by a dict, e.g. indexed parameters.
for partval in val.values():
if type(partval) == RuntimeValue:
if isinstance(partval, RuntimeValue):
input_dicts.append(
{"name": name, "description": f"runtime parameter for tool {module.get_name()}"}
)
Expand Down
12 changes: 3 additions & 9 deletions lib/galaxy/model/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -604,7 +604,7 @@ def calculate_user_disk_usage_statements(user_id, quota_source_map, for_sqlite=F
)
if for_sqlite:
# hacky alternative for older sqlite
statement = """
statement = f"""
WITH new (user_id, quota_source_label, disk_usage) AS (
VALUES(:id, :label, ({label_usage}))
)
Expand All @@ -614,9 +614,7 @@ def calculate_user_disk_usage_statements(user_id, quota_source_map, for_sqlite=F
LEFT JOIN user_quota_source_usage AS old
ON new.user_id = old.user_id
AND new.quota_source_label = old.quota_source_label
""".format(
label_usage=label_usage
)
"""
else:
statement = f"""
INSERT INTO user_quota_source_usage(user_id, quota_source_label, disk_usage)
Expand Down Expand Up @@ -996,11 +994,7 @@ def calculate_disk_usage_default_source(self, object_store):
exclude_objectstore_ids = quota_source_map.default_usage_excluded_ids()
default_cond = "dataset.object_store_id IS NULL OR" if default_quota_enabled and exclude_objectstore_ids else ""
default_usage_dataset_condition = (
(
"AND ( {default_cond} dataset.object_store_id NOT IN :exclude_object_store_ids )".format(
default_cond=default_cond,
)
)
f"AND ( {default_cond} dataset.object_store_id NOT IN :exclude_object_store_ids )"
if exclude_objectstore_ids
else ""
)
Expand Down
10 changes: 5 additions & 5 deletions lib/galaxy/model/security.py
Original file line number Diff line number Diff line change
Expand Up @@ -605,21 +605,21 @@ def __active_folders_have_accessible_library_datasets(self, trans, folder, user,
return False

def can_access_library_item(self, roles, item, user):
if type(item) == self.model.Library:
if isinstance(item, self.model.Library):
return self.can_access_library(roles, item)
elif type(item) == self.model.LibraryFolder:
elif isinstance(item, self.model.LibraryFolder):
return (
self.can_access_library(roles, item.parent_library) and self.check_folder_contents(user, roles, item)[0]
)
elif type(item) == self.model.LibraryDataset:
elif isinstance(item, self.model.LibraryDataset):
return self.can_access_library(roles, item.folder.parent_library) and self.can_access_dataset(
roles, item.library_dataset_dataset_association.dataset
)
elif type(item) == self.model.LibraryDatasetDatasetAssociation:
elif isinstance(item, self.model.LibraryDatasetDatasetAssociation):
return self.can_access_library(
roles, item.library_dataset.folder.parent_library
) and self.can_access_dataset(roles, item.dataset)
elif type(item) == self.model.LibraryDatasetCollectionAssociation:
elif isinstance(item, self.model.LibraryDatasetCollectionAssociation):
return self.can_access_library(roles, item.folder.parent_library)
else:
log.warning(f"Unknown library item type: {type(item)}")
Expand Down
6 changes: 2 additions & 4 deletions lib/galaxy/quota/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,16 +179,14 @@ def _default_unregistered_quota(self, quota_source_label):
def _default_quota(self, default_type, quota_source_label):
label_condition = "IS NULL" if quota_source_label is None else "= :label"
query = text(
"""
f"""
SELECT bytes
FROM quota as default_quota
LEFT JOIN default_quota_association on default_quota.id = default_quota_association.quota_id
WHERE default_quota_association.type = :default_type
AND default_quota.deleted != :is_true
AND default_quota.quota_source_label {label_condition}
""".format(
label_condition=label_condition
)
"""
)

conn = self.sa_session.connection()
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/tool_util/verify/script.py
Original file line number Diff line number Diff line change
Expand Up @@ -362,7 +362,7 @@ def build_case_references(
filtered_test_references.append(test_reference)
if log is not None:
log.info(
f"Skipping {len(test_references)-len(filtered_test_references)} out of {len(test_references)} tests."
f"Skipping {len(test_references) - len(filtered_test_references)} out of {len(test_references)} tests."
)
test_references = filtered_test_references

Expand Down
4 changes: 2 additions & 2 deletions lib/galaxy/tools/actions/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -757,7 +757,7 @@ def _remap_job_on_rerun(self, trans, galaxy_session, rerun_remap_job_id, current
assert (
old_job.user_id == trans.user.id
), f"({old_job.id}/{current_job.id}): Old user id ({old_job.user_id}) does not match rerun user id ({trans.user.id})"
elif trans.user is None and type(galaxy_session) == trans.model.GalaxySession:
elif trans.user is None and isinstance(galaxy_session, trans.model.GalaxySession):
assert (
old_job.session_id == galaxy_session.id
), f"({old_job.id}/{current_job.id}): Old session id ({old_job.session_id}) does not match rerun session id ({galaxy_session.id})"
Expand Down Expand Up @@ -847,7 +847,7 @@ def _new_job_for_session(self, trans, tool, history):
if hasattr(trans, "get_galaxy_session"):
galaxy_session = trans.get_galaxy_session()
# If we're submitting from the API, there won't be a session.
if type(galaxy_session) == trans.model.GalaxySession:
if isinstance(galaxy_session, trans.model.GalaxySession):
job.session_id = model.cached_id(galaxy_session)
if trans.user is not None:
job.user_id = model.cached_id(trans.user)
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/tools/actions/upload_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -393,7 +393,7 @@ def create_job(trans, params, tool, json_file_path, outputs, folder=None, histor
trans.sa_session.add(job)
job.galaxy_version = trans.app.config.version_major
galaxy_session = trans.get_galaxy_session()
if type(galaxy_session) == trans.model.GalaxySession:
if isinstance(galaxy_session, trans.model.GalaxySession):
job.session_id = galaxy_session.id
if trans.user is not None:
job.user_id = trans.user.id
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/tools/parameters/validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -409,7 +409,7 @@ def from_element(cls, param, elem):
message = elem.get("message")
negate = elem.get("negate", "false")
if not message:
message = f"The selected dataset is {'non-' if negate == 'true' else ''}empty, this tool expects {'non-' if negate=='false' else ''}empty files."
message = f"The selected dataset is {'non-' if negate == 'true' else ''}empty, this tool expects {'non-' if negate == 'false' else ''}empty files."
return cls(message, negate)

def validate(self, value, trans=None):
Expand Down
4 changes: 2 additions & 2 deletions lib/galaxy/util/dictifiable.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,9 +38,9 @@ def get_value(key, item):
assert value_mapper is not None
if key in value_mapper:
return value_mapper[key](item)
if type(item) == datetime.datetime:
if isinstance(item, datetime.datetime):
return item.isoformat()
elif type(item) == uuid.UUID:
elif isinstance(item, uuid.UUID):
return str(item)
# Leaving this for future reference, though we may want a more
# generic way to handle special type mappings going forward.
Expand Down
12 changes: 5 additions & 7 deletions lib/galaxy/web/framework/middleware/error.py
Original file line number Diff line number Diff line change
Expand Up @@ -467,7 +467,7 @@ def send_report(rep, exc_data, html=True):


def error_template(head_html, exception, extra):
return """
return f"""
<!DOCTYPE HTML>
<html>
<head>
Expand All @@ -476,24 +476,22 @@ def error_template(head_html, exception, extra):
.content {{ max-width: 720px; margin: auto; margin-top: 50px; }}
</style>
<title>Internal Server Error</title>
{}
{head_html}
</head>
<body>
<div class="content">
<h1>Internal Server Error</h1>

<h2>Galaxy was unable to successfully complete your request</h2>

<p>{}</p>
<p>{exception}</p>

This may be an intermittent problem due to load or other unpredictable factors, reloading the page may address the problem.

{}
{extra}
</div>
</body>
</html>""".format(
head_html, exception, extra
)
</html>"""


def make_error_middleware(app, global_conf, **kw):
Expand Down
14 changes: 5 additions & 9 deletions lib/galaxy/webapps/galaxy/api/cloudauthz.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,11 +93,11 @@ def create(self, trans, payload, **kwargs):
* status: HTTP response code
* message: A message complementary to the response code.
"""
msg_template = f"Rejected user `{str(trans.user.id)}`'s request to create cloudauthz config because of {{}}."
msg_template = f"Rejected user `{trans.user.id}`'s request to create cloudauthz config because of {{}}."
if not isinstance(payload, dict):
raise ActionInputError(
"Invalid payload data type. The payload is expected to be a dictionary, but "
"received data of type `{}`.".format(str(type(payload)))
f"received data of type `{type(payload)}`."
)

missing_arguments = []
Expand All @@ -116,7 +116,7 @@ def create(self, trans, payload, **kwargs):
if len(missing_arguments) > 0:
log.debug(msg_template.format(f"missing required config {missing_arguments}"))
raise RequestParameterMissingException(
"The following required arguments are missing in the payload: " "{}".format(missing_arguments)
f"The following required arguments are missing in the payload: {missing_arguments}"
)

description = payload.get("description", "")
Expand Down Expand Up @@ -200,9 +200,7 @@ def delete(self, trans, encoded_authz_id, **kwargs):
return view
except Exception as e:
log.exception(
msg_template.format(
"exception while deleting the cloudauthz record with " "ID: `{}`.".format(encoded_authz_id)
)
msg_template.format(f"exception while deleting the cloudauthz record with ID: `{encoded_authz_id}`.")
)
raise InternalServerError(
"An unexpected error has occurred while responding to the DELETE request of the "
Expand Down Expand Up @@ -270,9 +268,7 @@ def update(self, trans, encoded_authz_id, payload, **kwargs):
raise e
except Exception as e:
log.exception(
msg_template.format(
"exception while updating the cloudauthz record with " "ID: `{}`.".format(encoded_authz_id)
)
msg_template.format(f"exception while updating the cloudauthz record with ID: `{encoded_authz_id}`.")
)
raise InternalServerError(
"An unexpected error has occurred while responding to the PUT request of the "
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/webapps/galaxy/api/library_contents.py
Original file line number Diff line number Diff line change
Expand Up @@ -312,7 +312,7 @@ def create(self, trans, library_id, payload, **kwd):
trans.sa_session.add(meta_i)
with transaction(trans.sa_session):
trans.sa_session.commit()
if type(v) == trans.app.model.LibraryDatasetDatasetAssociation:
if isinstance(v, trans.app.model.LibraryDatasetDatasetAssociation):
v = v.library_dataset
encoded_id = trans.security.encode_id(v.id)
if create_type == "folder":
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy_test/api/test_folder_contents.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,7 +275,7 @@ def test_index_order_by(self, history_id):
history_id,
folder_id,
name,
content=f"{'0'*dataset_sizes[index]}",
content=f"{'0' * dataset_sizes[index]}",
ldda_message=ldda_messages[index],
file_type=file_types[index],
)
Expand Down
2 changes: 1 addition & 1 deletion test/integration/test_recalculate_user_disk_usage.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def test_recalculate_user_disk_usage(self):
assert current_usage["total_disk_usage"] == 0
size = 100
history_id = self.dataset_populator.new_history()
hda_id = self.dataset_populator.new_dataset(history_id, content=f"{'0'*size}", wait=True)["id"]
hda_id = self.dataset_populator.new_dataset(history_id, content=f"{'0' * size}", wait=True)["id"]
expected_usage = size + 1 # +1 for the new line character in the dataset
# The usage should be the total of the datasets
current_usage = self.dataset_populator.get_usage_for(None)
Expand Down
4 changes: 2 additions & 2 deletions test/integration/test_storage_cleaner.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,7 @@ def _create_histories_with(
history_ids.append(history_id)
# Create a dataset with content equal to the expected size of the history
if history_data.size:
self.dataset_populator.new_dataset(history_id, content=f"{'0'*(history_data.size-1)}\n")
self.dataset_populator.new_dataset(history_id, content=f"{'0' * (history_data.size - 1)}\n")
if wait_for_histories:
for history_id in history_ids:
self.dataset_populator.wait_for_history(history_id)
Expand All @@ -176,7 +176,7 @@ def _create_datasets_in_history_with(
dataset_ids = []
for dataset_data in test_datasets:
dataset = self.dataset_populator.new_dataset(
history_id, name=dataset_data.name, content=f"{'0'*(dataset_data.size-1)}\n"
history_id, name=dataset_data.name, content=f"{'0' * (dataset_data.size - 1)}\n"
)
dataset_ids.append(dataset["id"])
if wait_for_history:
Expand Down
Loading