Use id entity attribute when setting exampleOfWork property #19396
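Before this change, exampleOfWork references were assembled by hand (for example f"#{wfda.dataset.dataset.uuid}"), duplicating the logic ro-crate-py uses when assigning an @id to the corresponding FormalParameter entity and risking dangling references if the two ever diverged. The formal parameter entity is now created first and its id attribute is read back, so the exampleOfWork link always matches the entity actually present in the crate.

A minimal sketch of the library behavior this relies on (assuming ro-crate-py's ContextEntity, which, to our understanding, prefixes a plain identifier with "#" when it is not an absolute URI; the identifier below is invented for illustration):

from rocrate.model.contextentity import ContextEntity
from rocrate.rocrate import ROCrate

crate = ROCrate()
# crate.add returns the registered entity, so the assigned @id can be read back
# instead of being rebuilt by hand with an f-string.
formal_param = crate.add(
    ContextEntity(crate, "my_dataset-param", properties={"@type": "FormalParameter"})
)
print(formal_param.id)  # expected: "#my_dataset-param"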

Open · wants to merge 2 commits into base: dev
lib/galaxy/model/store/ro_crate_utils.py (60 changes: 22 additions & 38 deletions)
@@ -123,25 +123,27 @@ def _add_file(self, dataset: HistoryDatasetAssociation, properties: Dict[Any, An
     def _add_files(self, crate: ROCrate):
         for wfda in self.invocation.input_datasets:
             if not self.file_entities.get(wfda.dataset.dataset.id):
+                dataset_formal_param = self._add_dataset_formal_parameter(wfda.dataset, crate)
+                crate.mainEntity.append_to("input", dataset_formal_param)
                 properties = {
-                    "exampleOfWork": {"@id": f"#{wfda.dataset.dataset.uuid}"},
+                    "exampleOfWork": {"@id": dataset_formal_param.id},
                 }
                 file_entity = self._add_file(wfda.dataset, properties, crate)
-                dataset_formal_param = self._add_dataset_formal_parameter(wfda.dataset, crate)
-                crate.mainEntity.append_to("input", dataset_formal_param)
                 self.create_action.append_to("object", file_entity)
 
         for wfda in self.invocation.output_datasets:
             if not self.file_entities.get(wfda.dataset.dataset.id):
+                dataset_formal_param = self._add_dataset_formal_parameter(wfda.dataset, crate)
+                crate.mainEntity.append_to("output", dataset_formal_param)
                 properties = {
-                    "exampleOfWork": {"@id": f"#{wfda.dataset.dataset.uuid}"},
+                    "exampleOfWork": {"@id": dataset_formal_param.id},
                 }
                 file_entity = self._add_file(wfda.dataset, properties, crate)
-                dataset_formal_param = self._add_dataset_formal_parameter(wfda.dataset, crate)
-                crate.mainEntity.append_to("output", dataset_formal_param)
                 self.create_action.append_to("result", file_entity)
 
-    def _add_collection(self, hdca: HistoryDatasetCollectionAssociation, crate: ROCrate) -> ContextEntity:
+    def _add_collection(
+        self, hdca: HistoryDatasetCollectionAssociation, crate: ROCrate, collection_formal_param: ContextEntity
+    ) -> ContextEntity:
         name = hdca.name
         dataset_ids = []
         for hda in hdca.dataset_instances:
@@ -158,7 +160,7 @@ def _add_collection(self, hdca: HistoryDatasetCollectionAssociation, crate: ROCr
             "@type": "Collection",
             "additionalType": self._get_collection_additional_type(hdca.collection.collection_type),
             "hasPart": dataset_ids,
-            "exampleOfWork": {"@id": f"#{hdca.type_id}-param"},
+            "exampleOfWork": {"@id": collection_formal_param.id},
         }
         collection_entity = crate.add(
             ContextEntity(
@@ -185,14 +187,14 @@ def _get_parameter_additional_type(self, parameter_type: Optional[str]) -> str:
 
     def _add_collections(self, crate: ROCrate):
         for wfdca in self.invocation.input_dataset_collections:
-            collection_entity = self._add_collection(wfdca.dataset_collection, crate)
             collection_formal_param = self._add_collection_formal_parameter(wfdca.dataset_collection, crate)
+            collection_entity = self._add_collection(wfdca.dataset_collection, crate, collection_formal_param)
             crate.mainEntity.append_to("input", collection_formal_param)
             self.create_action.append_to("object", collection_entity)
 
         for wfdca in self.invocation.output_dataset_collections:
-            collection_entity = self._add_collection(wfdca.dataset_collection, crate)
             collection_formal_param = self._add_collection_formal_parameter(wfdca.dataset_collection, crate)
+            collection_entity = self._add_collection(wfdca.dataset_collection, crate, collection_formal_param)
             crate.mainEntity.append_to("output", collection_formal_param)
             self.create_action.append_to("result", collection_entity)
 
@@ -336,31 +338,14 @@ def _add_profiles(self, crate: ROCrate):
     def _add_parameters(self, crate: ROCrate):
         for step in self.invocation.steps:
             if step.workflow_step.type == "parameter_input":
-                property_value = self._add_step_parameter_pv(step, crate)
-                formal_param = self._add_step_parameter_fp(step, crate)
-                crate.mainEntity.append_to("input", formal_param)
+                property_value = self._add_step_parameter(step, crate)
                 self.create_action.append_to("object", property_value)
 
-    def _add_step_parameter_pv(self, step: WorkflowInvocationStep, crate: ROCrate):
-        param_id = step.workflow_step.label
-        return crate.add(
-            ContextEntity(
-                crate,
-                f"{param_id}-pv",
-                properties={
-                    "@type": "PropertyValue",
-                    "name": f"{param_id}",
-                    "value": step.output_value.value,
-                    "exampleOfWork": {"@id": f"#{param_id}-param"},
-                },
-            )
-        )
-
-    def _add_step_parameter_fp(self, step: WorkflowInvocationStep, crate: ROCrate):
+    def _add_step_parameter(self, step: WorkflowInvocationStep, crate: ROCrate) -> ContextEntity:
         param_id = step.workflow_step.label
         assert step.workflow_step.tool_inputs
         param_type = step.workflow_step.tool_inputs["parameter_type"]
-        return crate.add(
+        formal_param = crate.add(
             ContextEntity(
                 crate,
                 f"{param_id}-param",
@@ -373,19 +358,16 @@ def _add_step_parameter_fp(self, step: WorkflowInvocationStep, crate: ROCrate):
                 },
             )
         )
-
-    def _add_step_tool_pv(self, step: WorkflowInvocationStep, tool_input: str, crate: ROCrate):
-        param_id = tool_input
-        assert step.workflow_step.tool_inputs
+        crate.mainEntity.append_to("input", formal_param)
         return crate.add(
             ContextEntity(
                 crate,
                 f"{param_id}-pv",
                 properties={
                     "@type": "PropertyValue",
-                    "name": f"{step.workflow_step.label}",
-                    "value": step.workflow_step.tool_inputs[tool_input],
-                    "exampleOfWork": {"@id": f"#{param_id}-param"},
+                    "name": f"{param_id}",
+                    "value": step.output_value.value,
+                    "exampleOfWork": {"@id": formal_param.id},
                 },
             )
         )
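With the separate -pv and -fp helpers merged into _add_step_parameter, each parameter step now yields a FormalParameter appended to the workflow's inputs plus a PropertyValue whose exampleOfWork points back at it. An illustrative sketch of the resulting JSON-LD linkage for a hypothetical step labeled num_lines (all values invented):

# Two graph entries produced for one parameter step; the link holds by construction
# because the PropertyValue copies the FormalParameter's @id instead of rebuilding it.
formal_param = {"@id": "#num_lines-param", "@type": "FormalParameter", "name": "num_lines"}
property_value = {
    "@id": "#num_lines-pv",
    "@type": "PropertyValue",
    "name": "num_lines",
    "value": "3",
    "exampleOfWork": {"@id": formal_param["@id"]},
}
assert property_value["exampleOfWork"]["@id"] == formal_param["@id"]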
@@ -419,7 +401,9 @@ def _add_dataset_formal_parameter(self, hda: HistoryDatasetAssociation, crate: R
             )
         )
 
-    def _add_collection_formal_parameter(self, hdca: HistoryDatasetCollectionAssociation, crate: ROCrate):
+    def _add_collection_formal_parameter(
+        self, hdca: HistoryDatasetCollectionAssociation, crate: ROCrate
+    ) -> ContextEntity:
         return crate.add(
             ContextEntity(
                 crate,
test/unit/data/model/test_model_store.py (59 changes: 24 additions & 35 deletions)
@@ -383,7 +383,7 @@ def test_import_library_require_permissions():
     assert error_caught
 
 
-def test_import_export_library():
+def test_import_export_library(tmp_path):
     """Test basics of library, library folder, and library dataset import/export."""
     app = _mock_app()
     sa_session = app.model.context
@@ -412,12 +412,11 @@ def test_import_export_library():
     assert len(root_folder.datasets) == 1
     assert len(root_folder.folders) == 1
 
-    temp_directory = mkdtemp()
-    with store.DirectoryModelExportStore(temp_directory, app=app) as export_store:
+    with store.DirectoryModelExportStore(tmp_path, app=app) as export_store:
         export_store.export_library(library)
 
     import_model_store = store.get_import_model_store_for_directory(
-        temp_directory, app=app, user=u, import_options=store.ImportOptions(allow_library_creation=True)
+        tmp_path, app=app, user=u, import_options=store.ImportOptions(allow_library_creation=True)
     )
     import_model_store.perform_import()
 
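Throughout this file, ad-hoc mkdtemp() directories are replaced with pytest's built-in tmp_path fixture, which injects a fresh pathlib.Path per test and leaves cleanup to pytest, whereas mkdtemp() directories lingered after failures. A minimal sketch of the fixture pattern (test name and file name invented):

def test_export_writes_into_tmp_path(tmp_path):
    # tmp_path is a pathlib.Path pointing at a per-test temporary directory.
    out_file = tmp_path / "export.txt"
    out_file.write_text("payload")
    assert out_file.read_text() == "payload"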
@@ -593,7 +592,7 @@ def validate_invocation_collection_crate_directory(crate_directory):
         assert dataset in root["hasPart"]
 
 
-def test_export_history_with_missing_hid():
+def test_export_history_with_missing_hid(tmp_path):
     # The dataset's hid was used to compose the file name during the export but it
     # can be missing sometimes. We now use the dataset's encoded id instead.
     app = _mock_app()
@@ -603,46 +602,41 @@ def test_export_history_with_missing_hid(tmp_path):
     d1.hid = None
     app.commit()
 
-    temp_directory = mkdtemp()
-    with store.DirectoryModelExportStore(temp_directory, app=app, export_files="copy") as export_store:
+    with store.DirectoryModelExportStore(tmp_path, app=app, export_files="copy") as export_store:
         export_store.export_history(history)
 
 
 def test_export_history_to_ro_crate(tmp_path):
     app = _mock_app()
     u, history, d1, d2, j = _setup_simple_cat_job(app)
 
-    crate_directory = tmp_path / "crate"
-    with store.ROCrateModelExportStore(crate_directory, app=app) as export_store:
+    with store.ROCrateModelExportStore(tmp_path, app=app) as export_store:
         export_store.export_history(history)
-    validate_history_crate_directory(crate_directory)
+    validate_history_crate_directory(tmp_path)
 
 
 def test_export_invocation_to_ro_crate(tmp_path):
     app = _mock_app()
     workflow_invocation = _setup_invocation(app)
-    crate_directory = tmp_path / "crate"
-    with store.ROCrateModelExportStore(crate_directory, app=app) as export_store:
+    with store.ROCrateModelExportStore(tmp_path, app=app) as export_store:
         export_store.export_workflow_invocation(workflow_invocation)
-    validate_invocation_crate_directory(crate_directory)
+    validate_invocation_crate_directory(tmp_path)
 
 
 def test_export_simple_invocation_to_ro_crate(tmp_path):
     app = _mock_app()
     workflow_invocation = _setup_simple_invocation(app)
-    crate_directory = tmp_path / "crate"
-    with store.ROCrateModelExportStore(crate_directory, app=app) as export_store:
+    with store.ROCrateModelExportStore(tmp_path, app=app) as export_store:
         export_store.export_workflow_invocation(workflow_invocation)
-    validate_invocation_crate_directory(crate_directory)
+    validate_invocation_crate_directory(tmp_path)
 
 
 def test_export_collection_invocation_to_ro_crate(tmp_path):
     app = _mock_app()
     workflow_invocation = _setup_collection_invocation(app)
-    crate_directory = tmp_path / "crate"
-    with store.ROCrateModelExportStore(crate_directory, app=app) as export_store:
+    with store.ROCrateModelExportStore(tmp_path, app=app) as export_store:
         export_store.export_workflow_invocation(workflow_invocation)
-    validate_invocation_collection_crate_directory(crate_directory)
+    validate_invocation_collection_crate_directory(tmp_path)
 
 
 def test_export_invocation_to_ro_crate_archive(tmp_path):


def test_export_invocation_to_ro_crate_archive(tmp_path):
Expand Down Expand Up @@ -727,7 +721,7 @@ def test_import_export_edit_datasets():
assert d1.dataset.object_store_id == "foo1", d1.dataset.object_store_id


def test_import_export_edit_collection():
def test_import_export_edit_collection(tmp_path):
"""Test modifying existing collections with imports."""
app = _mock_app()
sa_session = app.model.context
@@ -743,13 +737,12 @@ def test_import_export_edit_collection():
     import_history = model.History(name="Test History for Import", user=u)
     app.add_and_commit(import_history)
 
-    temp_directory = mkdtemp()
-    with store.DirectoryModelExportStore(temp_directory, app=app, for_edit=True) as export_store:
+    with store.DirectoryModelExportStore(tmp_path, app=app, for_edit=True) as export_store:
         export_store.add_dataset_collection(hc1)
 
     # Fabric editing metadata for collection...
-    collections_metadata_path = os.path.join(temp_directory, store.ATTRS_FILENAME_COLLECTIONS)
-    datasets_metadata_path = os.path.join(temp_directory, store.ATTRS_FILENAME_DATASETS)
+    collections_metadata_path = os.path.join(tmp_path, store.ATTRS_FILENAME_COLLECTIONS)
+    datasets_metadata_path = os.path.join(tmp_path, store.ATTRS_FILENAME_DATASETS)
     with open(collections_metadata_path) as f:
         hdcas_metadata = json.load(f)
 
@@ -798,14 +791,14 @@ def test_import_export_edit_collection():
     with open(collections_metadata_path, "w") as collections_f:
         json.dump(hdcas_metadata, collections_f)
 
-    _perform_import_from_directory(temp_directory, app, u, import_history, store.ImportOptions(allow_edit=True))
+    _perform_import_from_directory(tmp_path, app, u, import_history, store.ImportOptions(allow_edit=True))
 
     sa_session.refresh(c1)
     assert c1.populated_state == model.DatasetCollection.populated_states.OK, c1.populated_state
     assert len(c1.elements) == 2
 
 
-def test_import_export_composite_datasets():
+def test_import_export_composite_datasets(tmp_path):
     app = _mock_app()
     sa_session = app.model.context
 
@@ -819,13 +812,12 @@ def test_import_export_composite_datasets():
     app.write_primary_file(d1, "cool primary file")
     app.write_composite_file(d1, "cool composite file", "child_file")
 
-    temp_directory = mkdtemp()
-    with store.DirectoryModelExportStore(temp_directory, app=app, export_files="copy") as export_store:
+    with store.DirectoryModelExportStore(tmp_path, app=app, export_files="copy") as export_store:
         export_store.add_dataset(d1)
 
     import_history = model.History(name="Test History for Import", user=u)
     app.add_and_commit(import_history)
-    _perform_import_from_directory(temp_directory, app, u, import_history)
+    _perform_import_from_directory(tmp_path, app, u, import_history)
     assert len(import_history.datasets) == 1
     import_dataset = import_history.datasets[0]
     _assert_extra_files_has_parent_directory_with_single_file_containing(
@@ -848,7 +840,7 @@ def _assert_extra_files_has_parent_directory_with_single_file_containing(
    assert contents == expected_contents
 
 
-def test_edit_metadata_files():
+def test_edit_metadata_files(tmp_path):
     app = _mock_app(store_by="uuid")
     sa_session = app.model.context
 
@@ -864,15 +856,12 @@ def test_edit_metadata_files():
     assert d1.metadata.bam_index
     assert isinstance(d1.metadata.bam_index, model.MetadataFile)
 
-    temp_directory = mkdtemp()
-    with store.DirectoryModelExportStore(
-        temp_directory, app=app, for_edit=True, strip_metadata_files=False
-    ) as export_store:
+    with store.DirectoryModelExportStore(tmp_path, app=app, for_edit=True, strip_metadata_files=False) as export_store:
         export_store.add_dataset(d1)
 
     import_history = model.History(name="Test History for Import", user=u)
     app.add_and_commit(import_history)
-    _perform_import_from_directory(temp_directory, app, u, import_history, store.ImportOptions(allow_edit=True))
+    _perform_import_from_directory(tmp_path, app, u, import_history, store.ImportOptions(allow_edit=True))
 
 
 def test_sessionless_import_edit_datasets():