From 398f9113e8c4259adf3874e28e3fe2ae369e3f3c Mon Sep 17 00:00:00 2001
From: Todd Roper
Date: Mon, 7 Aug 2023 13:31:50 -0700
Subject: [PATCH] R.C. 0.6.0 (#281)

---
 migrate/scripts/seed_data.py         |  15 +-
 tds/autogen/README.md                |   4 -
 tds/autogen/__init__.py              |   0
 tds/autogen/orm.py                   | 137 -------
 tds/autogen/schema.py                | 140 --------
 tds/db/base.py                       |   2 +
 tds/{autogen => db}/enums.py         |  15 +
 tds/db/graph/query_helpers.py        |  28 +-
 tds/db/helpers.py                    |  10 +-
 tds/lib/datasets.py                  |  39 +-
 tds/lib/projects.py                  |  17 +-
 tds/lib/utils.py                     |   9 +
 tds/modules/concept/__init__.py      |   2 +-
 tds/modules/concept/controller.py    |  31 +-
 tds/modules/concept/model.py         |   4 +-
 tds/modules/dataset/__init__.py      |   2 +-
 tds/modules/dataset/model.py         | 110 ++++--
 tds/modules/experimental/__init__.py |  10 +
 .../experimental/controller.py}      |  27 +-
 tds/modules/external/__init__.py     |  10 +
 tds/modules/external/controller.py   | 331 +++++++++++++++++
 tds/modules/external/model.py        |  59 +++
 tds/modules/framework/__init__.py    |  10 +
 tds/modules/framework/controller.py  | 191 ++++++++++
 tds/modules/framework/response.py    |  17 +
 tds/modules/model/__init__.py        |   2 +-
 tds/modules/model/controller.py      | 131 +++++++
 tds/modules/model/model.py           |  29 +-
 tds/modules/model/utils.py           |   4 +-
 tds/modules/person/__init__.py       |  10 +
 tds/modules/person/controller.py     | 321 +++++++++++++++++
 tds/modules/person/model.py          |  83 +++++
 tds/modules/person/response.py       |  26 ++
 tds/modules/project/__init__.py      |  10 +
 tds/modules/project/controller.py    | 336 ++++++++++++++++++
 tds/modules/project/helpers.py       | 163 +++++++++
 tds/modules/project/model.py         |  96 +++++
 tds/modules/project/response.py      |  20 ++
 tds/modules/provenance/controller.py |   3 +-
 tds/modules/provenance/model.py      |   4 +-
 tds/modules/provenance/response.py   |   2 +-
 tds/modules/simulation/model.py      |   2 +-
 tds/modules/simulation/response.py   |   2 +-
 tds/modules/workflow/__init__.py     |   2 +-
 tds/routers/__init__.py              |   0
 tds/routers/external.py              | 151 --------
 tds/routers/models.py                | 211 -----------
 tds/routers/persons.py               | 145 --------
 tds/routers/projects.py              | 309 ----------------
 tds/schema/concept.py                |   4 +-
 tds/schema/dataset.py                |   6 +-
 tds/schema/model.py                  |   4 +-
 tds/schema/project.py                |  14 +-
 tds/schema/provenance.py             |  27 +-
 tds/schema/resource.py               |  13 +-
 tds/server/build.py                  |  40 +--
 tests/service.py                     |  14 +-
 57 files changed, 2107 insertions(+), 1297 deletions(-)
 delete mode 100644 tds/autogen/README.md
 delete mode 100644 tds/autogen/__init__.py
 delete mode 100644 tds/autogen/orm.py
 delete mode 100644 tds/autogen/schema.py
 rename tds/{autogen => db}/enums.py (91%)
 create mode 100644 tds/modules/experimental/__init__.py
 rename tds/{routers/experimental.py => modules/experimental/controller.py} (83%)
 create mode 100644 tds/modules/external/__init__.py
 create mode 100644 tds/modules/external/controller.py
 create mode 100644 tds/modules/external/model.py
 create mode 100644 tds/modules/framework/__init__.py
 create mode 100644 tds/modules/framework/controller.py
 create mode 100644 tds/modules/framework/response.py
 create mode 100644 tds/modules/person/__init__.py
 create mode 100644 tds/modules/person/controller.py
 create mode 100644 tds/modules/person/model.py
 create mode 100644 tds/modules/person/response.py
 create mode 100644 tds/modules/project/__init__.py
 create mode 100644 tds/modules/project/controller.py
 create mode 100644 tds/modules/project/helpers.py
 create mode 100644 tds/modules/project/model.py
 create mode 100644 tds/modules/project/response.py
 delete mode 100644 tds/routers/__init__.py
 delete mode 100644 tds/routers/external.py
delete mode 100644 tds/routers/models.py delete mode 100644 tds/routers/persons.py delete mode 100644 tds/routers/projects.py diff --git a/migrate/scripts/seed_data.py b/migrate/scripts/seed_data.py index 1adc870f2..8013c05d0 100644 --- a/migrate/scripts/seed_data.py +++ b/migrate/scripts/seed_data.py @@ -11,7 +11,10 @@ from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session -from tds.autogen import orm +from tds.modules.external.model import Publication +from tds.modules.model.model import ModelFramework +from tds.modules.person.model import Person +from tds.modules.project.model import Project, ProjectAsset from tds.modules.provenance.model import Provenance migrate_dir = Path(os.path.dirname(__file__)) @@ -24,12 +27,12 @@ SQL_DB = os.getenv("SQL_DB") pg_data_load = { - "model_framework": orm.ModelFramework, - "persons": orm.Person, - "projects": orm.Project, - "project_assets": orm.ProjectAsset, + "model_framework": ModelFramework, + "persons": Person, + "projects": Project, + "project_assets": ProjectAsset, "provenance": Provenance, - "publications": orm.Publication, + "publications": Publication, } diff --git a/tds/autogen/README.md b/tds/autogen/README.md deleted file mode 100644 index a115ec3e6..000000000 --- a/tds/autogen/README.md +++ /dev/null @@ -1,4 +0,0 @@ - -~~THIS DIRECTORY IS AUTOGENERATED. DO NOT EDIT MANUALLY~~ - -As of 5/26/23 this directory is no longer automatically generated. \ No newline at end of file diff --git a/tds/autogen/__init__.py b/tds/autogen/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tds/autogen/orm.py b/tds/autogen/orm.py deleted file mode 100644 index 51fb5720e..000000000 --- a/tds/autogen/orm.py +++ /dev/null @@ -1,137 +0,0 @@ -# pylint: skip-file -""" -ORM file from DBML autogen. -Skipping linter to prevent class docstring errors. -@TODO: Clean up file to pass linting. 
-""" -import sqlalchemy as sa -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.sql import func - -from tds.autogen.enums import ExtractedType, ResourceType, Role, ValueType - -Base = declarative_base() - - -class QualifierXref(Base): - __tablename__ = "qualifier_xref" - - id = sa.Column(sa.Integer(), primary_key=True) - qualifier_id = sa.Column( - sa.Integer(), sa.ForeignKey("qualifier.id"), nullable=False - ) - feature_id = sa.Column(sa.Integer(), sa.ForeignKey("feature.id"), nullable=False) - - -class ModelRuntime(Base): - __tablename__ = "model_runtime" - - id = sa.Column(sa.Integer(), primary_key=True) - timestamp = sa.Column(sa.DateTime(), nullable=False, server_default=func.now()) - name = sa.Column(sa.String(), nullable=False) - left = sa.Column(sa.String(), sa.ForeignKey("model_framework.name"), nullable=False) - right = sa.Column( - sa.String(), sa.ForeignKey("model_framework.name"), nullable=False - ) - - -class Feature(Base): - __tablename__ = "feature" - - id = sa.Column(sa.Integer(), primary_key=True) - dataset_id = sa.Column(sa.Integer(), sa.ForeignKey("dataset.id"), nullable=False) - description = sa.Column(sa.Text()) - display_name = sa.Column(sa.String()) - name = sa.Column(sa.String(), nullable=False) - value_type = sa.Column(sa.Enum(ValueType), nullable=False) - - -class Qualifier(Base): - __tablename__ = "qualifier" - - id = sa.Column(sa.Integer(), primary_key=True) - dataset_id = sa.Column(sa.Integer(), sa.ForeignKey("dataset.id"), nullable=False) - description = sa.Column(sa.Text()) - name = sa.Column(sa.String(), nullable=False) - value_type = sa.Column(sa.Enum(ValueType), nullable=False) - - -class Extraction(Base): - __tablename__ = "extraction" - - id = sa.Column(sa.Integer(), primary_key=True) - publication_id = sa.Column( - sa.Integer(), sa.ForeignKey("publication.id"), nullable=False - ) - type = sa.Column(sa.Enum(ExtractedType), nullable=False) - data = sa.Column(sa.LargeBinary(), nullable=False) - img = sa.Column(sa.LargeBinary(), nullable=False) - - -class ProjectAsset(Base): - __tablename__ = "project_asset" - - id = sa.Column(sa.Integer(), primary_key=True) - project_id = sa.Column(sa.Integer(), sa.ForeignKey("project.id"), nullable=False) - resource_id = sa.Column(sa.String(), nullable=False) - resource_type = sa.Column(sa.Enum(ResourceType), nullable=False) - external_ref = sa.Column(sa.String()) - - -class Association(Base): - __tablename__ = "association" - - id = sa.Column(sa.Integer(), primary_key=True) - person_id = sa.Column(sa.Integer(), sa.ForeignKey("person.id"), nullable=False) - resource_id = sa.Column(sa.Integer(), nullable=False) - resource_type = sa.Column(sa.Enum(ResourceType)) - role = sa.Column(sa.Enum(Role)) - - -class ModelFramework(Base): - __tablename__ = "model_framework" - - name = sa.Column(sa.String(), primary_key=True) - version = sa.Column(sa.String(), nullable=False) - semantics = sa.Column(sa.String(), nullable=False) - schema_url = sa.Column(sa.String()) - - -class Software(Base): - __tablename__ = "software" - - id = sa.Column(sa.Integer(), primary_key=True) - timestamp = sa.Column(sa.DateTime(), nullable=False, server_default=func.now()) - source = sa.Column(sa.String(), nullable=False) - storage_uri = sa.Column(sa.String(), nullable=False) - - -class Publication(Base): - __tablename__ = "publication" - - id = sa.Column(sa.Integer(), primary_key=True) - xdd_uri = sa.Column(sa.String(), nullable=False) - title = sa.Column(sa.String(), nullable=False) - publication_data = sa.Column(sa.JSON, 
nullable=True) - - -class Project(Base): - __tablename__ = "project" - - id = sa.Column(sa.Integer(), primary_key=True) - name = sa.Column(sa.String(), nullable=False) - description = sa.Column(sa.String(), nullable=False) - timestamp = sa.Column(sa.DateTime(), server_default=func.now()) - active = sa.Column(sa.Boolean(), nullable=False) - username = sa.Column(sa.String()) - - -class Person(Base): - __tablename__ = "person" - - id = sa.Column(sa.Integer(), primary_key=True) - name = sa.Column(sa.String(), nullable=False) - email = sa.Column(sa.String(), nullable=False) - org = sa.Column(sa.String()) - website = sa.Column(sa.String()) - is_registered = sa.Column(sa.Boolean(), nullable=False) diff --git a/tds/autogen/schema.py b/tds/autogen/schema.py deleted file mode 100644 index eb0053a3e..000000000 --- a/tds/autogen/schema.py +++ /dev/null @@ -1,140 +0,0 @@ -# pylint: skip-file -""" -Schema file from DBML autogen. -Skipping linter to prevent class docstring errors. -@TODO: Clean up file to pass linting. -""" -import datetime -from typing import Optional - -from pydantic import BaseModel, Json - -from tds.autogen.enums import ( - ExtractedType, - OntologicalField, - ProvenanceType, - RelationType, - ResourceType, - Role, - TaggableType, - ValueType, -) - - -class QualifierXref(BaseModel): - id: Optional[int] = None - qualifier_id: Optional[int] = None - feature_id: Optional[int] = None - - -class ModelRuntime(BaseModel): - id: Optional[int] = None - timestamp: datetime.datetime = datetime.datetime.now() - name: str - left: str - right: str - - -class Feature(BaseModel): - id: Optional[int] = None - dataset_id: Optional[int] = None - description: Optional[str] - display_name: Optional[str] - name: str - value_type: ValueType - - -class Qualifier(BaseModel): - id: Optional[int] = None - dataset_id: Optional[int] = None - description: Optional[str] - name: str - value_type: ValueType - - -class Extraction(BaseModel): - id: Optional[int] = None - publication_id: Optional[int] = None - type: ExtractedType - data: bytes - img: bytes - - -class ProjectAsset(BaseModel): - id: Optional[int] = None - project_id: Optional[int] = None - resource_id: Optional[int] = None - resource_type: ResourceType - external_ref: Optional[str] - - -class OntologyConcept(BaseModel): - id: Optional[int] = None - curie: str - type: TaggableType - object_id: str - status: OntologicalField - - -class Provenance(BaseModel): - id: Optional[int] = None - timestamp: datetime.datetime = datetime.datetime.now() - relation_type: RelationType - left: str - left_type: ProvenanceType - right: str - right_type: ProvenanceType - user_id: Optional[int] - concept: Optional[str] - - -class Association(BaseModel): - id: Optional[int] = None - person_id: Optional[int] = None - resource_id: Optional[int] = None - resource_type: Optional[ResourceType] - role: Optional[Role] - - -class ModelFramework(BaseModel): - name: str - version: str - semantics: str - schema_url: Optional[str] - - -class Software(BaseModel): - id: Optional[int] = None - timestamp: datetime.datetime = datetime.datetime.now() - source: str - storage_uri: str - - -class Publication(BaseModel): - id: Optional[int] = None - xdd_uri: str - title: str - publication_data: Optional[dict] - - -class Project(BaseModel): - id: Optional[int] = None - name: str - description: str - timestamp: Optional[datetime.datetime] = datetime.datetime.now() - active: bool - username: Optional[str] - - -class Person(BaseModel): - id: Optional[int] = None - name: str - email: str - org: 
Optional[str] - website: Optional[str] - is_registered: bool - - -class ActiveConcept(BaseModel): - curie: str - name: Optional[str] diff --git a/tds/db/base.py b/tds/db/base.py index 9b2ac840b..7bfa1e007 100644 --- a/tds/db/base.py +++ b/tds/db/base.py @@ -7,11 +7,13 @@ from elasticsearch import ConflictError from pydantic import BaseModel, Field +from sqlalchemy.ext.declarative import declarative_base from tds.db import es_client from tds.settings import settings es = es_client() +Base = declarative_base() def new_uuid() -> str: diff --git a/tds/autogen/enums.py b/tds/db/enums.py similarity index 91% rename from tds/autogen/enums.py rename to tds/db/enums.py index efa5826c7..de66e039a 100644 --- a/tds/autogen/enums.py +++ b/tds/db/enums.py @@ -129,3 +129,18 @@ class SimulationStatus(str, Enum): queued = "queued" running = "running" failed = "failed" + + +class ColumnTypes(str, Enum): + UNKNOWN = "unknown" + BOOLEAN = "boolean" + STRING = "string" + CHAR = "string" + INTEGER = "integer" + INT = "integer" + FLOAT = "float" + DOUBLE = "double" + TIMESTAMP = "timestamp" + DATETIME = "datetime" + DATE = "date" + TIME = "time" diff --git a/tds/db/graph/query_helpers.py b/tds/db/graph/query_helpers.py index 462b50460..154dda749 100644 --- a/tds/db/graph/query_helpers.py +++ b/tds/db/graph/query_helpers.py @@ -5,7 +5,7 @@ from fastapi import HTTPException -from tds.autogen import schema +from tds.db.enums import ProvenanceType, RelationType from tds.schema.provenance import provenance_type_to_abbr @@ -25,7 +25,7 @@ def dynamic_relationship_direction(direction, relationship_type): ) -def derived_models_query_generater(root_type: schema.ProvenanceType, root_id): +def derived_models_query_generater(root_type: ProvenanceType, root_id): """ return all models, model revisions (sometimes intermediates) that were derived from a publication or intermediate @@ -63,7 +63,7 @@ def derived_models_query_generater(root_type: schema.ProvenanceType, root_id): ) -def parent_model_query_generator(root_type: schema.ProvenanceType, root_id): +def parent_model_query_generator(root_type: ProvenanceType, root_id): """ Return match query to ModelRevision depending on root_type """ @@ -71,19 +71,19 @@ def parent_model_query_generator(root_type: schema.ProvenanceType, root_id): relationships_str = relationships_array_as_str( exclude=["CONTAINS", "IS_CONCEPT_OF"] ) - model_revision_node = node_builder(node_type=schema.ProvenanceType.Model) + model_revision_node = node_builder(node_type=ProvenanceType.Model) query_templates_index = { - schema.ProvenanceType.Model: f"-[r:BEGINS_AT]->{model_revision_node} ", - schema.ProvenanceType.ModelConfiguration: f"-[r:USES]->{model_revision_node} ", - schema.ProvenanceType.Simulation: "" + ProvenanceType.Model: f"-[r:BEGINS_AT]->{model_revision_node} ", + ProvenanceType.ModelConfiguration: f"-[r:USES]->{model_revision_node} ", + ProvenanceType.Simulation: "" + f"-[r:{relationships_str} *1..]->{model_revision_node} ", - schema.ProvenanceType.Dataset: "" + ProvenanceType.Dataset: "" + f"-[r:{relationships_str} *1..]->{model_revision_node} ", } return match_node + query_templates_index[root_type] -def match_node_builder(node_type: schema.ProvenanceType = None, node_id=None): +def match_node_builder(node_type: ProvenanceType = None, node_id=None): """ return node with match statement """ @@ -95,7 +95,7 @@ def match_node_builder(node_type: schema.ProvenanceType = None, node_id=None): return f"Match ({node_type_character}:{node_type} {{id: '{node_id}'}}) " -def return_node_abbr(root_type: 
schema.ProvenanceType): +def return_node_abbr(root_type: ProvenanceType): """ Return node type abbr """ @@ -108,20 +108,20 @@ def relationships_array_as_str(exclude=None, include=None): """ relationship_str = "" if exclude is not None: - for type_ in schema.RelationType: + for type_ in RelationType: value = type_.value if value in exclude: continue relationship_str += value + "|" return relationship_str[:-1] - for type_ in schema.RelationType: + for type_ in RelationType: value = type_.value if value in include: relationship_str += value + "|" return relationship_str[:-1] -def node_builder(node_type: schema.ProvenanceType = None, node_id=None): +def node_builder(node_type: ProvenanceType = None, node_id=None): """ Return node """ @@ -223,7 +223,7 @@ def nodes_edges( nodes=True, edges=False, versions=False, - types=List[schema.ProvenanceType], + types=List[ProvenanceType], ): """ Return connected nodes and edges diff --git a/tds/db/helpers.py b/tds/db/helpers.py index 3a217724f..cc44522c9 100644 --- a/tds/db/helpers.py +++ b/tds/db/helpers.py @@ -6,29 +6,29 @@ from sqlalchemy.engine.base import Connection from sqlalchemy.orm import Session -from tds.autogen import orm +from tds.db.base import Base def init_dev_content(connection: Connection): """ Initialize tables in the connected DB """ - orm.Base.metadata.create_all(connection) + Base.metadata.create_all(connection) def drop_content(connection: Connection): """ Drop all tables from the DB """ - return orm.Base.metadata.drop_all(connection) + return Base.metadata.drop_all(connection) -def entry_exists(connection: Connection, orm_type: Any, id: int) -> bool: +def entry_exists(connection: Connection, orm_type: Any, orm_id: int) -> bool: """ Check if entry exists """ with Session(connection) as session: - return session.query(orm_type).filter(orm_type.id == id).count() == 1 + return session.query(orm_type).filter(orm_type.id == orm_id).count() == 1 def list_by_id(connection: Connection, orm_type: Any, page_size: int, page: int = 0): diff --git a/tds/lib/datasets.py b/tds/lib/datasets.py index 7af339daa..85632f508 100644 --- a/tds/lib/datasets.py +++ b/tds/lib/datasets.py @@ -1,5 +1,5 @@ """ -Qualifer specific logic +Qualifier specific logic """ import json @@ -8,10 +8,15 @@ from sqlalchemy.engine.base import Engine from sqlalchemy.orm import Session -from tds.autogen import orm, schema from tds.db import rdb as rdb_engine from tds.lib.s3 import copy_object, get_file_path, parse_filename -from tds.modules.dataset.model import Dataset +from tds.modules.dataset.model import ( + Dataset, + QualifierPayload, + QualifierXref, + QualifierXrefPayload, +) +from tds.modules.person.model import Person from tds.modules.simulation.model import Simulation from tds.settings import settings @@ -24,24 +29,22 @@ def get_qualifier_xrefs(count: int, rdb: Engine): """ with Session(rdb) as session: result = ( - session.query(orm.QualifierXref) - .order_by(orm.QualifierXref.id.asc()) - .limit(count) + session.query(QualifierXref).order_by(QualifierXref.id.asc()).limit(count) ) result = result[::] return result -def get_qualifier_xref(id: int, rdb: Engine) -> str: +def get_qualifier_xref(xref_id: int, rdb: Engine) -> str: """ Get a specific qualifier xref by ID """ with Session(rdb) as session: - result = session.query(orm.QualifierXref).get(id) + result = session.query(QualifierXref).get(xref_id) return result -def create_qualifier_xref(payload: schema.QualifierXref, rdb: Engine): +def create_qualifier_xref(payload: QualifierXrefPayload, rdb: Engine): """ Create a 
qualifier xref """ @@ -54,9 +57,9 @@ def create_qualifier_xref(payload: schema.QualifierXref, rdb: Engine): qualifier_xrefp = payload logger.info("Set qualifier_xref to raw payload.") del qualifier_xrefp["id"] - qualifier_xref = orm.QualifierXref(**qualifier_xrefp) + qualifier_xref = QualifierXref(**qualifier_xrefp) exists = ( - session.query(orm.QualifierXref).filter_by(**qualifier_xrefp).first() + session.query(QualifierXref).filter_by(**qualifier_xrefp).first() is not None ) if exists: @@ -68,29 +71,29 @@ def create_qualifier_xref(payload: schema.QualifierXref, rdb: Engine): return json.dumps(qualifier_xrefp) -def update_qualifier_xref(payload: schema.Qualifier, id: int, rdb: Engine) -> str: +def update_qualifier_xref(payload: QualifierPayload, xref_id: int, rdb: Engine) -> str: """ Update a qualifier xref by ID """ with Session(rdb) as session: data_payload = payload.dict(exclude_unset=True) - data_payload["id"] = id + data_payload["id"] = xref_id logger.info(data_payload) - data_to_update = session.query(orm.QualifierXref).filter( - orm.QualifierXref.id == id + data_to_update = session.query(QualifierXref).filter( + QualifierXref.id == xref_id ) data_to_update.update(data_payload) session.commit() return "Updated Qualifier xref" -def delete_qualifier_xref(id: int, rdb: Engine) -> None: +def delete_qualifier_xref(xref_id: int, rdb: Engine) -> None: """ Delete a qualifier xref by ID """ with Session(rdb) as session: - session.query(orm.QualifierXref).filter(orm.QualifierXref.id == id).delete() + session.query(QualifierXref).filter(QualifierXref.id == xref_id).delete() session.commit() @@ -112,7 +115,7 @@ def copy_simulation_result_to_dataset(simulation: Simulation): # Get the user name and attach it to dataset. if simulation["user_id"]: with Session(rdb_engine) as session: - user = session.query(orm.Person).get(simulation["user_id"]) + user = session.query(Person).get(simulation["user_id"]) dataset_obj["username"] = user.name dataset = Dataset(**dataset_obj) diff --git a/tds/lib/projects.py b/tds/lib/projects.py index 9083ca5f6..c346d38b2 100644 --- a/tds/lib/projects.py +++ b/tds/lib/projects.py @@ -8,20 +8,21 @@ from sqlalchemy.orm import Session -from tds.autogen import orm, schema +from tds.db.enums import ResourceType +from tds.modules.project.model import ProjectAsset logger = Logger(__file__) def save_project_assets( - project_id: int, assets: Dict[schema.ResourceType, List[int]], session: Session + project_id: int, assets: Dict[ResourceType, List[int]], session: Session ): """ Save project assets to relational DB """ for resource_type, resource_ids in assets.items(): project_assets = [ - orm.ProjectAsset( + ProjectAsset( project_id=project_id, resource_id=resource_id, resource_type=resource_type, @@ -33,20 +34,20 @@ def save_project_assets( def adjust_project_assets( - project_id: int, assets: Dict[schema.ResourceType, List[int]], session: Session + project_id: int, assets: Dict[ResourceType, List[int]], session: Session ): """ Add new entries and remove unused entries """ active = defaultdict(list) - for asset in session.query(orm.ProjectAsset).filter( - orm.ProjectAsset.project_id == project_id + for asset in session.query(ProjectAsset).filter( + ProjectAsset.project_id == project_id ): active[asset.resource_type].append(asset.resource_id) for resource_type, resource_ids in assets.items(): project_assets = [ - orm.ProjectAsset( + ProjectAsset( project_id=project_id, resource_id=resource_id, resource_type=resource_type, @@ -60,4 +61,4 @@ def adjust_project_assets( for 
resource_type, resource_ids in active.items(): inactive_ids = set(resource_ids) - set(assets.get(resource_type, [])) for id in inactive_ids: - session.delete(session.query(orm.ProjectAsset).get(id)) + session.delete(session.query(ProjectAsset).get(id)) diff --git a/tds/lib/utils.py b/tds/lib/utils.py index b8061cd42..93d61221c 100644 --- a/tds/lib/utils.py +++ b/tds/lib/utils.py @@ -25,3 +25,12 @@ def patchable(model: BaseModel) -> BaseModel: _PATCHABLE_MODELS[model_name] = PatchableModel return PatchableModel + + +def get_singular_index(index_str: str): + """ + Function strips the s off of an index str. + """ + if index_str[-1] == "s": + return index_str.rstrip(index_str[-1]) + return index_str diff --git a/tds/modules/concept/__init__.py b/tds/modules/concept/__init__.py index 57a8ce555..002742abc 100644 --- a/tds/modules/concept/__init__.py +++ b/tds/modules/concept/__init__.py @@ -4,4 +4,4 @@ from tds.modules.concept.controller import concept_router as router ROUTE_PREFIX = "concepts" -TAGS = ["TDS Concepts"] +TAGS = ["Concepts"] diff --git a/tds/modules/concept/controller.py b/tds/modules/concept/controller.py index 32cd61464..18e9143f9 100644 --- a/tds/modules/concept/controller.py +++ b/tds/modules/concept/controller.py @@ -12,17 +12,19 @@ from sqlalchemy.engine.base import Engine from sqlalchemy.orm import Session -from tds.autogen import orm, schema from tds.db import request_rdb +from tds.db.enums import TaggableType from tds.lib.concepts import fetch_from_dkg, mark_concept_active from tds.modules.concept.model import ( ActiveConcept, OntologyConcept, OntologyConceptPayload, ) -from tds.modules.dataset.model import Dataset +from tds.modules.dataset.model import Dataset, Feature, Qualifier +from tds.modules.external.model import Publication from tds.modules.model.model import Model from tds.modules.model_configuration.model import ModelConfiguration +from tds.modules.project.model import Project from tds.modules.simulation.model import Simulation logger = Logger(__file__) @@ -73,26 +75,26 @@ def get_concept_definition(curie: str): return fetch_from_dkg(params) -def get_taggable_orm(taggable_type: schema.TaggableType): +def get_taggable_orm(taggable_type: TaggableType): """ Maps resource type to ORM """ enum_to_orm = { - schema.TaggableType.features: orm.Feature, - schema.TaggableType.qualifiers: orm.Qualifier, - schema.TaggableType.datasets: Dataset, - schema.TaggableType.model_configurations: ModelConfiguration, - schema.TaggableType.models: Model, - schema.TaggableType.projects: orm.Project, - schema.TaggableType.publications: orm.Publication, - schema.TaggableType.simulations: Simulation, + TaggableType.features: Feature, + TaggableType.qualifiers: Qualifier, + TaggableType.datasets: Dataset, + TaggableType.model_configurations: ModelConfiguration, + TaggableType.models: Model, + TaggableType.projects: Project, + TaggableType.publications: Publication, + TaggableType.simulations: Simulation, } return enum_to_orm[taggable_type] @concept_router.get("/facets") def search_concept_using_facets( - types: List[schema.TaggableType] = Query(default=list(schema.TaggableType)), + types: List[TaggableType] = Query(default=list(TaggableType)), curies: Optional[List[str]] = Query(default=None), is_simulation: Optional[bool] = Query(default=None), rdb: Engine = Depends(request_rdb), @@ -110,14 +112,13 @@ def search_concept_using_facets( base_query = base_query.join( Dataset, and_( - OntologyConcept.type == schema.TaggableType.datasets, + OntologyConcept.type == TaggableType.datasets, 
OntologyConcept.object_id == Dataset.id, ), isouter=True, ).filter( or_( - OntologyConcept.type != schema.TaggableType.datasets, - Dataset.simulation_run == is_simulation, + OntologyConcept.type != TaggableType.datasets, ) ) result = { diff --git a/tds/modules/concept/model.py b/tds/modules/concept/model.py index 789b3bb53..c45126b99 100644 --- a/tds/modules/concept/model.py +++ b/tds/modules/concept/model.py @@ -6,8 +6,8 @@ import sqlalchemy as sa from pydantic import BaseModel -from tds.autogen.enums import OntologicalField, TaggableType -from tds.autogen.orm import Base +from tds.db.base import Base +from tds.db.enums import OntologicalField, TaggableType class OntologyConcept(Base): diff --git a/tds/modules/dataset/__init__.py b/tds/modules/dataset/__init__.py index dbd7e9ea6..92a12e859 100644 --- a/tds/modules/dataset/__init__.py +++ b/tds/modules/dataset/__init__.py @@ -4,4 +4,4 @@ from tds.modules.dataset.controller import dataset_router as router ROUTE_PREFIX = "datasets" -TAGS = ["TDS Dataset"] +TAGS = ["Dataset"] diff --git a/tds/modules/dataset/model.py b/tds/modules/dataset/model.py index ef21d3ff9..ecaa530f5 100644 --- a/tds/modules/dataset/model.py +++ b/tds/modules/dataset/model.py @@ -2,32 +2,13 @@ TDS Dataset """ from datetime import datetime -from enum import Enum from typing import Any, List, Optional +import sqlalchemy as sa from pydantic import AnyUrl, BaseModel, Field -from tds.db.base import TdsModel - -# from sqlalchemy.orm import Session -# from tds.db.relational import engine as pg_engine - - -class ColumnTypes(str, Enum): - """Column type enum""" - - UNKNOWN = "unknown" - BOOLEAN = "boolean" - STRING = "string" - CHAR = "string" - INTEGER = "integer" - INT = "integer" - FLOAT = "float" - DOUBLE = "double" - TIMESTAMP = "timestamp" - DATETIME = "datetime" - DATE = "date" - TIME = "time" +from tds.db.base import Base, TdsModel +from tds.db.enums import ColumnTypes, ValueType class Grounding(BaseModel): @@ -177,13 +158,80 @@ class Config: } } - # def _extract_concepts(self): - # """ - # Method extracts concepts from the dataset and saves them to the db. - # """ - # curies = [] - # with Session(pg_engine) as pg_db: - # pass - # def _establish_provenance(self): - # pass +class QualifierXref(Base): + """ + QualifierXref Data Model. + """ + + __tablename__ = "qualifier_xref" + + id = sa.Column(sa.Integer(), primary_key=True) + qualifier_id = sa.Column( + sa.Integer(), sa.ForeignKey("qualifier.id"), nullable=False + ) + feature_id = sa.Column(sa.Integer(), sa.ForeignKey("feature.id"), nullable=False) + + +class QualifierXrefPayload(BaseModel): + """ + QualifierXref Payload Model. + """ + + id: Optional[int] = None + qualifier_id: Optional[int] = None + feature_id: Optional[int] = None + + +class Qualifier(Base): + """ + Qualifier Data Model. + """ + + __tablename__ = "qualifier" + + id = sa.Column(sa.Integer(), primary_key=True) + dataset_id = sa.Column(sa.Integer(), sa.ForeignKey("dataset.id"), nullable=False) + description = sa.Column(sa.Text()) + name = sa.Column(sa.String(), nullable=False) + value_type = sa.Column(sa.Enum(ValueType), nullable=False) + + +class QualifierPayload(BaseModel): + """ + Qualifier Payload Model. + """ + + id: Optional[int] = None + dataset_id: Optional[int] = None + description: Optional[str] + name: str + value_type: ValueType + + +class Feature(Base): + """ + Feature Data Model. 
+ """ + + __tablename__ = "feature" + + id = sa.Column(sa.Integer(), primary_key=True) + dataset_id = sa.Column(sa.Integer(), sa.ForeignKey("dataset.id"), nullable=False) + description = sa.Column(sa.Text()) + display_name = sa.Column(sa.String()) + name = sa.Column(sa.String(), nullable=False) + value_type = sa.Column(sa.Enum(ValueType), nullable=False) + + +class FeaturePayload(BaseModel): + """ + Feature Payload Model. + """ + + id: Optional[int] = None + dataset_id: Optional[int] = None + description: Optional[str] + display_name: Optional[str] + name: str + value_type: ValueType diff --git a/tds/modules/experimental/__init__.py b/tds/modules/experimental/__init__.py new file mode 100644 index 000000000..797757cea --- /dev/null +++ b/tds/modules/experimental/__init__.py @@ -0,0 +1,10 @@ +""" + TDS Experimental Module. + + A simple REST module that creates the basic endpoints for an entity in + Terarium Data Service (TDS). +""" +from tds.modules.experimental.controller import experimental_router as router + +ROUTE_PREFIX = "experimental" +TAGS = ["Experimental"] diff --git a/tds/routers/experimental.py b/tds/modules/experimental/controller.py similarity index 83% rename from tds/routers/experimental.py rename to tds/modules/experimental/controller.py index edc6c08e9..c0a7ed423 100644 --- a/tds/routers/experimental.py +++ b/tds/modules/experimental/controller.py @@ -1,7 +1,8 @@ """ -Experimental provenance search -""" + TDS Experimental Controller. + Description: Defines the basic rest endpoints for the TDS Module. +""" import re from logging import Logger @@ -9,15 +10,13 @@ from fastapi import APIRouter, Depends from sqlalchemy.engine.base import Engine -from tds.db import request_graph_db, request_rdb -from tds.db.graph.provenance_handler import ProvenanceHandler +from tds.db.graph.neo4j import request_engine as request_graph_db +from tds.db.relational import request_engine as request_rdb from tds.modules.provenance.utils import return_graph_validations from tds.settings import settings +experimental_router = APIRouter() logger = Logger(__name__) -router = APIRouter() - - valid_relations = return_graph_validations() DB_DESC = "Valid relations include:\n" @@ -27,8 +26,8 @@ DB_DESC += f"{dom}-[relation]->{codom}\n" PREAMBLE = """ -I will type "Question:" followed by a question or command in English like "Question: Count all Publications" and you will return a -single line print "Query:" Followed by an openCypher query like "Query: `match (p:Publication) return count(p)`. +I will type "Question:" followed by a question or command in English like "Question: Count all Publications" and you will return a +single line print "Query:" Followed by an openCypher query like "Query: `match (p:Publication) return count(p)`. """ EXAMPLES = """ @@ -52,7 +51,7 @@ """ -@router.get("/cql") +@experimental_router.get("/cql") def convert_to_cypher( query: str, ) -> str: @@ -76,7 +75,7 @@ def convert_to_cypher( return cypher -@router.get("/provenance") +@experimental_router.get("/provenance") def search_provenance( query: str, # rdb: Engine = Depends(request_rdb), @@ -89,7 +88,7 @@ def search_provenance( raise NotImplementedError -@router.get("/set_properties") +@experimental_router.get("/set_properties") def set_properties( rdb: Engine = Depends(request_rdb), graph_db=Depends(request_graph_db), @@ -97,6 +96,10 @@ def set_properties( """ Modify DB contents to work with Neoviz """ + # Importing ProvenanceHandler here to bypass circular import issue. 
+ # pylint: disable-next=import-outside-toplevel + from tds.db.graph.provenance_handler import ProvenanceHandler + if settings.NEO4J_ENABLED: print("Neo4j is set") provenance_handler = ProvenanceHandler(rdb=rdb, graph_db=graph_db) diff --git a/tds/modules/external/__init__.py b/tds/modules/external/__init__.py new file mode 100644 index 000000000..45e9a75a2 --- /dev/null +++ b/tds/modules/external/__init__.py @@ -0,0 +1,10 @@ +""" + TDS External Module. + + A simple REST module that creates the basic endpoints for an entity in + Terarium Data Service (TDS). +""" +from tds.modules.external.controller import external_router as router + +ROUTE_PREFIX = "external" +TAGS = ["External"] diff --git a/tds/modules/external/controller.py b/tds/modules/external/controller.py new file mode 100644 index 000000000..c6ae4ba19 --- /dev/null +++ b/tds/modules/external/controller.py @@ -0,0 +1,331 @@ +""" + TDS External Controller. + + Description: Defines the basic rest endpoints for the TDS Module. +""" +from logging import Logger + +from fastapi import APIRouter, Depends, status +from fastapi.encoders import jsonable_encoder +from fastapi.responses import JSONResponse +from sqlalchemy.engine.base import Engine +from sqlalchemy.orm import Session +from sqlalchemy.orm.exc import NoResultFound + +from tds.db import entry_exists, request_rdb +from tds.modules.external.model import ( + Publication, + PublicationPayload, + Software, + SoftwarePayload, +) +from tds.operation import create, delete, retrieve, update + +external_router = APIRouter() +logger = Logger(__name__) + + +@external_router.get( + "/software", + response_model=list[SoftwarePayload], + **retrieve.fastapi_endpoint_config, +) +def list_software(rdb: Engine = Depends(request_rdb)) -> JSONResponse: + """ + Retrieve list of software from db. + """ + with Session(rdb) as session: + software = session.query(Software).all() + + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content=jsonable_encoder(software), + ) + + +@external_router.post("/software", **create.fastapi_endpoint_config) +def software_post( + payload: SoftwarePayload, rdb: Engine = Depends(request_rdb) +) -> JSONResponse: + """ + Create software and return its ID + """ + software_payload = payload.dict() + + with Session(rdb) as session: + software = Software(**software_payload) + session.add(software) + session.commit() + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + headers={ + "content-type": "application/json", + }, + content={"id": software.id}, + ) + + +@external_router.get( + "/software/{software_id}", + response_model=SoftwarePayload, + **retrieve.fastapi_endpoint_config, +) +def software_get(software_id: int, rdb: Engine = Depends(request_rdb)) -> JSONResponse: + """ + Retrieve software record from DB. 
+ """ + try: + with Session(rdb) as session: + software = session.query(Software).filter(Software.id == software_id).all() + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content=jsonable_encoder(software), + ) + except NoResultFound: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={"message": f"Software with id {software_id} does not exist."}, + ) + + +@external_router.put("/software/{software_id}", **update.fastapi_endpoint_config) +def software_put( + software_id: int, payload: SoftwarePayload, rdb: Engine = Depends(request_rdb) +) -> JSONResponse: + """ + Update software record in DB + """ + try: + if entry_exists(rdb.connect(), Software, software_id): + software_payload = payload.dict() + software_payload.pop("id") + with Session(rdb) as session: + session.query(Software).filter(Software.id == software_id).update( + software_payload + ) + session.commit() + + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content={"id": software_id}, + ) + raise NoResultFound + except NoResultFound: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={ + "message": f"Software with id of {software_id} does not exist.", + }, + ) + + +@external_router.delete("/software/{software_id}", **delete.fastapi_endpoint_config) +def software_delete( + software_id: int, rdb: Engine = Depends(request_rdb) +) -> JSONResponse: + """ + Delete software record in DB + """ + try: + if entry_exists(rdb.connect(), Software, software_id): + with Session(rdb) as session: + session.query(Software).filter(Software.id == software_id).delete() + session.commit() + + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content={"message": f"Software with id {software_id} deleted."}, + ) + raise NoResultFound + except NoResultFound: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={"message": f"Software with id {software_id} does not exist."}, + ) + + +@external_router.get( + "/publications", + response_model=PublicationPayload, + **retrieve.fastapi_endpoint_config, +) +def publication_list(rdb: Engine = Depends(request_rdb)) -> JSONResponse: + """ + Retrieve a publication record from DB. 
+ """ + with Session(rdb) as session: + publications = session.query(Publication).all() + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content=jsonable_encoder(publications), + ) + + +@external_router.post("/publications", **create.fastapi_endpoint_config) +def publication_post( + payload: PublicationPayload, rdb: Engine = Depends(request_rdb) +) -> JSONResponse: + """ + Create a publication and return its ID + """ + publication_payload = payload.dict() + xdd_uri = str(publication_payload["xdd_uri"]) + with Session(rdb) as session: + publications = ( + session.query(Publication).filter(Publication.xdd_uri == xdd_uri).all() + ) + + if len(publications) < 1: + publication = Publication(**publication_payload) + session.add(publication) + session.commit() + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + headers={ + "content-type": "application/json", + }, + content={"id": publication.id}, + ) + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content={ + "message": f"Publication with xdd_uri of {xdd_uri} exists", + "id": publications[0].id, + }, + ) + + +@external_router.get( + "/publications/{publication_id}", + response_model=PublicationPayload, + **retrieve.fastapi_endpoint_config, +) +def publication_get( + publication_id: int, rdb: Engine = Depends(request_rdb) +) -> JSONResponse: + """ + Retrieve a publication record from DB. + """ + try: + with Session(rdb) as session: + publication = session.query(Publication).get(publication_id) + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content=jsonable_encoder(publication), + ) + except NoResultFound: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={ + "message": f"Publication with id {publication_id} does not exist." 
+ }, + ) + + +@external_router.put("/publications/{publication_id}", **create.fastapi_endpoint_config) +def publication_put( + publication_id: int, payload: PublicationPayload, rdb: Engine = Depends(request_rdb) +) -> JSONResponse: + """ + Update a publication and return its ID + """ + try: + if entry_exists(rdb.connect(), Publication, publication_id): + publication_payload = payload.dict() + publication_payload.pop("id") + with Session(rdb) as session: + session.query(Publication).filter( + Publication.id == publication_id + ).update(publication_payload) + session.commit() + + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content={"id": publication_id}, + ) + raise NoResultFound + except NoResultFound: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={ + "message": f"Publication with id of {publication_id} does not exist.", + }, + ) + + +@external_router.delete( + "/publications/{publication_id}", **delete.fastapi_endpoint_config +) +def publication_delete( + publication_id: int, rdb: Engine = Depends(request_rdb) +) -> JSONResponse: + """ + Delete publication record in DB + """ + try: + if entry_exists(rdb.connect(), Publication, publication_id): + with Session(rdb) as session: + session.query(Publication).filter( + Publication.id == publication_id + ).delete() + session.commit() + + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content={"message": f"Publication with id {publication_id} deleted."}, + ) + raise NoResultFound + except NoResultFound: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={ + "message": f"Publication with id {publication_id} does not exist." + }, + ) diff --git a/tds/modules/external/model.py b/tds/modules/external/model.py new file mode 100644 index 000000000..c74d57ad3 --- /dev/null +++ b/tds/modules/external/model.py @@ -0,0 +1,59 @@ +""" +TDS External Data Model Definition. +""" +from datetime import datetime +from typing import Optional + +import sqlalchemy as sa +from pydantic import BaseModel +from sqlalchemy import func + +from tds.db.base import Base + + +class Publication(Base): + """ + External Publication Data Model. + """ + + __tablename__ = "publication" + + id = sa.Column(sa.Integer(), primary_key=True) + xdd_uri = sa.Column(sa.String(), nullable=False) + title = sa.Column(sa.String(), nullable=False) + publication_data = sa.Column(sa.JSON, nullable=True) + + +class PublicationPayload(BaseModel): + """ + External PublicationPayload Model. + """ + + id: Optional[int] = None + xdd_uri: str + title: str + publication_data: Optional[dict] + + +class Software(Base): + """ + External Software Data Model. + """ + + __tablename__ = "software" + + id = sa.Column(sa.Integer(), primary_key=True) + timestamp = sa.Column(sa.DateTime(), nullable=False, server_default=func.now()) + source = sa.Column(sa.String(), nullable=False) + storage_uri = sa.Column(sa.String(), nullable=False) + + +class SoftwarePayload(BaseModel): + """ + External SoftwarePayload Model. 
+ """ + + id: Optional[int] = None + timestamp: datetime = datetime.now() + source: str + storage_uri: str diff --git a/tds/modules/framework/__init__.py b/tds/modules/framework/__init__.py new file mode 100644 index 000000000..4c52568ba --- /dev/null +++ b/tds/modules/framework/__init__.py @@ -0,0 +1,10 @@ +""" + TDS Framework Module. + + A simple REST module that creates the basic endpoints for an entity in + Terarium Data Service (TDS). +""" +from tds.modules.framework.controller import framework_router as router + +ROUTE_PREFIX = "frameworks" +TAGS = ["Framework"] diff --git a/tds/modules/framework/controller.py b/tds/modules/framework/controller.py new file mode 100644 index 000000000..e0fc23329 --- /dev/null +++ b/tds/modules/framework/controller.py @@ -0,0 +1,191 @@ +""" +TDS Framework Controller. +""" +from logging import Logger + +from fastapi import APIRouter, Depends, Response, status +from fastapi.encoders import jsonable_encoder +from fastapi.responses import JSONResponse +from sqlalchemy.engine.base import Engine +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import Session +from sqlalchemy.orm.exc import NoResultFound + +from tds.db import request_rdb +from tds.modules.framework.response import ModelFrameworkResponse +from tds.modules.model.model import ModelFramework, ModelFrameworkPayload +from tds.operation import create, delete, retrieve, update + +framework_router = APIRouter() +logger = Logger(__name__) + + +@framework_router.get( + "", response_model=list[ModelFrameworkResponse], **retrieve.fastapi_endpoint_config +) +def list_frameworks(rdb: Engine = Depends(request_rdb)) -> JSONResponse: + """ + Retrieve the list of frameworks. + """ + with Session(rdb) as session: + frameworks = session.query(ModelFramework).all() + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content=jsonable_encoder(frameworks), + ) + + +@framework_router.post("", **create.fastapi_endpoint_config) +def framework_post( + payload: ModelFrameworkPayload, rdb: Engine = Depends(request_rdb) +) -> JSONResponse: + """ + Create framework and return its ID + """ + framework_payload = payload.dict() + name = framework_payload["name"] + try: + with Session(rdb) as session: + model_framework = ModelFramework(**framework_payload) + session.add(model_framework) + session.commit() + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + headers={ + "content-type": "application/json", + }, + content={"name": name}, + ) + except IntegrityError: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + headers={ + "content-type": "application/json", + }, + content={"message": f"A framework with the name {name} already exists."}, + ) + + +@framework_router.get( + "/{framework_name}", + response_model=ModelFrameworkResponse, + **retrieve.fastapi_endpoint_config, +) +def framework_get( + framework_name: str, rdb: Engine = Depends(request_rdb) +) -> JSONResponse | Response: + """ + Retrieve a framework from ElasticSearch + """ + try: + with Session(rdb) as session: + framework = ( + session.query(ModelFramework) + .filter(ModelFramework.name == framework_name) + .all() + ) + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content=jsonable_encoder(framework), + ) + except NoResultFound: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={ + "message": f"Model Framework with id 
{framework_name} does not exist." + }, + ) + + +@framework_router.put("/{framework_name}", **update.fastapi_endpoint_config) +def framework_put( + framework_name: str, + payload: ModelFrameworkPayload, + rdb: Engine = Depends(request_rdb), +) -> JSONResponse | Response: + """ + Update a framework in ElasticSearch + """ + try: + framework_payload = payload.dict() + with Session(rdb) as session: + if ( + session.query(ModelFramework) + .filter(ModelFramework.name == framework_name) + .count() + > 0 + ): + session.query(ModelFramework).filter( + ModelFramework.name == framework_name + ).update(framework_payload) + session.commit() + logger.info("framework updated: %s", framework_name) + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content={"name": framework_name}, + ) + raise NoResultFound + except NoResultFound: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={ + "message": f"Framework with name {framework_name} does not exist." + }, + ) + + +@framework_router.delete("/{framework_name}", **delete.fastapi_endpoint_config) +def framework_delete( + framework_name: str, rdb: Engine = Depends(request_rdb) +) -> JSONResponse | Response: + """ + Delete a Framework in ElasticSearch + """ + try: + with Session(rdb) as session: + if ( + session.query(ModelFramework) + .filter(ModelFramework.name == framework_name) + .count() + > 0 + ): + session.query(ModelFramework).filter( + ModelFramework.name == framework_name + ).delete() + session.commit() + success_msg = f"Framework successfully deleted: {framework_name}" + logger.info(success_msg) + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content={"message": success_msg}, + ) + raise NoResultFound + except NoResultFound: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={ + "message": f"Framework with name {framework_name} does not exist." + }, + ) diff --git a/tds/modules/framework/response.py b/tds/modules/framework/response.py new file mode 100644 index 000000000..30e437bb4 --- /dev/null +++ b/tds/modules/framework/response.py @@ -0,0 +1,17 @@ +""" +TDS Framework Response object. +""" +from datetime import datetime + +from pydantic import BaseModel + + +class ModelFrameworkResponse(BaseModel): + """ + Framework Response Object. + """ + + id: str + name: str + description: str + timestamp: datetime diff --git a/tds/modules/model/__init__.py b/tds/modules/model/__init__.py index 7d839c1f5..03e62643f 100644 --- a/tds/modules/model/__init__.py +++ b/tds/modules/model/__init__.py @@ -4,4 +4,4 @@ from tds.modules.model.controller import model_router as router ROUTE_PREFIX = "models" -TAGS = ["TDS Model"] +TAGS = ["Model"] diff --git a/tds/modules/model/controller.py b/tds/modules/model/controller.py index 28a0426a3..49fe459a0 100644 --- a/tds/modules/model/controller.py +++ b/tds/modules/model/controller.py @@ -295,3 +295,134 @@ def model_delete(model_id: str) -> Response: "content-type": "application/json", }, ) + + +# @TODO: Refactor this code to work with new AMR model representation in ES. 
+# @router.post("/opts/{model_operation}", **create.fastapi_endpoint_config) +# def model_opt( +# payload: ModelOptPayload, +# model_operation: enums.ModelOperations, +# rdb: Engine = Depends(request_rdb), +# graph_db=Depends(request_graph_db), +# ) -> Response: +# """ +# Make modeling operations. +# """ +# with Session(rdb) as session: +# payload = payload.dict() +# l_model = session.query(orm.ModelDescription).get(payload.get("left")) +# if payload.get("right", False): +# r_model = session.query(orm.ModelDescription).get(payload.get("right")) +# +# if model_operation == "copy": +# state = orm.ModelState( +# content=session.query(orm.ModelState) +# .get(payload.get("left")) +# .__dict__.get("content") +# ) +# +# elif model_operation in ("decompose", "glue"): +# state = orm.ModelState(content=payload.get("content")) +# else: +# raise HTTPException(status_code=400, detail="Operation not supported") +# +# session.add(state) +# session.commit() +# +# # add new model +# new_model = orm.ModelDescription( +# name=payload.get("name"), +# description=payload.get("description"), +# framework=payload.get("framework"), +# state_id=state.id, +# ) +# session.add(new_model) +# session.commit() +# +# # add parameters to new model. Default to left model id parameters. +# if payload.get("parameters") is None: +# parameters: List[dict] = ( +# session.query(orm.ModelParameter) +# .filter(orm.ModelParameter.model_id == payload.get("left")) +# .all() +# ) +# payload["parameters"] = [] +# for parameter in parameters: +# payload["parameters"].append(parameter.__dict__) +# +# for param in payload.get("parameters"): +# session.add( +# orm.ModelParameter( +# model_id=new_model.id, +# name=param.get("name"), +# default_value=param.get("default_value"), +# type=param.get("type"), +# state_variable=param.get("state_variable"), +# ) +# ) +# session.commit() +# +# if settings.NEO4J_ENABLED: +# provenance_handler = ProvenanceHandler(rdb=rdb, graph_db=graph_db) +# prov_payload = Provenance( +# left=state.id, +# left_type="ModelRevision", +# right=l_model.state_id, +# right_type="ModelRevision", +# relation_type=model_opt_relationship_mapping[model_operation], +# user_id=payload.get("user_id", None), +# concept=".", +# ) +# provenance_handler.create_entry(prov_payload) +# +# if model_operation == "glue" and payload.get("right", False): +# prov_payload = Provenance( +# left=state.id, +# left_type="ModelRevision", +# right=r_model.state_id, +# right_type="ModelRevision", +# relation_type=model_opt_relationship_mapping[model_operation], +# user_id=payload.get("user_id", None), +# concept=".", +# ) +# provenance_handler.create_entry(prov_payload) +# +# # add begins at relationship +# prov_payload = Provenance( +# left=new_model.id, +# left_type="Model", +# right=state.id, +# right_type="ModelRevision", +# relation_type="BEGINS_AT", +# user_id=payload.get("user_id", None), +# concept=".", +# ) +# provenance_handler.create_entry(prov_payload) +# +# # get recently added parameters for the new model +# parameters: Query[orm.ModelParameter] = session.query( +# orm.ModelParameter +# ).filter(orm.ModelParameter.model_id == new_model.id) +# +# created_parameters = orm_to_params(list(parameters)) +# # add ModelParameter nodes +# for parameter in created_parameters: +# payload = Provenance( +# left=parameter.get("id"), +# left_type="ModelParameter", +# right=new_model.state_id, +# right_type="ModelRevision", +# relation_type="PARAMETER_OF", +# user_id=None, +# concept=".", +# ) +# provenance_handler.create_entry(payload) +# +# 
logger.info("new model created: %i", id) +# return Response( +# status_code=status.HTTP_201_CREATED, +# headers={ +# "content-type": "application/json", +# }, +# content=json.dumps({"id": new_model.id}), +# ) diff --git a/tds/modules/model/model.py b/tds/modules/model/model.py index 4c1a5dfa0..befab100c 100644 --- a/tds/modules/model/model.py +++ b/tds/modules/model/model.py @@ -3,10 +3,11 @@ """ from typing import List, Optional -from pydantic import Field +import sqlalchemy as sa +from pydantic import BaseModel, Field from sqlalchemy.orm import Session -from tds.db.base import TdsModel +from tds.db.base import Base, TdsModel from tds.db.relational import engine as pg_engine from tds.lib.concepts import mark_concept_active from tds.lib.model_configs import model_config @@ -113,3 +114,27 @@ class Config: """ schema_extra = {"example": model_config} + + +class ModelFramework(Base): + """ + ModelFramework Data Model. + """ + + __tablename__ = "model_framework" + + name = sa.Column(sa.String(), primary_key=True) + version = sa.Column(sa.String(), nullable=False) + semantics = sa.Column(sa.String(), nullable=False) + schema_url = sa.Column(sa.String()) + + +class ModelFrameworkPayload(BaseModel): + """ + ModelFrameworkPayload Model. + """ + + name: str + version: str + semantics: str + schema_url: Optional[str] diff --git a/tds/modules/model/utils.py b/tds/modules/model/utils.py index 8e8233449..e30333468 100644 --- a/tds/modules/model/utils.py +++ b/tds/modules/model/utils.py @@ -10,8 +10,8 @@ from fastapi.encoders import jsonable_encoder from sqlalchemy.orm import Session -from tds.autogen import orm from tds.db.relational import engine as pg_engine +from tds.modules.model.model import ModelFramework from tds.modules.model.model_description import ModelDescription model_list_fields = [ @@ -100,7 +100,7 @@ def get_frameworks() -> dict: Get model frameworks from postgres. """ with Session(pg_engine) as pg_db: - frameworks = pg_db.query(orm.ModelFramework).all() + frameworks = pg_db.query(ModelFramework).all() framework_map = {} for framework in frameworks: framework_map[framework.schema_url] = framework.name diff --git a/tds/modules/person/__init__.py b/tds/modules/person/__init__.py new file mode 100644 index 000000000..dccea8687 --- /dev/null +++ b/tds/modules/person/__init__.py @@ -0,0 +1,10 @@ +""" + TDS Person Module. + + A simple REST module that creates the basic endpoints for an entity in + Terarium Data Service (TDS). +""" +from tds.modules.person.controller import person_router as router + +ROUTE_PREFIX = "persons" +TAGS = ["Person"] diff --git a/tds/modules/person/controller.py b/tds/modules/person/controller.py new file mode 100644 index 000000000..661896ce8 --- /dev/null +++ b/tds/modules/person/controller.py @@ -0,0 +1,321 @@ +""" + TDS Person Controller. 
+""" +from logging import Logger + +from fastapi import APIRouter, Depends, status +from fastapi.encoders import jsonable_encoder +from fastapi.responses import JSONResponse +from sqlalchemy.engine.base import Engine +from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy.orm import Session +from sqlalchemy.orm.exc import NoResultFound + +from tds.db import entry_exists, list_by_id, request_rdb +from tds.modules.person.model import ( + Association, + AssociationPayload, + Person, + PersonPayload, +) +from tds.modules.person.response import PersonResponse +from tds.operation import create, delete, retrieve, update + +person_router = APIRouter() +logger = Logger(__name__) + + +@person_router.get( + "", response_model=list[PersonResponse], **retrieve.fastapi_endpoint_config +) +def list_persons( + page_size: int = 100, page: int = 0, rdb: Engine = Depends(request_rdb) +): + """ + Page over persons + """ + people = list_by_id(rdb.connect(), Person, page_size, page) + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content=jsonable_encoder(people), + ) + + +@person_router.post("", **create.fastapi_endpoint_config) +def person_post( + payload: PersonPayload, rdb: Engine = Depends(request_rdb) +) -> JSONResponse: + """ + Create person and return its ID + """ + try: + with Session(rdb) as session: + record = Person(**payload.dict()) + session.add(record) + session.commit() + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + headers={ + "content-type": "application/json", + }, + content={"id": record.id}, + ) + except SQLAlchemyError as error: + logger.error(error) + return JSONResponse( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + headers={ + "content-type": "application/json", + }, + content={"message": "Person was not created."}, + ) + + +@person_router.get( + "/{person_id}", response_model=PersonResponse, **retrieve.fastapi_endpoint_config +) +def person_get(person_id: int, rdb: Engine = Depends(request_rdb)) -> JSONResponse: + """ + Retrieve a person from postgres. + """ + try: + with Session(rdb) as session: + person = session.query(Person).get(person_id) + print(person) + + logger.info("Person retrieved: %s", person_id) + + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content=jsonable_encoder(person), + ) + except NoResultFound: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={"message": f"The person with id {person_id} does not exist."}, + ) + + +@person_router.put("/{person_id}", **update.fastapi_endpoint_config) +def person_put( + person_id: int, payload: PersonPayload, rdb: Engine = Depends(request_rdb) +) -> JSONResponse: + """ + Update a person object. + """ + try: + if entry_exists(rdb.connect(), Person, person_id): + with Session(rdb) as session: + project_payload = payload.dict() + + session.query(Person).filter(Person.id == person_id).update( + project_payload + ) + session.commit() + + logger.info("new project created: %i", person_id) + return JSONResponse( + status_code=status.HTTP_202_ACCEPTED, + headers={"content-type": "application/json"}, + content={"id": person_id}, + ) + raise NoResultFound + except NoResultFound as error: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={ + "message": f"Person with id {person_id} does not exist. 
{error.code}" + }, + ) + + +@person_router.delete("/{person_id}", **delete.fastapi_endpoint_config) +def person_delete(person_id: int, rdb: Engine = Depends(request_rdb)) -> JSONResponse: + """ + Delete a Person + """ + try: + if entry_exists(rdb.connect(), Person, person_id): + with Session(rdb) as session: + person = session.query(Person).filter(Person.id == person_id).first() + session.delete(person) + session.commit() + + logger.info("Person deleted: %i", person_id) + return JSONResponse( + status_code=status.HTTP_202_ACCEPTED, + headers={"content-type": "application/json"}, + content={"message": f"Person ({person_id}) deleted."}, + ) + raise NoResultFound + except NoResultFound: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={"message": f"Person with id {person_id} does not exist."}, + ) + + +@person_router.get( + "/{person_id}/associations", + response_model=PersonResponse, + **retrieve.fastapi_endpoint_config, +) +def person_associations( + person_id: int, rdb: Engine = Depends(request_rdb) +) -> JSONResponse: + """ + Retrieve a person's associations. + """ + try: + if entry_exists(rdb.connect(), Person, person_id): + with Session(rdb) as session: + person = session.query(Person).get(person_id) + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content=jsonable_encoder(person.associations), + ) + raise NoResultFound + except NoResultFound as error: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={ + "message": f"Person with id {person_id} does not exist. {error.code}" + }, + ) + + +@person_router.post("/{person_id}/associations", **create.fastapi_endpoint_config) +def create_association( + person_id: int, payload: AssociationPayload, rdb: Engine = Depends(request_rdb) +) -> JSONResponse: + """ + Create a person -> association record. + """ + try: + if entry_exists(rdb.connect(), Person, person_id): + with Session(rdb) as session: + person = session.query(Person).get(person_id) + association = Association(**payload.dict()) + person.associations.append(association) + session.commit() + return JSONResponse( + status_code=status.HTTP_201_CREATED, + headers={ + "content-type": "application/json", + }, + content={"id": association.id}, + ) + raise NoResultFound + except NoResultFound as error: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={ + "message": f"Person with id {person_id} does not exist. {error.code}" + }, + ) + + +@person_router.delete( + "/{person_id}/associations/{association_id}", **delete.fastapi_endpoint_config +) +def delete_association( + person_id: int, association_id: int, rdb: Engine = Depends(request_rdb) +) -> JSONResponse: + """ + Delete a person -> association record. 
+ """ + try: + print(f"Deleting Record...Person:{person_id} -> Association:{association_id}") + if entry_exists(rdb.connect(), Person, person_id) and entry_exists( + rdb.connect(), Association, association_id + ): + with Session(rdb) as session: + person = session.query(Person).get(person_id) + association = session.query(Association).get(association_id) + print(association) + print(association.person) + person.associations.remove(association) + session.add(person) + session.commit() + # session.delete(association) + # session.commit() + return JSONResponse( + status_code=status.HTTP_202_ACCEPTED, + headers={ + "content-type": "application/json", + }, + content={"message": f"Association {association_id} deleted."}, + ) + raise NoResultFound + except NoResultFound as error: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={ + "message": f"Person with id {person_id} does not have " + f"an association with id {association_id}. " + f"{error.code}" + }, + ) + + +@person_router.get("/{person_id}/associations/{association_id}") +def get_association( + person_id: int, association_id: int, rdb: Engine = Depends(request_rdb) +) -> JSONResponse: + """ + Get a specific association by ID + """ + try: + if entry_exists(rdb.connect(), Person, person_id) and entry_exists( + rdb.connect(), Association, association_id + ): + with Session(rdb) as session: + association = session.query(Association).get(association_id) + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={ + "content-type": "application/json", + }, + content=jsonable_encoder(association), + ) + raise NoResultFound + except NoResultFound as error: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + headers={ + "content-type": "application/json", + }, + content={ + "message": f"Person with id {person_id} does not have " + f"an association with id {association_id}. " + f"{error.code}" + }, + ) diff --git a/tds/modules/person/model.py b/tds/modules/person/model.py new file mode 100644 index 000000000..b2b13b736 --- /dev/null +++ b/tds/modules/person/model.py @@ -0,0 +1,83 @@ +""" +TDS Person Data Model Definition. +""" +from typing import Optional + +import sqlalchemy as sa +from pydantic import BaseModel +from sqlalchemy.orm import relationship + +from tds.db.base import Base +from tds.db.enums import ResourceType, Role + + +class Person(Base): + """ + Person Data Model + """ + + __tablename__ = "person" + + id = sa.Column(sa.Integer(), primary_key=True) + name = sa.Column(sa.String(), nullable=False) + email = sa.Column(sa.String(), nullable=False) + org = sa.Column(sa.String()) + website = sa.Column(sa.String()) + is_registered = sa.Column(sa.Boolean(), nullable=False) + + associations = relationship( + "Association", + uselist=True, + foreign_keys=[id], + primaryjoin="Person.id == Association.person_id", + backref="person", + cascade_backrefs=False, + # cascade="save-update", + # single_parent=True, + ) + + class Config: + """ + Person Data Model Swagger Example + """ + + schema_extra = {"example": {}} + + +class PersonPayload(BaseModel): + """ + PersonPayload Data Model. 
+    """
+
+    id: Optional[int] = None
+    name: str
+    email: str
+    org: Optional[str]
+    website: Optional[str]
+    is_registered: bool
+
+
+class Association(Base):
+    """
+    Association Data Model
+    """
+
+    __tablename__ = "association"
+
+    id = sa.Column(sa.Integer(), primary_key=True)
+    person_id = sa.Column(sa.Integer(), sa.ForeignKey("person.id"), nullable=False)
+    resource_id = sa.Column(sa.String(), nullable=False)
+    resource_type = sa.Column(sa.Enum(ResourceType))
+    role = sa.Column(sa.Enum(Role))
+
+
+class AssociationPayload(BaseModel):
+    """
+    AssociationPayload Data Model.
+    """
+
+    id: Optional[int] = None
+    person_id: int
+    resource_id: str
+    resource_type: ResourceType
+    role: Role
diff --git a/tds/modules/person/response.py b/tds/modules/person/response.py
new file mode 100644
index 000000000..d21041894
--- /dev/null
+++ b/tds/modules/person/response.py
@@ -0,0 +1,26 @@
+"""
+TDS Person Response object.
+"""
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class PersonResponse(BaseModel):
+    """
+    Person Response Object.
+    """
+
+    id: int
+    name: str
+    email: str
+    org: Optional[str]
+    website: Optional[str]
+    is_registered: bool
+
+
+def person_response(person_list):
+    """
+    Function builds list of persons for response.
+    """
+    return [PersonResponse(**x["_source"]) for x in person_list]
diff --git a/tds/modules/project/__init__.py b/tds/modules/project/__init__.py
new file mode 100644
index 000000000..ec1020684
--- /dev/null
+++ b/tds/modules/project/__init__.py
@@ -0,0 +1,10 @@
+"""
+    TDS Project Module.
+
+    A simple REST module that creates the basic endpoints for an entity in
+    Terarium Data Service (TDS).
+"""
+from tds.modules.project.controller import project_router as router
+
+ROUTE_PREFIX = "projects"
+TAGS = ["Project"]
diff --git a/tds/modules/project/controller.py b/tds/modules/project/controller.py
new file mode 100644
index 000000000..1164ccb19
--- /dev/null
+++ b/tds/modules/project/controller.py
@@ -0,0 +1,336 @@
+"""
+CRUD operations for Project
+"""
+from logging import Logger
+from typing import List, Optional
+
+from fastapi import APIRouter, Depends, HTTPException
+from fastapi import Query as FastAPIQuery
+from fastapi import status
+from fastapi.encoders import jsonable_encoder
+from fastapi.responses import JSONResponse
+from sqlalchemy.engine.base import Engine
+from sqlalchemy.orm import Query, Session
+from sqlalchemy.orm.exc import NoResultFound
+
+from tds.db import entry_exists, es_client, request_rdb
+from tds.db.enums import ResourceType
+from tds.modules.project.helpers import (
+    ResourceDoesNotExist,
+    adjust_project_assets,
+    es_list_response,
+    es_resources,
+    save_project,
+)
+from tds.modules.project.model import Project, ProjectAsset, ProjectPayload
+from tds.modules.project.response import ProjectResponse
+from tds.operation import create, delete, retrieve, update
+from tds.schema.resource import get_resource_orm
+from tds.settings import settings
+
+project_router = APIRouter()
+logger = Logger(__name__)
+es = es_client()
+
+
+@project_router.get(
+    "", response_model=list[ProjectResponse], **retrieve.fastapi_endpoint_config
+)
+def list_projects(rdb: Engine = Depends(request_rdb)) -> JSONResponse:
+    """
+    Retrieve the list of projects.
+    """
+    with Session(rdb) as session:
+        projects = session.query(Project).all()
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        headers={"content-type": "application/json"},
+        content=jsonable_encoder(projects),
+    )
+
+
+@project_router.post("", **create.fastapi_endpoint_config)
+def project_post(
+    payload: ProjectPayload, rdb: Engine = Depends(request_rdb)
+) -> JSONResponse:
+    """
+    Create project and return its ID
+    """
+    try:
+        with Session(rdb) as session:
+            project_payload = payload.dict()
+            if "concept" in project_payload:
+                # pylint: disable-next=unused-variable
+                concept_payload = project_payload.pop(
+                    "concept"
+                )  # TODO: Save ontology term
+
+            project = save_project(project=project_payload, session=session)
+
+            project_id: int = project.id
+
+            logger.info("new project created: %i", project_id)
+            return JSONResponse(
+                status_code=status.HTTP_201_CREATED,
+                headers={
+                    "location": f"/api/projects/{project_id}",
+                    "content-type": "application/json",
+                },
+                content={"id": project_id},
+            )
+    except ResourceDoesNotExist as error:
+        return JSONResponse(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            headers={"content-type": "application/json"},
+            content={"message": error.message},
+        )
+
+
+@project_router.get(
+    "/{project_id}", response_model=ProjectResponse, **retrieve.fastapi_endpoint_config
+)
+def project_get(project_id: int, rdb: Engine = Depends(request_rdb)) -> JSONResponse:
+    """
+    Retrieve a project from postgres.
+    """
+    try:
+        if entry_exists(rdb.connect(), Project, project_id):
+            with Session(rdb) as session:
+                project = session.query(Project).get(project_id)
+                # pylint: disable-next=unused-variable
+                parameters: Query[ProjectAsset] = session.query(ProjectAsset).filter(
+                    ProjectAsset.project_id == project_id
+                )
+        else:
+            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
+
+        return JSONResponse(
+            status_code=status.HTTP_200_OK,
+            headers={"content-type": "application/json"},
+            content=jsonable_encoder(project),
+        )
+    except NoResultFound:
+        return JSONResponse(
+            status_code=status.HTTP_404_NOT_FOUND,
+            headers={"content-type": "application/json"},
+            content={"message": f"The project with id {project_id} was not found."},
+        )
+
+
+@project_router.put("/{project_id}", **update.fastapi_endpoint_config)
+def project_put(
+    project_id: int, payload: ProjectPayload, rdb: Engine = Depends(request_rdb)
+) -> JSONResponse:
+    """
+    Update a project.
+    """
+    try:
+        if entry_exists(rdb.connect(), Project, project_id):
+            with Session(rdb) as session:
+                project_payload = payload.dict()
+                assets = project_payload.pop("assets")
+                if "concept" in project_payload:
+                    # pylint: disable-next=unused-variable
+                    concept_payload = project_payload.pop(
+                        "concept"
+                    )  # TODO: Save ontology term
+
+                session.query(Project).filter(Project.id == project_id).update(
+                    project_payload
+                )
+                adjust_project_assets(project_id, assets, session)
+                session.commit()
+
+            logger.info("project updated: %i", project_id)
+            return JSONResponse(
+                status_code=status.HTTP_202_ACCEPTED,
+                headers={"content-type": "application/json"},
+                content={"id": project_id},
+            )
+
+        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
+    except ResourceDoesNotExist as error:
+        return JSONResponse(
+            status_code=status.HTTP_404_NOT_FOUND,
+            headers={"content-type": "application/json"},
+            content={"message": error.message},
+        )
+
+
+@project_router.delete("/{project_id}", **delete.fastapi_endpoint_config)
+def project_delete(project_id: int, rdb: Engine = Depends(request_rdb)) -> JSONResponse:
+    """
+    Deactivate project
+    """
+    try:
+        if entry_exists(rdb.connect(), Project, project_id):
+            with Session(rdb) as session:
+                project = session.query(Project).get(project_id)
+
+                # set to dict and active to false
+                project_ = project.__dict__
+                project_.pop("_sa_instance_state")
+                project_["active"] = False
+
+            with Session(rdb) as session:
+                session.query(Project).filter(Project.id == project_id).update(project_)
+                session.commit()
+
+        else:
+            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
+        return JSONResponse(
+            headers={"content-type": "application/json"},
+            content={"id": project_id, "status": project_["active"]},
+        )
+    except NoResultFound:
+        return JSONResponse(
+            status_code=status.HTTP_404_NOT_FOUND,
+            headers={"content-type": "application/json"},
+            content={
+                "id": project_id,
+                "message": f"Project with ID {project_id} not found.",
+            },
+        )
+
+
+@project_router.delete(
+    "/{project_id}/assets/{resource_type}/{resource_id}",
+    **delete.fastapi_endpoint_config,
+)
+def delete_asset(
+    project_id: int,
+    resource_type: ResourceType,
+    resource_id: int | str,
+    rdb: Engine = Depends(request_rdb),
+) -> JSONResponse:
+    """
+    Remove asset
+    """
+    with Session(rdb) as session:
+        project_assets = list(
+            session.query(ProjectAsset).filter(
+                ProjectAsset.project_id == project_id,
+                ProjectAsset.resource_type == resource_type,
+                ProjectAsset.resource_id == str(resource_id),
+            )
+        )
+        if len(project_assets) == 0:
+            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
+        # Loop through assets because the result is an array and if there are
+        # more than one result, we want to remove it anyway.
+ for asset in project_assets: + session.delete(asset) + session.commit() + return JSONResponse( + status_code=status.HTTP_204_NO_CONTENT, + headers={"content-type": "application/json"}, + content={"message": "Asset has been deleted."}, + ) + + +@project_router.post( + "/{project_id}/assets/{resource_type}/{resource_id}", + **create.fastapi_endpoint_config, +) +def create_asset( + project_id: int, + resource_type: ResourceType, + resource_id: int | str, + rdb: Engine = Depends(request_rdb), +) -> JSONResponse: + """ + Create asset and return its ID + """ + with Session(rdb) as session: + identical_count = ( + session.query(ProjectAsset) + .filter( + ProjectAsset.project_id == project_id, + ProjectAsset.resource_id == str(resource_id), + ProjectAsset.resource_type == resource_type, + ) + .count() + ) + + if identical_count == 0: + project_asset = ProjectAsset( + project_id=project_id, + resource_id=str(resource_id), + resource_type=resource_type, + ) + session.add(project_asset) + session.commit() + asset_id: int = project_asset.id + + logger.info("new asset created: %i", asset_id) + return JSONResponse( + status_code=status.HTTP_201_CREATED, + headers={"content-type": "application/json"}, + content={"id": asset_id}, + ) + return JSONResponse( + status_code=status.HTTP_409_CONFLICT, + headers={"content-type": "application/json"}, + content={"message": "Asset already exists for project."}, + ) + + +@project_router.get("/{project_id}/assets", **retrieve.fastapi_endpoint_config) +def get_project_assets( + project_id: int, + types: Optional[List[ResourceType]] = FastAPIQuery( + default=[ + ResourceType.datasets, + ResourceType.models, + ResourceType.model_configurations, + ResourceType.publications, + ResourceType.simulations, + ResourceType.workflows, + ResourceType.artifacts, + ] + ), + rdb: Engine = Depends(request_rdb), +) -> JSONResponse: + """ + Retrieve project assets + """ + if entry_exists(rdb.connect(), Project, project_id): + with Session(rdb) as session: + project = session.query(Project).get(project_id) + assets = project.assets + assets_key_ids = {type: [] for type in types} + for asset in list(assets): + if asset.resource_type in types: + assets_key_ids[asset.resource_type].append(asset.resource_id) + + assets_key_objects = {} + for key in assets_key_ids: + orm_type = get_resource_orm(key) + if key in es_resources: + responder = es_list_response[key] + index_singular = key if key[-1] != "s" else key.rstrip("s") + index = f"{settings.ES_INDEX_PREFIX}{index_singular}" + es_items = es.search( + index=index, + query={"ids": {"values": assets_key_ids[key]}}, + fields=responder["fields"], + ) + assets_key_objects[key] = ( + [] + if es_items["hits"]["total"]["value"] == 0 + else responder["function"](es_items["hits"]["hits"]) + ) + else: + assets_key_objects[key] = ( + session.query(orm_type) + .filter(orm_type.id.in_(assets_key_ids[key])) + .all() + ) + else: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) + return JSONResponse( + status_code=status.HTTP_200_OK, + headers={"content-type": "application/json"}, + content=jsonable_encoder(assets_key_objects), + ) diff --git a/tds/modules/project/helpers.py b/tds/modules/project/helpers.py new file mode 100644 index 000000000..c33b82680 --- /dev/null +++ b/tds/modules/project/helpers.py @@ -0,0 +1,163 @@ +""" +TDS Project helpers. 
+""" +from collections import defaultdict +from typing import Dict, List + +from sqlalchemy.orm import Session + +from tds.db import entry_exists, es_client, rdb +from tds.lib.utils import get_singular_index +from tds.modules.artifact.response import artifact_response +from tds.modules.dataset.response import dataset_response +from tds.modules.model.utils import model_list_fields, model_list_response +from tds.modules.model_configuration.response import configuration_response +from tds.modules.project.model import Project, ProjectAsset +from tds.modules.simulation.response import simulation_response +from tds.modules.workflow.response import workflow_response +from tds.schema.resource import ResourceType, get_resource_orm +from tds.settings import settings + +es_list_response = { + ResourceType.models: {"function": model_list_response, "fields": model_list_fields}, + ResourceType.model_configurations: { + "function": configuration_response, + "fields": None, + }, + ResourceType.datasets: {"function": dataset_response, "fields": None}, + ResourceType.simulations: {"fields": None, "function": simulation_response}, + ResourceType.workflows: {"fields": None, "function": workflow_response}, + ResourceType.artifacts: {"fields": None, "function": artifact_response}, +} + +es_resources = [ + ResourceType.datasets, + ResourceType.models, + ResourceType.model_configurations, + ResourceType.simulations, + ResourceType.workflows, + ResourceType.artifacts, +] + + +class ResourceDoesNotExist(Exception): + """ + ResourceDoesNotExist Exception class. + """ + + message = "The Requested Resource does not exist." + + def __init__(self, resource_type): + self.message = f"{resource_type}: {self.message}" + + +def save_project(project: dict, session): + """ + Function saves the project from a payload dict. + """ + asset_dict = project.pop("assets") + assets = check_assets(assets=asset_dict) + + if assets: + project = Project(**project) + session.add(project) + session.commit() + build_asset_records( + project_id=project.id, asset_ids=asset_dict, session=session + ) + session.commit() + return project + + +def check_assets(assets: list) -> bool: + """ + Function verifies assets exist before saving the project. + """ + for resource_type in assets: + if resource_type in es_resources: + resources = handle_es_resource( + object_resource=resource_type, object_ids=assets[resource_type] + ) + else: + resources = handle_orm_resource( + object_resource=resource_type, object_ids=assets[resource_type] + ) + + if resources is False: + raise ResourceDoesNotExist(resource_type) + + return True + + +def handle_es_resource(object_resource: str, object_ids: list): + """ + Function handles an ElasticSearch asset resource. + """ + es = es_client() + index = get_singular_index(f"{settings.ES_INDEX_PREFIX}{object_resource}") + query = {"ids": {"values": object_ids}} + res = es.search(index=index, query=query) + + if int(res["hits"]["total"]["value"]) != len(object_ids): + return False + + return True + + +def handle_orm_resource(object_resource: ResourceType, object_ids: list): + """ + Function handles ORM project assets. + """ + current_orm = get_resource_orm(object_resource) + if not all((entry_exists(rdb.connect(), current_orm, oid) for oid in object_ids)): + raise ResourceDoesNotExist(object_resource) + return True + + +def build_asset_records(project_id, asset_ids, session): + """ + Function builds the asset record list and saves it to the DB. 
+ """ + for resource_type in asset_ids: + resource_ids = asset_ids[resource_type] + project_assets = [ + ProjectAsset( + project_id=project_id, + resource_id=resource_id, + resource_type=resource_type, + external_ref="", + ) + for resource_id in resource_ids + ] + session.bulk_save_objects(project_assets) + + +def adjust_project_assets( + project_id: int, assets: Dict[ResourceType, List[int]], session: Session +): + """ + Add new entries and remove unused entries + """ + active = defaultdict(list) + for asset in session.query(ProjectAsset).filter( + ProjectAsset.project_id == project_id + ): + active[asset.resource_type].append(asset.resource_id) + + for resource_type, resource_ids in assets.items(): + project_assets = [ + ProjectAsset( + project_id=project_id, + resource_id=resource_id, + resource_type=resource_type, + external_ref="", + ) + for resource_id in resource_ids + if resource_id not in active[resource_type] + ] + session.bulk_save_objects(project_assets) + + for resource_type, resource_ids in active.items(): + inactive_ids = set(resource_ids) - set(assets.get(resource_type, [])) + for inactive_id in inactive_ids: + session.delete(session.query(ProjectAsset).get(inactive_id)) diff --git a/tds/modules/project/model.py b/tds/modules/project/model.py new file mode 100644 index 000000000..2b7002915 --- /dev/null +++ b/tds/modules/project/model.py @@ -0,0 +1,96 @@ +""" +TDS Project Data Model Definition. +""" +from typing import Optional + +import sqlalchemy as sa +from pydantic import BaseModel +from sqlalchemy.orm import relationship +from sqlalchemy.sql import func + +from tds.db.base import Base +from tds.db.enums import ResourceType + + +class Project(Base): + """ + Project data model. + """ + + __tablename__ = "project" + + id = sa.Column(sa.Integer(), primary_key=True) + name = sa.Column(sa.String(), nullable=False) + description = sa.Column(sa.String(), nullable=False) + timestamp = sa.Column(sa.DateTime(), server_default=func.now()) + active = sa.Column(sa.Boolean(), nullable=False) + username = sa.Column(sa.String()) + + assets = relationship( + "ProjectAsset", + uselist=True, + foreign_keys=[id], + primaryjoin="Project.id == ProjectAsset.project_id", + backref="project", + ) + + class Config: + """ + Project Data Model Swagger Example + """ + + schema_extra = {"example": {}} + + +class ProjectPayload(BaseModel): + """ + Project Pydantic Model. + """ + + id: Optional[int] = None + name: str + description: str + assets: Optional[dict] + active: bool + username: Optional[str] + + class Config: + """ + Project Data Model Swagger Example + """ + + schema_extra = { + "example": { + "name": "A cool project", + "description": "Project info goes here.", + "assets": [], + "active": "true", + "username": "Loki", + } + } + + +class ProjectAsset(Base): + """ + ProjectAsset Data Model. + """ + + __tablename__ = "project_asset" + + id = sa.Column(sa.Integer(), primary_key=True) + project_id = sa.Column(sa.Integer(), sa.ForeignKey("project.id"), nullable=False) + resource_id = sa.Column(sa.String(), nullable=False) + resource_type = sa.Column(sa.Enum(ResourceType), nullable=False) + external_ref = sa.Column(sa.String()) + + +class ProjectAssetPayload(BaseModel): + """ + ProjectAssetPayload Data Model. 
+ """ + + id: Optional[int] = None + project_id: Optional[int] = None + resource_id: Optional[int] = None + resource_type: ResourceType + external_ref: Optional[str] diff --git a/tds/modules/project/response.py b/tds/modules/project/response.py new file mode 100644 index 000000000..963a9d5e6 --- /dev/null +++ b/tds/modules/project/response.py @@ -0,0 +1,20 @@ +""" +TDS Project Response object. +""" +from datetime import datetime +from typing import Optional + +from pydantic import BaseModel + + +class ProjectResponse(BaseModel): + """ + ProjectResponse Class. + """ + + id: Optional[int] = None + name: str + description: str + timestamp: Optional[datetime] = datetime.now() + active: bool + username: Optional[str] diff --git a/tds/modules/provenance/controller.py b/tds/modules/provenance/controller.py index 64596f50b..285b76282 100644 --- a/tds/modules/provenance/controller.py +++ b/tds/modules/provenance/controller.py @@ -12,8 +12,7 @@ from sqlalchemy.orm import Session from sqlalchemy.orm.exc import NoResultFound -from tds.autogen import enums -from tds.db import request_graph_db, request_rdb +from tds.db import enums, request_graph_db, request_rdb from tds.db.graph.provenance_handler import ProvenanceHandler from tds.db.graph.search_provenance import SearchProvenance from tds.modules.provenance.model import Provenance, ProvenancePayload, ProvenanceSearch diff --git a/tds/modules/provenance/model.py b/tds/modules/provenance/model.py index 30295832b..4bec11b53 100644 --- a/tds/modules/provenance/model.py +++ b/tds/modules/provenance/model.py @@ -9,8 +9,8 @@ from pydantic.main import BaseModel from sqlalchemy import func -from tds.autogen.enums import ProvenanceType, RelationType -from tds.autogen.orm import Base +from tds.db.base import Base +from tds.db.enums import ProvenanceType, RelationType class Provenance(Base): diff --git a/tds/modules/provenance/response.py b/tds/modules/provenance/response.py index 8fb9f2380..144e54041 100644 --- a/tds/modules/provenance/response.py +++ b/tds/modules/provenance/response.py @@ -6,7 +6,7 @@ from pydantic import BaseModel -from tds.autogen.enums import ProvenanceType, RelationType +from tds.db.enums import ProvenanceType, RelationType class ProvenanceResponse(BaseModel): diff --git a/tds/modules/simulation/model.py b/tds/modules/simulation/model.py index b8173479b..b404f3565 100644 --- a/tds/modules/simulation/model.py +++ b/tds/modules/simulation/model.py @@ -6,8 +6,8 @@ from pydantic import Field -from tds.autogen.enums import SimulationEngine, SimulationStatus from tds.db.base import TdsModel +from tds.db.enums import SimulationEngine, SimulationStatus from tds.settings import settings diff --git a/tds/modules/simulation/response.py b/tds/modules/simulation/response.py index f5648b941..e91461797 100644 --- a/tds/modules/simulation/response.py +++ b/tds/modules/simulation/response.py @@ -6,7 +6,7 @@ from pydantic import BaseModel -from tds.autogen.enums import SimulationEngine, SimulationStatus +from tds.db.enums import SimulationEngine, SimulationStatus class SimulationResponse(BaseModel): diff --git a/tds/modules/workflow/__init__.py b/tds/modules/workflow/__init__.py index 6b5021de9..36b92684f 100644 --- a/tds/modules/workflow/__init__.py +++ b/tds/modules/workflow/__init__.py @@ -7,4 +7,4 @@ from tds.modules.workflow.controller import workflow_router as router ROUTE_PREFIX = "workflows" -TAGS = ["TDS Workflow"] +TAGS = ["Workflow"] diff --git a/tds/routers/__init__.py b/tds/routers/__init__.py deleted file mode 100644 index e69de29bb..000000000 
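For reference, the new project module above expects ProjectPayload.assets to be a mapping of ResourceType members to lists of IDs for resources that already exist: save_project() first runs check_assets() (ES-backed types are verified against ElasticSearch, the rest through the ORM) and only then writes the Project row and its ProjectAsset records. A minimal usage sketch follows; it is not part of the patch, and the engine import, asset IDs, and field values are illustrative assumptions:

# Illustrative sketch of the new project helpers; IDs and values are made up.
from sqlalchemy.orm import Session

from tds.db.enums import ResourceType
from tds.db.relational import engine as pg_engine  # engine used elsewhere in tds
from tds.modules.project.helpers import save_project
from tds.modules.project.model import ProjectPayload

payload = ProjectPayload(
    name="A cool project",
    description="Project info goes here.",
    # keys are ResourceType members; values are IDs of assets that already exist
    assets={ResourceType.models: ["example-model-id"]},
    active=True,
    username="Loki",
)

with Session(pg_engine) as session:
    # the controller pops "concept" before this call; save_project raises
    # ResourceDoesNotExist if any listed asset cannot be found
    project = save_project(project=payload.dict(), session=session)
    print(project.id)  # primary key assigned by postgres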
diff --git a/tds/routers/external.py b/tds/routers/external.py deleted file mode 100644 index 2604bdf87..000000000 --- a/tds/routers/external.py +++ /dev/null @@ -1,151 +0,0 @@ -""" -Basic crud operations for external resources -""" - -import json -from logging import Logger - -from fastapi import APIRouter, Depends, HTTPException, Response, status -from sqlalchemy import or_ -from sqlalchemy.engine.base import Engine -from sqlalchemy.orm import Session - -from tds.autogen import orm -from tds.db import request_rdb -from tds.operation import create, delete, retrieve -from tds.schema.resource import Publication, Software - -logger = Logger(__name__) -router = APIRouter() - - -@router.get("/software/{id}", **retrieve.fastapi_endpoint_config) -def get_software(id: int, rdb: Engine = Depends(request_rdb)) -> Software: - """ - Retrieve software metadata - """ - with Session(rdb) as session: - if session.query(orm.Software).filter(orm.Software.id == id).count() == 1: - return Software.from_orm(session.query(orm.Software).get(id)) - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) - - -@router.post("/software", **create.fastapi_endpoint_config) -def create_software(payload: Software, rdb: Engine = Depends(request_rdb)) -> Response: - """ - Create software metadata - """ - with Session(rdb) as session: - software_payload = payload.dict() - software = orm.Software(**software_payload) - session.add(software) - session.commit() - id: int = software.id - logger.info("new software with %i", id) - return Response( - status_code=status.HTTP_201_CREATED, - headers={ - "content-type": "application/json", - }, - content=json.dumps({"id": id}), - ) - - -@router.delete("/software/{id}", **delete.fastapi_endpoint_config) -def delete_software(id: int, rdb: Engine = Depends(request_rdb)) -> Response: - """ - Delete software metadata - """ - with Session(rdb) as session: - if session.query(orm.Software).filter(orm.Software.id == id).count() == 1: - software = session.query(orm.Software).get(id) - session.delete(software) - session.commit() - else: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) - return Response( - status_code=status.HTTP_204_NO_CONTENT, - ) - - -@router.get("/publications/{id}", **retrieve.fastapi_endpoint_config) -def get_publication(id: int | str, rdb: Engine = Depends(request_rdb)) -> Publication: - """ - Retrieve model - """ - with Session(rdb) as session: - publications = ( - session.query(orm.Publication) - .filter( - or_( - str(id) == orm.Publication.xdd_uri, - (str(id).isdigit()) and (int(id) == orm.Publication.id), - ) - ) - .all() - ) - if len(publications) != 0: - publication = publications[0] - else: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) - return Publication.from_orm(publication) - - -@router.post("/publications", **create.fastapi_endpoint_config) -def create_publication( - payload: Publication, rdb: Engine = Depends(request_rdb) -) -> Response: - """ - Create publication and return its ID - """ - with Session(rdb) as session: - publication_payload = payload.dict() - publications = ( - session.query(orm.Publication) - .filter( - str(publication_payload["xdd_uri"]) == orm.Publication.xdd_uri, - ) - .all() - ) - - if len(publications) != 0: - publication = publications[0].__dict__.copy() - publication.pop("_sa_instance_state") - return Response( - status_code=status.HTTP_200_OK, - headers={ - "content-type": "application/json", - }, - content=json.dumps(publication), - ) - # pylint: disable-next=unused-variable - publication = 
orm.Publication(**publication_payload) - session.add(publication) - session.commit() - id: int = publication.id - - logger.info("new publication created: %i", id) - return Response( - status_code=status.HTTP_201_CREATED, - headers={ - "content-type": "application/json", - }, - content=json.dumps({"id": id}), - ) - - -@router.delete("/publications/{id}", **delete.fastapi_endpoint_config) -def delete_publication(id: int, rdb: Engine = Depends(request_rdb)) -> Response: - """ - Delete publications metadata - """ - with Session(rdb) as session: - if session.query(orm.Publication).filter(orm.Publication.id == id).count() == 1: - publication = session.query(orm.Publication).get(id) - session.delete(publication) - session.commit() - else: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) - return Response( - status_code=status.HTTP_204_NO_CONTENT, - ) diff --git a/tds/routers/models.py b/tds/routers/models.py deleted file mode 100644 index 932f48ce9..000000000 --- a/tds/routers/models.py +++ /dev/null @@ -1,211 +0,0 @@ -""" -CRUD operations for models -""" - -import json -from logging import Logger - -from fastapi import APIRouter, Depends, HTTPException, Response, status -from sqlalchemy.engine.base import Engine -from sqlalchemy.orm import Session - -from tds.autogen import orm -from tds.db import request_rdb -from tds.operation import create, delete, retrieve -from tds.schema.model import ModelFramework - -logger = Logger(__name__) -router = APIRouter() - - -@router.post("/frameworks", **create.fastapi_endpoint_config) -def create_framework( - payload: ModelFramework, rdb: Engine = Depends(request_rdb) -) -> Response: - """ - Create framework metadata - """ - - with Session(rdb) as session: - framework_payload = payload.dict() - framework = orm.ModelFramework(**framework_payload) - session.add(framework) - session.commit() - name: str = framework.name - logger.info("new framework with %i", name) - return Response( - status_code=status.HTTP_201_CREATED, - headers={ - "content-type": "application/json", - }, - content=json.dumps({"name": name}), - ) - - -@router.get("/frameworks/{name}", **retrieve.fastapi_endpoint_config) -def get_framework(name: str, rdb: Engine = Depends(request_rdb)) -> ModelFramework: - """ - Retrieve framework metadata - """ - with Session(rdb) as session: - if ( - session.query(orm.ModelFramework) - .filter(orm.ModelFramework.name == name) - .count() - == 1 - ): - return ModelFramework.from_orm(session.query(orm.ModelFramework).get(name)) - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) - - -@router.delete("/frameworks/{name}", **delete.fastapi_endpoint_config) -def delete_framework(name: str, rdb: Engine = Depends(request_rdb)) -> Response: - """ - Delete framework metadata - """ - with Session(rdb) as session: - if ( - session.query(orm.ModelFramework) - .filter(orm.ModelFramework.name == name) - .count() - == 1 - ): - framework = session.query(orm.ModelFramework).get(name) - session.delete(framework) - session.commit() - else: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) - return Response( - status_code=status.HTTP_204_NO_CONTENT, - ) - - -# @TODO: Refactor this code to work with new AMR model representation in ES. -# @router.post("/opts/{model_operation}", **create.fastapi_endpoint_config) -# def model_opt( -# payload: ModelOptPayload, -# model_operation: enums.ModelOperations, -# rdb: Engine = Depends(request_rdb), -# graph_db=Depends(request_graph_db), -# ) -> Response: -# """ -# Make modeling operations. 
-# """ -# with Session(rdb) as session: -# payload = payload.dict() -# l_model = session.query(orm.ModelDescription).get(payload.get("left")) -# if payload.get("right", False): -# r_model = session.query(orm.ModelDescription).get(payload.get("right")) -# -# if model_operation == "copy": -# state = orm.ModelState( -# content=session.query(orm.ModelState) -# .get(payload.get("left")) -# .__dict__.get("content") -# ) -# -# elif model_operation in ("decompose", "glue"): -# state = orm.ModelState(content=payload.get("content")) -# else: -# raise HTTPException(status_code=400, detail="Operation not supported") -# -# session.add(state) -# session.commit() -# -# # add new model -# new_model = orm.ModelDescription( -# name=payload.get("name"), -# description=payload.get("description"), -# framework=payload.get("framework"), -# state_id=state.id, -# ) -# session.add(new_model) -# session.commit() -# -# # add parameters to new model. Default to left model id parameters. -# if payload.get("parameters") is None: -# parameters: List[dict] = ( -# session.query(orm.ModelParameter) -# .filter(orm.ModelParameter.model_id == payload.get("left")) -# .all() -# ) -# payload["parameters"] = [] -# for parameter in parameters: -# payload["parameters"].append(parameter.__dict__) -# -# for param in payload.get("parameters"): -# session.add( -# orm.ModelParameter( -# model_id=new_model.id, -# name=param.get("name"), -# default_value=param.get("default_value"), -# type=param.get("type"), -# state_variable=param.get("state_variable"), -# ) -# ) -# session.commit() -# -# if settings.NEO4J_ENABLED: -# provenance_handler = ProvenanceHandler(rdb=rdb, graph_db=graph_db) -# prov_payload = Provenance( -# left=state.id, -# left_type="ModelRevision", -# right=l_model.state_id, -# right_type="ModelRevision", -# relation_type=model_opt_relationship_mapping[model_operation], -# user_id=payload.get("user_id", None), -# concept=".", -# ) -# provenance_handler.create_entry(prov_payload) -# -# if model_operation == "glue" and payload.get("right", False): -# prov_payload = Provenance( -# left=state.id, -# left_type="ModelRevision", -# right=r_model.state_id, -# right_type="ModelRevision", -# relation_type=model_opt_relationship_mapping[model_operation], -# user_id=payload.get("user_id", None), -# concept=".", -# ) -# provenance_handler.create_entry(prov_payload) -# -# # add begins at relationship -# prov_payload = Provenance( -# left=new_model.id, -# left_type="Model", -# right=state.id, -# right_type="ModelRevision", -# relation_type="BEGINS_AT", -# user_id=payload.get("user_id", None), -# concept=".", -# ) -# provenance_handler.create_entry(prov_payload) -# -# # get recently added parameters for the new model -# parameters: Query[orm.ModelParameter] = session.query( -# orm.ModelParameter -# ).filter(orm.ModelParameter.model_id == new_model.id) -# -# created_parameters = orm_to_params(list(parameters)) -# # add ModelParameter nodes -# for parameter in created_parameters: -# payload = Provenance( -# left=parameter.get("id"), -# left_type="ModelParameter", -# right=new_model.state_id, -# right_type="ModelRevision", -# relation_type="PARAMETER_OF", -# user_id=None, -# concept=".", -# ) -# provenance_handler.create_entry(payload) -# -# logger.info("new model created: %i", id) -# return Response( -# status_code=status.HTTP_201_CREATED, -# headers={ -# "content-type": "application/json", -# }, -# content=json.dumps({"id": new_model.id}), -# ) diff --git a/tds/routers/persons.py b/tds/routers/persons.py deleted file mode 100644 index 
16add4c2e..000000000 --- a/tds/routers/persons.py +++ /dev/null @@ -1,145 +0,0 @@ -""" -CRUD operations for persons and related tables in the DB -""" - -import json -from logging import Logger - -from fastapi import APIRouter, Depends, Response, status -from sqlalchemy.engine.base import Engine -from sqlalchemy.orm import Session - -from tds.autogen import orm, schema -from tds.db import list_by_id, request_rdb - -logger = Logger(__file__) -router = APIRouter() - - -@router.get("/associations/{id}") -def get_association(id: int, rdb: Engine = Depends(request_rdb)) -> str: - """ - Get a specific association by ID - """ - with Session(rdb) as session: - result = session.query(orm.Association).get(id) - return result - - -@router.post("/associations") -def create_association(payload: schema.Association, rdb: Engine = Depends(request_rdb)): - """ - Create a association - """ - with Session(rdb) as session: - associationp = payload.dict() - del associationp["id"] - association = orm.Association(**associationp) - session.add(association) - session.commit() - data_id = association.id - associationp["id"] = data_id - return Response( - status_code=status.HTTP_201_CREATED, - headers={ - "content-type": "application/json", - }, - content=json.dumps(associationp), - ) - - -@router.patch("/associations/{id}") -def update_association( - payload: schema.Association, id: int, rdb: Engine = Depends(request_rdb) -) -> str: - """ - Update a association by ID - """ - with Session(rdb) as session: - data_payload = payload.dict(exclude_unset=True) - data_payload["id"] = id - logger.info(data_payload) - - data_to_update = session.query(orm.Association).filter(orm.Association.id == id) - data_to_update.update(data_payload) - session.commit() - return "Updated association" - - -@router.delete("/associations/{id}") -def delete_association(id: int, rdb: Engine = Depends(request_rdb)) -> str: - """ - Delete a association by ID - """ - with Session(rdb) as session: - session.query(orm.Association).filter(orm.Association.id == id).delete() - session.commit() - - -@router.get("") -def get_persons( - page_size: int = 100, page: int = 0, rdb: Engine = Depends(request_rdb) -): - """ - Page over persons - """ - return list_by_id(rdb.connect(), orm.Person, page_size, page) - - -@router.get("/{id}") -def get_person(id: int, rdb: Engine = Depends(request_rdb)) -> str: - """ - Get a specific person by ID - """ - with Session(rdb) as session: - return session.query(orm.Person).get(id) - - -@router.post("") -def create_person(payload: schema.Person, rdb: Engine = Depends(request_rdb)): - """ - Create a person - """ - with Session(rdb) as session: - personp = payload.dict() - del personp["id"] - person = orm.Person(**personp) - session.add(person) - session.commit() - data_id = person.id - personp["id"] = data_id - return Response( - status_code=status.HTTP_201_CREATED, - headers={ - "content-type": "application/json", - }, - content=json.dumps(personp), - ) - - -@router.patch("/{id}") -def update_person( - payload: schema.Person, id: int, rdb: Engine = Depends(request_rdb) -) -> str: - """ - Update a person by ID - """ - with Session(rdb) as session: - data_payload = payload.dict(exclude_unset=True) - data_payload["id"] = id - logger.info(data_payload) - - data_to_update = session.query(orm.Person).filter(orm.Person.id == id) - data_to_update.update(data_payload) - session.commit() - return "Updated Person" - - -@router.delete("/{id}") -def delete_person(id: int, rdb: Engine = Depends(request_rdb)): - """ - Delete a person by ID 
- """ - with Session(rdb) as session: - session.query(orm.Person).filter(orm.Person.id == id).delete() - session.commit() diff --git a/tds/routers/projects.py b/tds/routers/projects.py deleted file mode 100644 index c82fc5af8..000000000 --- a/tds/routers/projects.py +++ /dev/null @@ -1,309 +0,0 @@ -""" -CRUD operations for projects -""" - -import json -from logging import Logger -from typing import List, Optional - -from fastapi import APIRouter, Depends, HTTPException -from fastapi import Query as FastAPIQuery -from fastapi import Response, status -from sqlalchemy.engine.base import Engine -from sqlalchemy.orm import Query, Session - -from tds.autogen import orm -from tds.db import entry_exists, es_client, list_by_id, request_rdb -from tds.lib.projects import adjust_project_assets, save_project_assets -from tds.modules.artifact.response import artifact_response -from tds.modules.dataset.response import dataset_response -from tds.modules.model.utils import model_list_fields, model_list_response -from tds.modules.model_configuration.response import configuration_response -from tds.modules.simulation.response import simulation_response -from tds.modules.workflow.response import workflow_response -from tds.operation import create, delete, retrieve, update -from tds.schema.project import Project, ProjectMetadata -from tds.schema.resource import ResourceType, get_resource_orm, get_schema_description -from tds.settings import settings - -logger = Logger(__name__) -router = APIRouter() -es = es_client() - -es_list_response = { - ResourceType.models: {"function": model_list_response, "fields": model_list_fields}, - ResourceType.model_configurations: { - "function": configuration_response, - "fields": None, - }, - ResourceType.datasets: {"function": dataset_response, "fields": None}, - ResourceType.simulations: {"fields": None, "function": simulation_response}, - ResourceType.workflows: {"fields": None, "function": workflow_response}, - ResourceType.artifacts: {"fields": None, "function": artifact_response}, -} - -es_resources = [ - ResourceType.datasets, - ResourceType.models, - ResourceType.model_configurations, - ResourceType.simulations, - ResourceType.workflows, - ResourceType.artifacts, -] - - -@router.get("") -def list_projects( - page_size: int = 50, page: int = 0, rdb: Engine = Depends(request_rdb) -) -> List[ProjectMetadata]: - """ - Retrieve all projects - """ - return list_by_id(rdb.connect(), orm.Project, page_size, page) - - -@router.get("/{id}", **retrieve.fastapi_endpoint_config) -def get_project(id: int, rdb: Engine = Depends(request_rdb)) -> Project: - """ - Retrieve project - """ - if entry_exists(rdb.connect(), orm.Project, id): - with Session(rdb) as session: - project = session.query(orm.Project).get(id) - parameters: Query[orm.ProjectAsset] = session.query( - orm.ProjectAsset - ).filter(orm.ProjectAsset.project_id == id) - else: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) - return Project.from_orm(project, list(parameters)) - - -@router.delete("/{id}", **retrieve.fastapi_endpoint_config) -def deactivate_project(id: int, rdb: Engine = Depends(request_rdb)) -> Project: - """ - Deactivate project - """ - if entry_exists(rdb.connect(), orm.Project, id): - with Session(rdb) as session: - project = session.query(orm.Project).get(id) - - # set to dict and active to false - project_ = project.__dict__ - project_.pop("_sa_instance_state") - project_["active"] = False - - with Session(rdb) as session: - session.query(orm.Project).filter(orm.Project.id == 
id).update(project_) - session.commit() - - else: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) - return Response( - headers={ - "content-type": "application/json", - }, - content=json.dumps({"id": id, "status": project_["active"]}), - ) - - -@router.post("", **create.fastapi_endpoint_config) -def create_project(payload: Project, rdb: Engine = Depends(request_rdb)) -> Response: - """ - Create project and return its ID - """ - with Session(rdb) as session: - project_payload = payload.dict() - # pylint: disable-next=unused-variable - concept_payload = project_payload.pop("concept") # TODO: Save ontology term - assets = project_payload.pop("assets") - for resource_type in assets: - current_orm = get_resource_orm(resource_type) - if not all( - ( - entry_exists(rdb.connect(), current_orm, id) - for id in assets[resource_type] - ) - ): - raise HTTPException( - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - detail="Not all listed assets exist.", - ) - project = orm.Project(**project_payload) - session.add(project) - session.commit() - id: int = project.id - save_project_assets(id, assets, session) - session.commit() - logger.info("new project created: %i", id) - return Response( - status_code=status.HTTP_201_CREATED, - headers={ - "location": f"/api/projects/{id}", - "content-type": "application/json", - }, - content=json.dumps({"id": id}), - ) - - -@router.put("/{id}", **update.fastapi_endpoint_config) -def update_project( - id: int, payload: Project, rdb: Engine = Depends(request_rdb) -) -> Response: - """ - Update project - """ - if entry_exists(rdb.connect(), orm.Project, id): - project_payload = payload.dict() - project_payload.pop("concept") # TODO: Save ontology term - project_payload.pop("id") - assets = project_payload.pop("assets") - with Session(rdb) as session: - session.query(orm.Project).filter(orm.Project.id == id).update( - project_payload - ) - adjust_project_assets(id, assets, session) - session.commit() - else: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) - return Response( - headers={ - "content-type": "application/json", - }, - content=json.dumps({"id": id}), - ) - - -@router.delete( - "/{project_id}/assets/{resource_type}/{resource_id}", - **delete.fastapi_endpoint_config, -) -def delete_asset( - project_id: int, - resource_type: ResourceType, - resource_id: int | str, - rdb: Engine = Depends(request_rdb), -) -> Response: - """ - Remove asset - """ - with Session(rdb) as session: - project_assets = list( - session.query(orm.ProjectAsset).filter( - orm.ProjectAsset.project_id == project_id, - orm.ProjectAsset.resource_type == resource_type, - orm.ProjectAsset.resource_id == str(resource_id), - ) - ) - if len(project_assets) == 0: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) - session.delete(project_assets[0]) - session.commit() - return Response( - status_code=status.HTTP_204_NO_CONTENT, - ) - - -@router.post( - "/{project_id}/assets/{resource_type}/{resource_id}", - **create.fastapi_endpoint_config, -) -def create_asset( - project_id: int, - resource_type: ResourceType, - resource_id: int | str, - rdb: Engine = Depends(request_rdb), -) -> Response: - """ - Create asset and return its ID - """ - with Session(rdb) as session: - identical_count = ( - session.query(orm.ProjectAsset) - .filter( - orm.ProjectAsset.project_id == project_id, - orm.ProjectAsset.resource_id == str(resource_id), - orm.ProjectAsset.resource_type == resource_type, - ) - .count() - ) - - if identical_count == 0: - project_asset = orm.ProjectAsset( - 
                project_id=project_id,
-                resource_id=str(resource_id),
-                resource_type=resource_type,
-            )
-            session.add(project_asset)
-            session.commit()
-            id: int = project_asset.id
-
-            logger.info("new asset created: %i", id)
-            return Response(
-                status_code=status.HTTP_201_CREATED,
-                headers={
-                    "content-type": "application/json",
-                },
-                content=json.dumps({"id": id}),
-            )
-    return Response(status.HTTP_409_CONFLICT)
-
-
-@router.get("/{id}/assets", **retrieve.fastapi_endpoint_config)
-def get_project_assets(
-    id: int,
-    types: Optional[List[ResourceType]] = FastAPIQuery(
-        default=[
-            ResourceType.datasets,
-            ResourceType.models,
-            ResourceType.model_configurations,
-            ResourceType.publications,
-            ResourceType.simulations,
-            ResourceType.workflows,
-            ResourceType.artifacts,
-        ]
-    ),
-    rdb: Engine = Depends(request_rdb),
-):
-    """
-    Retrieve project assets
-    """
-    if entry_exists(rdb.connect(), orm.Project, id):
-        with Session(rdb) as session:
-            # project = session.query(orm.Project).get(id)
-            assets: Query[orm.ProjectAsset] = session.query(orm.ProjectAsset).filter(
-                orm.ProjectAsset.project_id == id
-            )
-            assets_key_ids = {type: [] for type in types}
-            for asset in list(assets):
-                if asset.resource_type in types:
-                    assets_key_ids[asset.resource_type].append(asset.resource_id)
-
-            assets_key_objects = {}
-            for key in assets_key_ids:
-                orm_type = get_resource_orm(key)
-                orm_schema = get_schema_description(key)
-                if key in es_resources:
-                    responder = es_list_response[key]
-                    index_singular = key if key[-1] != "s" else key.rstrip("s")
-                    index = f"{settings.ES_INDEX_PREFIX}{index_singular}"
-                    es_items = es.search(
-                        index=index,
-                        size=1000,
-                        query={"ids": {"values": assets_key_ids[key]}},
-                        fields=responder["fields"],
-                    )
-                    assets_key_objects[key] = (
-                        []
-                        if es_items["hits"]["total"]["value"] == 0
-                        else responder["function"](es_items["hits"]["hits"])
-                    )
-                else:
-                    assets_key_objects[key] = [
-                        orm_schema.from_orm(asset)
-                        for asset in session.query(orm_type).filter(
-                            orm_type.id.in_(assets_key_ids[key])
-                        )
-                    ]
-    else:
-        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
-    return assets_key_objects
diff --git a/tds/schema/concept.py b/tds/schema/concept.py
index d692c098a..44cdd239e 100644
--- a/tds/schema/concept.py
+++ b/tds/schema/concept.py
@@ -4,12 +4,12 @@
 # pylint: disable=missing-class-docstring, too-few-public-methods
 from pydantic import BaseModel
 
-from tds.autogen import schema
+from tds.modules.concept.model import OntologicalField
 
 
 class Concept(BaseModel):
     curie: str
-    status: schema.OntologicalField
+    status: OntologicalField
 
     class Config:
         orm_mode = True
diff --git a/tds/schema/dataset.py b/tds/schema/dataset.py
index 5f1c404dd..344f4b9e7 100644
--- a/tds/schema/dataset.py
+++ b/tds/schema/dataset.py
@@ -4,11 +4,11 @@
 # pylint: disable=missing-class-docstring, too-few-public-methods
 from typing import List, Optional
 
-from tds.autogen import schema
+from tds.modules.dataset.model import FeaturePayload, QualifierPayload
 from tds.schema.concept import Concept
 
 
-class Qualifier(schema.Qualifier):
+class Qualifier(QualifierPayload):
     feature_names: List[str]
     concept: Optional[Concept]
 
@@ -16,7 +16,7 @@ class Config:
         orm_mode = True
 
 
-class Feature(schema.Feature):
+class Feature(FeaturePayload):
     concept: Optional[Concept]
 
     class Config:
diff --git a/tds/schema/model.py b/tds/schema/model.py
index eb5948087..b5308f638 100644
--- a/tds/schema/model.py
+++ b/tds/schema/model.py
@@ -7,7 +7,7 @@
 from fastapi.encoders import jsonable_encoder
 from pydantic import BaseModel
 
-from tds.autogen import schema
+from tds.modules.model.model import ModelFrameworkPayload
 
 ModelParameters = List[Dict]
 
@@ -59,6 +59,6 @@ class Config:
         }
 
 
-class ModelFramework(schema.ModelFramework):
+class ModelFramework(ModelFrameworkPayload):
     class Config:
         orm_mode = True
diff --git a/tds/schema/project.py b/tds/schema/project.py
index d6f5f1dff..fc56ba71c 100644
--- a/tds/schema/project.py
+++ b/tds/schema/project.py
@@ -4,23 +4,25 @@
 # pylint: disable=missing-class-docstring
 from typing import Dict, List, Optional, Set
 
-from tds.autogen import orm, schema
+from tds.db.enums import ResourceType
+from tds.modules.project.model import Project as ProjectModel
+from tds.modules.project.model import ProjectAsset, ProjectPayload
 from tds.schema.concept import Concept
 
 
-class Project(schema.Project):
+class Project(ProjectPayload):
     concept: Optional[Concept] = None
     active = True
-    assets: Dict[schema.ResourceType, Set[int | str]] = {}
+    assets: Dict[ResourceType, Set[int | str]] = {}
 
     @classmethod
     def from_orm(
-        cls, body: orm.Project, project_assets: List[orm.ProjectAsset]
+        cls, body: ProjectModel, project_assets: List[ProjectAsset]
     ) -> "Project":
         """
         Handle the creation of asset dict
         """
-        assets = {type: [] for type in schema.ResourceType}
+        assets = {type: [] for type in ResourceType}
         for asset in project_assets:
             assets[asset.resource_type].append(asset.resource_id)
 
@@ -39,7 +41,7 @@ class Config:
         }
 
 
-class ProjectMetadata(schema.Project):
+class ProjectMetadata(ProjectPayload):
     concept: Optional[Concept] = None
 
     class Config:
diff --git a/tds/schema/provenance.py b/tds/schema/provenance.py
index 11a5271ba..ad268a121 100644
--- a/tds/schema/provenance.py
+++ b/tds/schema/provenance.py
@@ -6,10 +6,11 @@
 from pydantic import BaseModel, Field
 
 # pylint: disable=missing-class-docstring
-from tds.autogen import schema
+from tds.db.enums import ProvenanceType
+from tds.modules.provenance.model import Provenance as ProvenanceModel
 
 
-class Provenance(schema.Provenance):
+class Provenance(ProvenanceModel):
     class Config:
         orm_mode = True
 
@@ -22,15 +23,15 @@ class NodeSchema(BaseModel):
 
 class ProvenancePayload(BaseModel):
     root_id: Optional[str] = Field(default=1)
-    root_type: Optional[schema.ProvenanceType] = Field(default="Publication")
+    root_type: Optional[ProvenanceType] = Field(default="Publication")
     user_id: Optional[int]
     curie: Optional[str]
     edges: Optional[bool] = Field(default=False)
     nodes: Optional[bool] = Field(default=True)
-    types: List[schema.ProvenanceType] = Field(
+    types: List[ProvenanceType] = Field(
         default=[
             type
-            for type in schema.ProvenanceType
+            for type in ProvenanceType
             if type not in ["Concept", "ModelRevision", "Project"]
         ]
     )
@@ -39,12 +40,12 @@ class ProvenancePayload(BaseModel):
     verbose: Optional[bool] = Field(default=False)
 
 
-provenance_type_to_abbr: Dict[Type[schema.ProvenanceType], str] = {
-    schema.ProvenanceType.Dataset: "Ds",
-    schema.ProvenanceType.Model: "Md",
-    schema.ProvenanceType.ModelConfiguration: "Mc",
-    schema.ProvenanceType.Publication: "Pu",
-    schema.ProvenanceType.Simulation: "Si",
-    schema.ProvenanceType.Project: "Pr",
-    schema.ProvenanceType.Concept: "Cn",
+provenance_type_to_abbr: Dict[Type[ProvenanceType], str] = {
+    ProvenanceType.Dataset: "Ds",
+    ProvenanceType.Model: "Md",
+    ProvenanceType.ModelConfiguration: "Mc",
+    ProvenanceType.Publication: "Pu",
+    ProvenanceType.Simulation: "Si",
+    ProvenanceType.Project: "Pr",
+    ProvenanceType.Concept: "Cn",
 }
diff --git a/tds/schema/resource.py b/tds/schema/resource.py
index 58cb68633..54e1924d5 100644
--- a/tds/schema/resource.py
+++ b/tds/schema/resource.py
@@ -6,29 +6,30 @@
 from collections import defaultdict
 from typing import Dict, Optional, Type
 
-from tds.autogen import orm, schema
-from tds.autogen.schema import ResourceType
+from tds.db.enums import ResourceType
 from tds.modules.artifact.model import Artifact
 from tds.modules.dataset.model import Dataset
+from tds.modules.external.model import Publication as PublicationModel
+from tds.modules.external.model import PublicationPayload, SoftwarePayload
 from tds.modules.model.model import Model
 from tds.modules.model_configuration.model import ModelConfiguration
 from tds.modules.simulation.model import Simulation
 from tds.modules.workflow.model import Workflow
 
 
-class Publication(schema.Publication):
+class Publication(PublicationPayload):
     class Config:
         orm_mode = True
 
 
-class Software(schema.Software):
+class Software(SoftwarePayload):
     class Config:
         orm_mode = True
 
 
 Resource = Dataset | Model | ModelConfiguration | Publication | Simulation | Workflow
 
-ORMResource = Dataset | orm.Publication | Simulation
+ORMResource = Dataset | PublicationModel | Simulation
 
 obj_to_enum: Dict[Type[Resource], ResourceType] = {
     Dataset: ResourceType.datasets,
@@ -82,7 +83,7 @@ def get_resource_orm(resource_type: ResourceType) -> Optional[ORMResource]:
         lambda: None,
         {
             ResourceType.datasets: Dataset,
-            ResourceType.publications: orm.Publication,
+            ResourceType.publications: PublicationModel,
            ResourceType.simulations: Simulation,
         },
     )
diff --git a/tds/server/build.py b/tds/server/build.py
index 219044314..8778bd1f5 100644
--- a/tds/server/build.py
+++ b/tds/server/build.py
@@ -4,7 +4,6 @@
 
 from importlib import import_module, metadata
 from pkgutil import iter_modules
-from typing import List
 
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
@@ -12,33 +11,6 @@
 API_DESCRIPTION = "TDS handles data between TERArium and other ASKEM components."
 
 
-def find_valid_routers() -> List[str]:
-    """
-    Generate list of module names that are possible to import
-    """
-    router = import_module("tds.routers")
-    return [module.name for module in iter_modules(router.__path__)]
-
-
-def attach_router(api: FastAPI, router_name: str) -> None:
-    """
-    Import router module dynamically and attach it to the API
-
-    At runtime, the routes to be used can be specified instead of
-    being hardcoded.
-    """
-    router_package = import_module(f"tds.routers.{router_name}")
-    api.include_router(
-        router_package.router, tags=[router_name], prefix="/" + router_name
-    )
-
-    if api.openapi_tags is None:
-        api.openapi_tags = []
-    api.openapi_tags.append(
-        {"name": router_name, "description": router_package.__doc__}
-    )
-
-
 def load_module_routers(api):
     """
     Function loads the module router objects and registers them with FastAPI.
@@ -46,12 +18,13 @@
     modules = import_module("tds.modules")
     for mod in iter_modules(modules.__path__):
         module = import_module(f"tds.modules.{mod.name}")
-        api.include_router(
-            module.router, tags=module.TAGS, prefix="/" + module.ROUTE_PREFIX
-        )
+        if hasattr(module, "router"):
+            api.include_router(
+                module.router, tags=module.TAGS, prefix="/" + module.ROUTE_PREFIX
+            )
 
 
-def build_api(*args: str) -> FastAPI:
+def build_api() -> FastAPI:
     """
     Build an API using a group of specified router modules
     """
@@ -76,7 +49,4 @@
     # Load routers from the modules package.
     load_module_routers(api)
 
-    for router_name in args if len(args) != 0 else find_valid_routers():
-        attach_router(api, router_name)
-
     return api
diff --git a/tests/service.py b/tests/service.py
index 43730302a..550699b11 100644
--- a/tests/service.py
+++ b/tests/service.py
@@ -7,8 +7,8 @@
 from sqlalchemy.orm import Session
 
 from tds.autogen import orm
-from tds.autogen.schema import ResourceType, ValueType
-from tds.schema.model import ModelFramework
+from tds.db.enums import ResourceType, ValueType
+from tds.modules.model.model import ModelFramework
 from tds.schema.resource import Publication, Software
 from tests.suite import AllowedMethod
 from tests.suite import ASKEMEntityTestSuite as AETS
@@ -22,7 +22,7 @@ class TestProject(AETS):
     def init_test_data(self):
         with Session(self.rdb) as session:
             # Arrange Models
-            framework = orm.ModelFramework(name="dummy", version="v0", semantics="")
+            framework = ModelFramework(name="dummy", version="v0", semantics="")
             session.add(framework)
             session.commit()
             state = orm.ModelState(content="")
@@ -126,7 +126,7 @@ class TestRun(AETS):
     def init_test_data(self):
         with Session(self.rdb) as session:
             # Arrange Model
-            framework = orm.ModelFramework(name="dummy", version="v0", semantics="")
+            framework = ModelFramework(name="dummy", version="v0", semantics="")
             session.add(framework)
             session.commit()
             state = orm.ModelState(content="")
@@ -215,7 +215,7 @@ class TestModelConfig(AETS):
     def init_test_data(self):
         with Session(self.rdb) as session:
             # Arrange Model
-            framework = orm.ModelFramework(name="dummy", version="v0", semantics="")
+            framework = ModelFramework(name="dummy", version="v0", semantics="")
             session.add(framework)
             session.commit()
             state = orm.ModelState(content="")
@@ -284,7 +284,7 @@ class TestModel(AETS):
 
     def init_test_data(self):
         with Session(self.rdb) as session:
-            framework = orm.ModelFramework(name="dummy", version="v0", semantics="")
+            framework = ModelFramework(name="dummy", version="v0", semantics="")
             session.add(framework)
             session.commit()
             state = orm.ModelState(content="")
@@ -375,7 +375,7 @@ class TestFramework(AETS):
 
     def init_test_data(self):
         with Session(self.rdb) as session:
-            framework = orm.ModelFramework(name="dummy", version="v0", semantics="")
+            framework = ModelFramework(name="dummy", version="v0", semantics="")
             session.add(framework)
             session.commit()