Skip to content

Commit

Permalink
finishing image pipeline
Browse files Browse the repository at this point in the history
  • Loading branch information
AhmadHAW committed Nov 19, 2024
1 parent 5932784 commit d1a6f84
Show file tree
Hide file tree
Showing 31 changed files with 483 additions and 271 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ repos:
hooks:
- id: pyright
name: "Pyright"
entry: bash -c 'ENV_NAME=dats source backend/_activate_current_env.sh && pyright'
entry: bash -c 'PYRIGHT_PYTHON_PYLANCE_VERSION=2024.10.1 ENV_NAME=dats source backend/_activate_current_env.sh && pyright'
language: system
- repo: https://github.com/pre-commit/mirrors-eslint
rev: v9.11.0
Expand Down
2 changes: 1 addition & 1 deletion backend/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ JWT_SECRET=

# Where to store uploaded files.
# <path_to_dats_repo>/docker/backend_repo
REPO_ROOT=/insert_path_to_dats_repo/docker/backend_repo
SHARED_REPO_ROOT=/insert_path_to_dats_repo/docker/backend_repo

# The system user is automatically created and owns automatically generated data.
SYSTEM_USER_EMAIL="SYSTEM@dats.org"
Expand Down
10 changes: 5 additions & 5 deletions backend/src/api/endpoints/import_.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ def start_import_codes_job(
)
user_id = authz_user.user.id
filename = f"import_user_code_{user_id}_{proj_id}.csv"
filepath = repo._get_dst_path_for_temp_file(filename)
filepath = repo.get_dst_path_for_temp_file(filename)
filepath = repo.store_uploaded_file(
uploaded_file=uploaded_file, filepath=filepath, fn=filename
)
Expand Down Expand Up @@ -90,7 +90,7 @@ def start_import_tags_job(
)
user_id = authz_user.user.id
filename = f"import_tags_{user_id}_{proj_id}.csv"
filepath = repo._get_dst_path_for_temp_file(filename)
filepath = repo.get_dst_path_for_temp_file(filename)
filepath = repo.store_uploaded_file(
uploaded_file=uploaded_file, filepath=filepath, fn=filename
)
Expand Down Expand Up @@ -129,7 +129,7 @@ def start_import_project_project_metadata_job(
)
user_id = authz_user.user.id
filename = f"import_project_project_metadata_{proj_id}.csv"
filepath = repo._get_dst_path_for_temp_file(filename)
filepath = repo.get_dst_path_for_temp_file(filename)
filepath = repo.store_uploaded_file(
uploaded_file=uploaded_file, filepath=filepath, fn=filename
)
Expand Down Expand Up @@ -166,7 +166,7 @@ def start_import_project_metadata_job(
user_id = current_user.id
random_temp_project_name = str(uuid.uuid4())
filename = f"import_project_{random_temp_project_name}_for_user_{user_id}.json"
filepath = repo._get_dst_path_for_temp_file(filename)
filepath = repo.get_dst_path_for_temp_file(filename)
filepath = repo.store_uploaded_file(
uploaded_file=uploaded_file, filepath=filepath, fn=filename
)
Expand Down Expand Up @@ -206,7 +206,7 @@ def start_import_project_job(
user_id = current_user.id
random_temp_project_name = str(uuid.uuid4())
filename = f"import_project_{random_temp_project_name}_for_user_{user_id}.zip"
filepath = repo._get_dst_path_for_temp_file(filename)
filepath = repo.get_dst_path_for_temp_file(filename)
filepath = repo.store_uploaded_file(
uploaded_file=uploaded_file, filepath=filepath, fn=filename
)
Expand Down
11 changes: 11 additions & 0 deletions backend/src/app/core/data/dto/code.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,3 +41,14 @@ class CodeRead(CodeBaseDTO):
updated: datetime = Field(description="Updated timestamp of the Code")
is_system: bool = Field(description="Is the Code a system code")
model_config = ConfigDict(from_attributes=True)


# Properties for importing Codes
class CodeImport(BaseModel):
    # Flat, database-ID-free representation of a Code as it appears in an
    # import file: the code hierarchy is expressed by naming the parent
    # (parent_name) rather than referencing it by ID.
    # NOTE: intentionally commented with '#' instead of a docstring — pydantic
    # would surface a class docstring as the model's schema description.
    name: str = Field(description="Name of the Code")
    color: str = Field(description="Color of the Code")
    description: str = Field(description="Description of the Code")
    # None for top-level codes that have no parent.
    parent_name: Optional[str] = Field(
        description="Name of the Parent Code", default=None
    )
    # presumably the original creation timestamp to preserve on import —
    # TODO(review): confirm against the import job that consumes this DTO.
    created: datetime = Field(description="Created timestamp of the Code")
26 changes: 17 additions & 9 deletions backend/src/app/core/data/export/export_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -201,18 +201,26 @@ def __get_sdocs_metadata_for_export(
sdoc_ids: Optional[List[int]] = None,
sdocs: Optional[List[SourceDocumentRead]] = None,
) -> List[Dict[str, Any]]:
# TODO Flo: paging for too many docs
if sdocs is None:
if sdoc_ids is None:
if sdoc_ids is None:
if sdocs is None:
raise ValueError("Either IDs or DTOs must be not None")
sdocs = [
SourceDocumentRead.model_validate(sdoc)
for sdoc in crud_sdoc.read_by_ids(db=db, ids=sdoc_ids)
]
sdoc_ids = list(map(lambda sdoc: sdoc.id, sdocs))

sdoc_orms = crud_sdoc.read_by_ids(db=db, ids=sdoc_ids)

if sdocs is None:
sdocs = [SourceDocumentRead.model_validate(sdoc) for sdoc in sdoc_orms]

sdoc_tags: Dict[int, List[DocumentTagORM]] = {sdoc.id: [] for sdoc in sdocs}
for sdoc_orm in sdoc_orms:
for tag in sdoc_orm.document_tags:
sdoc_tags[sdoc_orm.id].append(tag)

exported_sdocs_metadata = []

for sdoc in sdocs:
sdoc_metadatas = crud_sdoc_meta.read_by_sdoc(db=db, sdoc_id=sdoc.id)
sdoc_tags = crud_project.read(db=db, id=sdoc.project_id).document_tags
logger.info(f"export sdoc tags: {sdoc_tags[sdoc.id]} for {sdoc.filename}")
sdoc_metadata_dtos = [
SourceDocumentMetadataReadResolved.model_validate(sdoc_metadata)
for sdoc_metadata in sdoc_metadatas
Expand All @@ -228,7 +236,7 @@ def __get_sdocs_metadata_for_export(
"filename": sdoc.filename,
"doctype": sdoc.doctype,
"metadata": metadata_dict,
"tags": [tag.name for tag in sdoc_tags],
"tags": [tag.name for tag in sdoc_tags[sdoc.id]],
}
)

Expand Down
Loading

0 comments on commit d1a6f84

Please sign in to comment.