Add logs access from API for admins
vemonet committed Oct 14, 2024
1 parent 373ca32 commit 9474370
Showing 3 changed files with 37 additions and 5 deletions.
12 changes: 12 additions & 0 deletions backend/src/config.py
@@ -1,3 +1,4 @@
import logging
import os
from dataclasses import dataclass, field

@@ -64,9 +65,20 @@ def token_endpoint(self) -> str:
    def admins_list(self) -> list[str]:
        return self.admins.split(",")

    @property
    def logs_filepath(self) -> str:
        return os.path.join(settings.data_folder, "logs.log")


settings = Settings()

# Disable uvicorn logs (this does not seem to do much)
uvicorn_error = logging.getLogger("uvicorn.error")
uvicorn_error.disabled = True
uvicorn_access = logging.getLogger("uvicorn.access")
uvicorn_access.disabled = True

logging.basicConfig(filename=settings.logs_filepath, level=logging.INFO, format="%(asctime)s - %(message)s")

# import warnings

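The comment above notes that disabling the uvicorn loggers does not seem to do much. A minimal alternative sketch, not part of this commit and assuming the goal is to capture uvicorn output in logs.log rather than silence it, would be to point those loggers at the same file handler:

import logging

file_handler = logging.FileHandler(settings.logs_filepath)
file_handler.setFormatter(logging.Formatter("%(asctime)s - %(message)s"))
for name in ("uvicorn.error", "uvicorn.access"):
    uvicorn_logger = logging.getLogger(name)
    uvicorn_logger.handlers = [file_handler]  # route uvicorn output to logs.log
    uvicorn_logger.propagate = False  # avoid duplicate lines via the root logger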
24 changes: 22 additions & 2 deletions backend/src/upload.py
@@ -1,4 +1,5 @@
import glob
import logging
import os
import shutil
from datetime import datetime
@@ -36,7 +37,7 @@ def publish_graph_to_endpoint(g: Graph, graph_uri: str | None = None) -> bool:
    # response = requests.post(url, headers=headers, data=graph_data)
    # Check response status and print result
    if not response.ok:
        print(f"Failed to upload data: {response.status_code}, {response.text}")
        logging.warning(f"Failed to upload data: {response.status_code}, {response.text}")
    return response.ok


@@ -308,6 +309,25 @@ def load_cohort_dict_file(dict_path: str, cohort_id: str) -> Dataset:
    return g


@router.post(
    "/get-logs",
    name="Get logs",
    response_description="Logs",
)
async def get_logs(
    user: Any = Depends(get_current_user),
) -> list[str]:
    """Return the API logs. Only admins can access this endpoint."""
    user_email = user["email"]
    if user_email not in settings.admins_list:
        raise HTTPException(status_code=403, detail="You need to be admin to perform this action.")
    with open(settings.logs_filepath) as log_file:
        logs = log_file.read()
    return logs.split("\n")


@router.post(
"/delete-cohort",
@@ -318,7 +338,7 @@ async def delete_cohort(
    user: Any = Depends(get_current_user),
    cohort_id: str = Form(...),
) -> dict[str, Any]:
    """Upload a cohort metadata file to the server and add its variables to the triplestore."""
    """Delete a cohort from the triplestore and delete its metadata file from the server."""
    user_email = user["email"]
    if user_email not in settings.admins_list:
        raise HTTPException(status_code=403, detail="You need to be admin to perform this action.")
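For illustration, a hedged usage sketch of the new get-logs endpoint added above; the base URL and Bearer token are placeholders, and it assumes the router is mounted at the API root, which may differ in the actual deployment:

import requests

API_URL = "https://example.org"  # placeholder base URL, not from this commit
response = requests.post(
    f"{API_URL}/get-logs",
    headers={"Authorization": "Bearer <admin-access-token>"},  # token of an admin user
)
response.raise_for_status()
for line in response.json():  # the endpoint returns the log file split on newlines
    print(line)

A non-admin token would receive the 403 raised by the endpoint.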
6 changes: 3 additions & 3 deletions backend/src/utils.py
@@ -1,3 +1,4 @@
import logging
from typing import Any

import curies
@@ -16,7 +17,6 @@

converter = curies.get_bioregistry_converter()


def init_graph(default_graph: str | None = None) -> Dataset:
"""Initialize a new RDF graph for nquads with the iCARE4CVD namespace bindings."""
g = Dataset(store="Oxigraph", default_graph_base=default_graph)
@@ -123,7 +123,7 @@ def retrieve_cohorts_metadata(user_email: str) -> dict[str, Cohort]:
    results = run_query(get_variables_query)["results"]["bindings"]
    cohorts_with_variables = {}
    cohorts_without_variables = {}
    # print(f"Get cohorts metadata query results: {len(results)}")
    logging.info(f"Get cohorts metadata query results: {len(results)}")
    for row in results:
        try:
            cohort_id = str(row["cohortId"]["value"])
@@ -190,6 +190,6 @@ def retrieve_cohorts_metadata(user_email: str) -> dict[str, Cohort]:
                if new_category not in target_dict[cohort_id].variables[var_id].categories:
                    target_dict[cohort_id].variables[var_id].categories.append(new_category)
        except Exception as e:
            print(f"Error processing row {row}: {e}")
            logging.warning(f"Error processing row {row}: {e}")
    # Merge dictionaries, cohorts with variables first
    return {**cohorts_with_variables, **cohorts_without_variables}
