Commit
Merge branch 'main' into alchemiscale-fah
dotsdl authored Jun 13, 2024
2 parents ff50735 + 4f2a226 commit 2763efc
Showing 11 changed files with 63 additions and 22 deletions.
20 changes: 15 additions & 5 deletions alchemiscale/interface/api.py
@@ -7,10 +7,13 @@
 from typing import Dict, List, Optional, Union
 from collections import Counter

-from fastapi import FastAPI, APIRouter, Body, Depends, HTTPException
+from fastapi import FastAPI, APIRouter, Body, Depends, HTTPException, Request
 from fastapi import status as http_status
 from fastapi.middleware.gzip import GZipMiddleware

+import json
+from gufe.tokenization import JSON_HANDLER
+
 from ..base.api import (
     GufeJSONResponse,
     scope_params,
@@ -100,18 +103,25 @@ def check_existence(


 @router.post("/networks", response_model=ScopedKey)
-def create_network(
+async def create_network(
     *,
-    network: List = Body(embed=True),
-    scope: Scope = Body(embed=True),
-    state: str = Body(embed=True),
+    request: Request,
     n4js: Neo4jStore = Depends(get_n4js_depends),
     token: TokenData = Depends(get_token_data_depends),
 ):
+    # we handle the request directly so we can decode with custom JSON decoder
+    # this is important for properly handling GUFE objects
+    body = await request.body()
+    body_ = json.loads(body.decode("utf-8"), cls=JSON_HANDLER.decoder)
+
+    scope = Scope.parse_obj(body_["scope"])
     validate_scopes(scope, token)

+    network = body_["network"]
     an = KeyedChain(network).to_gufe()

+    state = body_["state"]
+
     try:
         an_sk, _, _ = n4js.assemble_network(network=an, scope=scope, state=state)
     except ValueError as e:
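The handler above now reads the raw request body and decodes it with gufe's custom JSON machinery so that GUFE objects survive deserialization. A minimal sketch of the symmetric encode/decode pair this relies on — the encoder-side call is an assumption for illustration; the commit itself only shows the decoder being applied in create_network:

    import json

    from gufe.tokenization import JSON_HANDLER

    def encode_body(payload: dict) -> bytes:
        # client-side sketch (assumption): serialize with gufe's custom encoder so
        # GUFE-specific values embedded in `payload` survive JSON serialization
        return json.dumps(payload, cls=JSON_HANDLER.encoder).encode("utf-8")

    def decode_body(body: bytes) -> dict:
        # mirrors the new create_network handler: the matching decoder restores them
        return json.loads(body.decode("utf-8"), cls=JSON_HANDLER.decoder)

With such a pair, a body of the form {"network": <keyed-chain list>, "scope": ..., "state": ...} round-trips cleanly, after which the handler rebuilds the network with KeyedChain(network).to_gufe().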
27 changes: 17 additions & 10 deletions alchemiscale/storage/statestore.py
@@ -862,19 +862,26 @@ def query_networks(
             gufe_key_pattern=None if key is None else str(key),
         )

+        where_params = dict(
+            name_pattern="an.name",
+            org_pattern="an.`_org`",
+            campaign_pattern="an.`_campaign`",
+            project_pattern="an.`_project`",
+            state_pattern="nm.state",
+            gufe_key_pattern="an.`_gufe_key`",
+        )
+
+        conditions = []
+
         for k, v in query_params.items():
-            if v is None:
-                query_params[k] = ".*"
+            if v is not None:
+                conditions.append(f"{where_params[k]} =~ ${k}")

-        q = """
+        where_clause = "WHERE " + " AND ".join(conditions) if len(conditions) else ""
+
+        q = f"""
         MATCH (an:AlchemicalNetwork)<-[:MARKS]-(nm:NetworkMark)
-        WHERE
-            an.name =~ $name_pattern
-        AND an.`_gufe_key` =~ $gufe_key_pattern
-        AND an.`_org` =~ $org_pattern
-        AND an.`_campaign` =~ $campaign_pattern
-        AND an.`_project` =~ $project_pattern
-        AND nm.state =~ $state_pattern
+        {where_clause}
         RETURN an._scoped_key as sk
         """

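The change above stops defaulting unspecified query parameters to the ".*" regex and instead assembles the Cypher WHERE clause only from parameters the caller actually supplied. A small standalone sketch of that construction, using hypothetical parameter values:

    # mirror the where-clause construction from query_networks (values are hypothetical)
    query_params = dict(
        name_pattern="tyk2.*",      # caller filtered by network name...
        org_pattern=None,
        campaign_pattern=None,
        project_pattern=None,
        state_pattern="active",     # ...and by network state
        gufe_key_pattern=None,
    )

    where_params = dict(
        name_pattern="an.name",
        org_pattern="an.`_org`",
        campaign_pattern="an.`_campaign`",
        project_pattern="an.`_project`",
        state_pattern="nm.state",
        gufe_key_pattern="an.`_gufe_key`",
    )

    conditions = [f"{where_params[k]} =~ ${k}" for k, v in query_params.items() if v is not None]
    where_clause = "WHERE " + " AND ".join(conditions) if len(conditions) else ""

    print(where_clause)
    # WHERE an.name =~ $name_pattern AND nm.state =~ $state_pattern

Parameters that were not supplied simply never appear in the query, so the database no longer has to evaluate a ".*" regex against every property.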
@@ -162,10 +162,17 @@ def test_claim_taskhub_task(

         taskhub_sks = compute_client.query_taskhubs([scope_test])

+        remaining_tasks = n4js_preloaded.get_taskhub_unclaimed_tasks(taskhub_sks[0])
+        assert len(remaining_tasks) == 3
+
         # claim a single task; should get highest priority task
         task_sks = compute_client.claim_taskhub_tasks(
             taskhub_sks[0], compute_service_id=compute_service_id
         )

+        remaining_tasks = n4js_preloaded.get_taskhub_unclaimed_tasks(taskhub_sks[0])
+        assert len(remaining_tasks) == 2
+
+        all_tasks = n4js_preloaded.get_taskhub_tasks(taskhub_sks[0], return_gufe=True)

         assert len(task_sks) == 1
4 changes: 3 additions & 1 deletion alchemiscale/tests/integration/compute/conftest.py
@@ -81,8 +81,10 @@ def n4js_preloaded(
     n4js: Neo4jStore = n4js_fresh

     # Set up tasks from select set of transformations
+    # we need to use second_network_an2 because its edges
+    # are a subset of network_tyk2's edges
     transformations = sorted(
-        filter(lambda x: type(x) is not NonTransformation, network_tyk2.edges)
+        filter(lambda x: type(x) is not NonTransformation, second_network_an2.edges)
     )[0:3]

     # set starting contents for many of the tests in this module
2 changes: 1 addition & 1 deletion alchemiscale/tests/integration/storage/test_statestore.py
@@ -206,7 +206,7 @@ def test_get_network(self, n4js, network_tyk2, scope_test):

     def test_query_networks(self, n4js, network_tyk2, scope_test, multiple_scopes):
         an = network_tyk2
-        an2 = AlchemicalNetwork(edges=list(an.edges)[:-2], name="incomplete")
+        an2 = AlchemicalNetwork(edges=list(an.edges)[:-2], name=None)

         sk: ScopedKey = n4js.assemble_network(an, scope_test)[0]
         sk2: ScopedKey = n4js.assemble_network(an2, scope_test)[0]
1 change: 1 addition & 0 deletions devtools/conda-envs/alchemiscale-client.yml
@@ -3,6 +3,7 @@ channels:
   - jaimergp/label/unsupported-cudatoolkit-shim
   - conda-forge
   - openeye
+
 dependencies:
   - pip
   - python =3.10
1 change: 1 addition & 0 deletions devtools/conda-envs/alchemiscale-compute.yml
@@ -2,6 +2,7 @@ name: alchemiscale-compute
 channels:
   - conda-forge
   - openeye
+
 dependencies:
   - pip
   - python =3.10
2 changes: 2 additions & 0 deletions devtools/conda-envs/alchemiscale-server.yml
@@ -3,13 +3,15 @@ channels:
   - jaimergp/label/unsupported-cudatoolkit-shim
   - conda-forge
   - openeye
+
 dependencies:
   - pip
   - python =3.10

   # alchemiscale dependencies
   - gufe=0.9.5
   - openfe=0.14.0
+
   - openmmforcefields>=0.12.0
   - requests
   - click
1 change: 1 addition & 0 deletions devtools/conda-envs/docs.yml
@@ -1,6 +1,7 @@
 name: alchemiscale-docs
 channels:
   - conda-forge
+
 dependencies:
   - sphinx>=5.0,<6
   - sphinx_rtd_theme
5 changes: 3 additions & 2 deletions devtools/conda-envs/test.yml
@@ -2,12 +2,13 @@ name: alchemiscale-test
 channels:
   - jaimergp/label/unsupported-cudatoolkit-shim
   - conda-forge
+
 dependencies:
   - pip

   # alchemiscale dependencies
-  - gufe>=0.9.5
-  - openfe>=0.14.0
+  - gufe>=1.0.0
+  - openfe>=1.0.1
   - openmmforcefields>=0.12.0
   - pydantic<2.0

15 changes: 12 additions & 3 deletions docs/operations.rst
@@ -69,15 +69,20 @@ Creating a database dump

 **With the Neo4j service shut down**, navigate to the directory containing your database data, set ``$BACKUPS_DIR`` to the absolute path of your choice and ``$NEO4J_VERSION`` to the version of Neo4j you are using, then run::

+    # create the dump `neo4j.dump`
     docker run --rm \
         -v $(pwd):/var/lib/neo4j/data \
         -v ${BACKUPS_DIR}:/tmp \
         --entrypoint /bin/bash \
         neo4j:${NEO4J_VERSION} \
-        -c "neo4j-admin dump --to /tmp/neo4j-$(date -I).dump"
+        -c "neo4j-admin database dump --to-path /tmp neo4j"

-This will create a new database dump in the ``$BACKUPS_DIR`` directory.
+    # create a copy of the dump with a timestamp
+    cp ${BACKUPS_DIR}/neo4j.dump ${BACKUPS_DIR}/neo4j-$(date -I).dump

+This will create a new database dump in the ``$BACKUPS_DIR`` directory.
+Note that this command will fail if ``neo4j.dump`` already exists in this directory.
+It is recommended to copy this file to one with a timestamp (e.g. ``neo4j-$(date -I).dump``), as above.

 Restoring from a database dump
 ==============================
@@ -86,12 +91,16 @@ To later restore from a database dump, navigate to the directory containing your

 **With the Neo4j service shut down**, choose ``$DUMP_DATE`` and set ``$NEO4J_VERSION`` to the version of Neo4j you are using, then run::

+    # create a copy of the timestamped dump to `neo4j.dump`
+    cp ${BACKUPS_DIR}/neo4j-$(date -I).dump ${BACKUPS_DIR}/neo4j.dump
+
+    # load the dump `neo4j.dump`
     docker run --rm \
         -v $(pwd):/var/lib/neo4j/data \
         -v ${BACKUPS_DIR}:/tmp \
         --entrypoint /bin/bash \
         neo4j:${NEO4J_VERSION} \
-        -c "neo4j-admin load --from /tmp/neo4j-${DUMP_DATE}.dump"
+        -c "neo4j-admin database load --from-path=/tmp neo4j"

 You may need to perform a ``chown -R`` following this operation to set correct ownership of the newly-loaded database contents.

Expand Down
