diff --git a/.github/workflows/build_docker.yml b/.github/workflows/build_docker.yml new file mode 100644 index 0000000..bda975b --- /dev/null +++ b/.github/workflows/build_docker.yml @@ -0,0 +1,31 @@ +name: Build and push image to DockerHub + +on: + push: + branches: + - "main" + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build and push + uses: docker/build-push-action@v5 + with: + context: . + file: ./Dockerfile + push: true + tags: ${{ secrets.DOCKERHUB_USERNAME }}/digest:nightly diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..c021c9a --- /dev/null +++ b/Dockerfile @@ -0,0 +1,15 @@ +FROM python:3.10 + +# Create parent directory to have a place to mount pre-loaded digest files (currently the nipoppy-qpn repo) +# TODO: Revisit this with longer-term solution to handle available digests (e.g., QPN as submodule?) +WORKDIR /app/code + +COPY ./requirements.txt /app/code/requirements.txt + +RUN pip install --upgrade pip +RUN pip install --no-cache-dir --upgrade -r /app/code/requirements.txt + +COPY ./digest /app/code/digest +COPY ./schemas /app/code/schemas + +CMD ["gunicorn", "digest.app:server", "-b", "0.0.0.0:8050", "--workers", "4", "--threads", "2"] diff --git a/README.md b/README.md index 8dbe5e5..c844e66 100644 --- a/README.md +++ b/README.md @@ -38,6 +38,25 @@ In brief, generating a `bagel.csv` for your dataset can be as simple as: - To see help text for this script: `python run_tracker.py --help` - This step can be repeated as needed to update the `bagel.csv` with newly processed subjects +## Running in a Docker container + +1. 
+To get the most recent changes, pull the `neurobagel/digest` Docker image tagged `nightly`: +```bash +docker pull neurobagel/digest:nightly +``` + +2. Currently, `digest` also relies on a local copy of the [`nipoppy-qpn`](https://github.com/neurodatascience/nipoppy-qpn) repository, which contains ready-to-use `digest` files that are automatically generated for the Quebec Parkinson Network data. +```bash +git clone https://github.com/neurodatascience/nipoppy-qpn.git +``` + +3. Run `digest` and mount the `nipoppy-qpn` directory into the container: +```bash +docker run -d -p 8050:8050 -v ${PWD}/nipoppy-qpn:/app/nipoppy-qpn neurobagel/digest:nightly +``` + +Now, the dashboard can be accessed at http://127.0.0.1:8050 on your local machine. + ## Local development To install `digest` from the source repository, run the following in a Python environment: ```bash diff --git a/digest/app.py b/digest/app.py index 40d69dc..a4b7911 100644 --- a/digest/app.py +++ b/digest/app.py @@ -59,7 +59,7 @@ def toggle_dataset_name_dialog( return not dialog_is_open, DEFAULT_DATASET_NAME, None # If the user loaded a preset file, do not open the dataset name modal, and get the name of the dataset - # from the preset dataset dictionary insteaad, based on the matching filename. + # from the preset dataset dictionary instead, based on the matching filename. for available_dataset in util.PUBLIC_DIGEST_FILE_PATHS.values(): relevant_digest_path = available_dataset.get( parsed_data.get("type")