diff --git a/.env.example b/.env.example index 92da2ba39..b3c946538 100644 --- a/.env.example +++ b/.env.example @@ -1,20 +1,39 @@ +COMPOSE_FILE=docker-compose.yml:docker-compose.override.local.yml + DEBUG=1 QFIELDCLOUD_HOST=localhost DJANGO_SETTINGS_MODULE=qfieldcloud.settings +DJANGO_ALLOWED_HOSTS=localhost 127.0.0.1 0.0.0.0 SECRET_KEY=change_me +LETSENCRYPT_EMAIL=info@opengis.ch +LETSENCRYPT_RSA_KEY_SIZE=4096 +# Set to 1 if you're testing your setup to avoid hitting request limits +LETSENCRYPT_STAGING=1 + STORAGE_ACCESS_KEY_ID=minioadmin STORAGE_SECRET_ACCESS_KEY=minioadmin STORAGE_BUCKET_NAME=qfieldcloud-local STORAGE_REGION_NAME= -# Internal URL to the storage endpoint (from python code) -STORAGE_ENDPOINT_URL=http://s3:9000 -# Public URL to the storage endpoint (external storage should be equivalent to STORAGE_ENDPOINT_URL, local development only, no trailing slash) -STORAGE_ENDPOINT_URL_EXTERNAL=http://localhost:80/minio -# Public port to the storage endpoint browser (local development only) -STORAGE_BROWSER_PORT=8010 + +# URL to the storage endpoint, either minio or external (e.g. S3). +# The URL must be reachable both from within docker and from the host; the default value is the `bridge` docker URL. +# Read more on https://docs.docker.com/network/network-tutorial-standalone/ . +# NOTE: to use minio on Windows/macOS, change the value to "http://host.docker.internal:8009" +# DEFAULT: http://172.17.0.1:8009 +STORAGE_ENDPOINT_URL=http://172.17.0.1:8009 + +# Public port to the minio API endpoint. It must match the configured port in `STORAGE_ENDPOINT_URL`. +# NOTE: active only when minio is configured as the storage endpoint. Mostly for local development. +# DEFAULT: 8009 +MINIO_API_PORT=8009 + +# Public port to the minio browser endpoint. +# NOTE: active only when minio is configured as the storage endpoint. Mostly for local development. +# DEFAULT: 8010 +MINIO_BROWSER_PORT=8010 WEB_HTTP_PORT=80 WEB_HTTPS_PORT=443 @@ -24,6 +43,8 @@ POSTGRES_PASSWORD=3shJDd2r7Twwkehb POSTGRES_DB=qfieldcloud_db POSTGRES_HOST=db POSTGRES_PORT=5432 +# "prefer" OR "require" most of the time +POSTGRES_SSLMODE=prefer HOST_POSTGRES_PORT=5433 GEODB_HOST=geodb @@ -32,9 +53,6 @@ GEODB_USER=postgres GEODB_PASSWORD=KUAa7h!G&wQEmkS3 GEODB_DB=postgres -CADDY_ACME_CA=https://acme-staging-v02.api.letsencrypt.org/directory -CADDY_IMPORT_GLOB=(*(N)) - SENTRY_DSN= REDIS_PASSWORD=change_me_with_a_very_loooooooooooong_password @@ -56,7 +74,13 @@ COMPOSE_PROJECT_NAME=qfieldcloud QFIELDCLOUD_DEFAULT_NETWORK=qfieldcloud_default QFIELDCLOUD_ADMIN_URI=admin/ +# Timeout in seconds to wait for a job container to finish, otherwise terminate it.
+QFIELDCLOUD_WORKER_TIMEOUT_S=60 + GUNICORN_TIMEOUT_S=300 GUNICORN_MAX_REQUESTS=300 GUNICORN_WORKERS=3 GUNICORN_THREADS=3 + +# required for making COMPOSE_FILE above cross-platform (do not change) +COMPOSE_PATH_SEPARATOR=: diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 52fcec698..5674967ea 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -11,6 +11,6 @@ jobs: name: Backport steps: - name: Backport - uses: m-kuhn/backport@v1.2.3 + uses: m-kuhn/backport@v1.2.5 with: - github_token: ${{ secrets.GITHUB_TOKEN }} + github_token: ${{ secrets.NYUKI_TOKEN }} diff --git a/.github/workflows/build.yml b/.github/workflows/build_and_push.yml similarity index 81% rename from .github/workflows/build.yml rename to .github/workflows/build_and_push.yml index ea7777b77..877f70418 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build_and_push.yml @@ -3,7 +3,7 @@ on: push: branches: - master - - build-containers-with-github-actions + - dispatch_deploy_after_build tags: - "v*.*.*" jobs: @@ -44,25 +44,6 @@ jobs: username: ${{ secrets.DOCKER_HUB_USERNAME }} password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} - # Caddy - - name: Docker Test Caddy - id: docker_test_caddy - uses: docker/build-push-action@v2 - with: - builder: ${{ steps.buildx.outputs.name }} - context: ./docker-caddy - file: ./docker-caddy/Dockerfile - - - name: Docker Build and Push Caddy - id: docker_build_and_push_caddy - uses: docker/build-push-action@v2 - with: - builder: ${{ steps.buildx.outputs.name }} - context: ./docker-caddy - file: ./docker-caddy/Dockerfile - push: ${{ github.event_name != 'pull_request' }} - tags: opengisch/qfieldcloud-caddy:${{ steps.prepare.outputs.tag }} - # Application - name: Docker Test Application id: docker_test_application @@ -119,3 +100,11 @@ jobs: file: ./docker-qgis/Dockerfile push: ${{ github.event_name != 'pull_request' }} tags: opengisch/qfieldcloud-qgis:${{ steps.prepare.outputs.tag }} + + - name: Trigger deployment on private repository + uses: peter-evans/repository-dispatch@v1 + with: + token: ${{ secrets.GIT_ACCESS_TOKEN }} + repository: opengisch/qfieldcloud-private + event-type: public_dispatch + client-payload: '{"version": "${{ steps.prepare.outputs.tag }}"}' diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 000000000..0eab868ef --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,20 @@ +name: 👓 Close stale issues +on: + schedule: + - cron: "30 1 * * *" + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v4 + with: + repo-token: ${{ secrets.NYUKI_TOKEN }} + stale-issue-message: | + The QFieldCloud project highly values your report and would love to see it addressed. However, this issue has been left in feedback mode for the last 14 days and is being automatically marked as "stale". If you would like to continue with this issue, please provide any missing information or answer any open questions. If you could resolve the issue yourself meanwhile, please leave a note for future readers with the same problem and close the issue. + In case you should have any uncertainty, please leave a comment and we will be happy to help you proceed with this issue. + If there is no further activity on this issue, it will be closed in a week. 
+      stale-issue-label: 'stale' + only-labels: 'feedback' + days-before-stale: 14 + days-before-close: 7 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 0d3513206..ac75380ec 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,9 +1,11 @@ name: Test -on: push +on: + - push + - pull_request jobs: test: name: Code check and tests - runs-on: ubuntu-18.04 + runs-on: ubuntu-20.04 steps: - name: Checkout repo uses: actions/checkout@v2 @@ -12,24 +14,21 @@ jobs: - name: Setup Python uses: actions/setup-python@v1 with: - python-version: 3.8 + python-version: '3.10' - name: Install pipenv run: pip install pipenv + - name: Check code formatting + uses: pre-commit/action@v2.0.3 - name: Prepare docker-compose override file run: | ln -s docker-compose.override.local.yml docker-compose.override.yml - - name: Check code formatting - run: | - pipenv install pre_commit - pipenv run python -m pre_commit run --all-files - - name: Check environment variables + - name: Check env vars configuration run: | - pipenv install pyyaml - pipenv run python .github/check_envvars/check_envvars.py .env.example --docker-compose-dir . + scripts/check_envvars.sh - name: Export the env variables file run: | cp .env.example .env - export $(egrep -v '^#' .env | xargs) + eval $(egrep "^[^#;]" .env | xargs -d'\n' -n1 | sed -E 's/(\w+)=(.*)/export \1='"'"'\2'"'"'/g') - name: Pull docker containers run: docker-compose pull - name: Build and run docker containers diff --git a/.gitignore b/.gitignore index 2dad1b4c2..f2d032161 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,4 @@ __pycache__/ .env docker-compose.override.yml client/projects +conf/nginx/certs/* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 867574959..1caa0f1df 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -23,7 +23,7 @@ repos: # Sort imports - repo: https://github.com/pycqa/isort - rev: "5.7.0" + rev: "5.10.1" hooks: - id: isort args: ["--profile", "black"] diff --git a/README.md b/README.md index 254ef0aee..1d449c698 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ QFieldCloud allows seamless synchronization of your field data with your spatial # Hosted solution If you're interested in quickly getting up and running, we suggest subscribing to the version hosted by OPENGIS.ch at https://qfield.cloud. This is also the instance that is integrated by default into QField. -QFieldCloud logo +QFieldCloud logo ## Documentation @@ -20,6 +20,17 @@ Permissions documentation is [here](https://github.com/opengisch/qfieldcloud/blo ## Development +### Clone the repository + +Clone the repository and all its submodules: + + git clone --recurse-submodules git://github.com/opengisch/qfieldcloud.git + +To fetch upstream development, don't forget to update the submodules too: + + git pull --recurse-submodules && git submodule update --recursive + + ### Launch a local instance Copy the `.env.example` into `.env` file and configure it to your @@ -28,16 +39,12 @@ desire with a good editor: cp .env.example .env emacs .env -Link or copy `docker-compose.override.local.yml` into `docker-compose.override.yml`: - - ln -s docker-compose.override.local.yml docker-compose.override.yml - To build development images and run the containers: docker-compose up -d --build -It will read `docker-compose.yml` and `docker-compose.override.yml` -and start a django built-in server at `http://localhost:8000`.
+It will read the `docker-compose*.yml` files specified in the `COMPOSE_FILE` +variable and start a django built-in server at `http://localhost:8000`. Run the django database migrations. @@ -67,6 +74,90 @@ To run only a test module (e.g. `test_permission.py`) docker-compose run app python manage.py test qfieldcloud.core.tests.test_permission +### Debugging + +> This section gives examples for VSCode; please adapt them to your IDE. + +If you are using the provided docker-compose overrides for development, `debugpy` is installed. + +You can debug interactively by adding this snippet anywhere in the code. +```python +import debugpy +debugpy.listen(("0.0.0.0", 5678)) +print("debugpy waiting for debugger... 🐛") +debugpy.wait_for_client() # optional +``` + +Or alternatively, prefix your commands with `python -m debugpy --listen 0.0.0.0:5678 --wait-for-client`. +```shell +docker-compose run app -p 5678:5678 python -m debugpy --listen 0.0.0.0:5678 --wait-for-client manage.py test +docker-compose run worker_wrapper -p 5679:5679 python -m debugpy --listen 0.0.0.0:5679 --wait-for-client manage.py test +``` + +Then, configure your IDE to connect (example given for VSCode's `.vscode/launch.json`, triggered with `F5`): +``` +{ + "version": "0.2.0", + "configurations": [ + { + "name": "QFC debug app", + "type": "python", + "request": "attach", + "justMyCode": false, + "connect": {"host": "localhost", "port": 5678}, + "pathMappings": [{ + "localRoot": "${workspaceFolder}/docker-app/qfieldcloud", + "remoteRoot": "/usr/src/app/qfieldcloud" + }] + }, + { + "name": "QFC debug worker_wrapper", + "type": "python", + "request": "attach", + "justMyCode": false, + "connect": {"host": "localhost", "port": 5679}, + "pathMappings": [{ + "localRoot": "${workspaceFolder}/docker-app/qfieldcloud", + "remoteRoot": "/usr/src/app/qfieldcloud" + }] + } + ] +} +``` + + +## Add root certificate + +QFieldCloud will automatically generate a certificate and its root certificate in `./conf/nginx/certs`. However, you need to trust the root certificate first, so other programs (e.g. curl) can establish a secure connection to the local QFieldCloud instance. + +On Debian/Ubuntu, copy the root certificate to the directory with trusted certificates. Note the extension has been changed to `.crt`: + + sudo cp ./conf/nginx/certs/rootCA.pem /usr/local/share/ca-certificates/rootCA.crt + +Trust the newly added certificate: + + sudo update-ca-certificates + +Connecting with `curl` should return no errors: + curl https://localhost:8002/ + +### Remove the root certificate + +If you want to remove or change the root certificate, you need to remove the root certificate file and refresh the list of certificates: + + sudo rm /usr/local/share/ca-certificates/rootCA.crt + sudo update-ca-certificates --fresh + +Now connecting with `curl` should fail with a similar error: + + $ curl https://localhost:8002/ + + curl: (60) SSL certificate problem: unable to get local issuer certificate + More details here: https://curl.haxx.se/docs/sslcerts.html + + curl failed to verify the legitimacy of the server and therefore could not + establish a secure connection to it. To learn more about this situation and + how to fix it, please visit the web page mentioned above. ### Code style @@ -102,34 +193,56 @@ desire with a good editor cp .env.example .env emacs .env +Do not forget to set `DEBUG=0` and to adapt `COMPOSE_FILE` so that it does not load the +local development configuration.
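For reference, a production `.env` typically flips exactly those settings. A minimal sketch follows (the name of the production compose override file is an assumption here and depends on your deployment; the remaining variables come from `.env.example`):

    DEBUG=0
    COMPOSE_FILE=docker-compose.yml:docker-compose.override.prod.yml
    COMPOSE_PATH_SEPARATOR=:
    QFIELDCLOUD_HOST=qfield.example.com
    # switch to the production Let's Encrypt endpoint once the setup has been verified
    LETSENCRYPT_STAGING=0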
+ Create the directory for qfieldcloud logs and supervisor socket file mkdir /var/local/qfieldcloud Run and build the docker containers - # dev server: - docker-compose -f docker-compose.yml -f docker-compose.override.dev.yml up -d --build - - # prod server - docker-compose -f docker-compose.yml -f docker-compose.override.dev.yml up -d --build + docker-compose up -d --build Run the django database migrations - docker-compose -f docker-compose.yml -f docker-compose.override.dev.yml exec app python manage.py migrate + docker-compose exec app python manage.py migrate + + +## Create a certificate using Let's Encrypt + +If you are running QFieldCloud on a server with a public domain, you can install a Let's Encrypt certificate by running the following command: + + ./scripts/init_letsencrypt.sh +Note that you may want to change the `LETSENCRYPT_EMAIL`, `LETSENCRYPT_RSA_KEY_SIZE` and `LETSENCRYPT_STAGING` variables. ### Infrastructure Based on this example
+### Ports
+
+| service       | port | configuration      | local              | development        | production         |
+|---------------|------|--------------------|--------------------|--------------------|--------------------|
+| nginx http    | 80   | WEB_HTTP_PORT      | :white_check_mark: | :white_check_mark: | :white_check_mark: |
+| nginx https   | 443  | WEB_HTTPS_PORT     | :white_check_mark: | :white_check_mark: | :white_check_mark: |
+| django http   | 5001 |                    | :white_check_mark: | :x:                | :x:                |
+| postgres      | 5433 | HOST_POSTGRES_PORT | :white_check_mark: | :white_check_mark: | :white_check_mark: |
+| redis         | 6379 | REDIS_PORT         | :white_check_mark: | :white_check_mark: | :white_check_mark: |
+| geodb         | 5432 | HOST_POSTGRES_PORT | :white_check_mark: | :white_check_mark: | :x:                |
+| minio API     | 8009 | MINIO_API_PORT     | :white_check_mark: | :x:                | :x:                |
+| minio browser | 8010 | MINIO_BROWSER_PORT | :white_check_mark: | :x:                | :x:                |
+| smtp web      | 5000 |                    | :white_check_mark: | :x:                | :x:                |
+| smtp          | 25   |                    | :white_check_mark: | :x:                | :x:                |
+| imap          | 143  |                    | :white_check_mark: | :x:                | :x:                |
### Logs Docker logs are managed by docker in the default way. To read the logs: - docker-compose -f docker-compose.yml -f docker-compose.override.dev.yml logs + docker-compose logs ### Geodb diff --git a/conf/nginx/options-ssl-nginx.conf b/conf/nginx/options-ssl-nginx.conf new file mode 100644 index 000000000..978e6e8ab --- /dev/null +++ b/conf/nginx/options-ssl-nginx.conf @@ -0,0 +1,14 @@ +# This file contains important security parameters. If you modify this file +# manually, Certbot will be unable to automatically provide future security +# updates. Instead, Certbot will print and log an error message with a path to +# the up-to-date file that you will need to refer to when manually updating +# this file. + +ssl_session_cache shared:le_nginx_SSL:10m; +ssl_session_timeout 1440m; +ssl_session_tickets off; + +ssl_protocols TLSv1.2 TLSv1.3; +ssl_prefer_server_ciphers off; + +ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384"; diff --git a/conf/nginx/pages/403.html b/conf/nginx/pages/403.html new file mode 100644 index 000000000..0da8e2cd1 --- /dev/null +++ b/conf/nginx/pages/403.html @@ -0,0 +1,12 @@ + + + + + + + 403 Forbidden + + +

403 Forbidden

+ + diff --git a/conf/nginx/pages/404.html b/conf/nginx/pages/404.html new file mode 100644 index 000000000..8aa91d829 --- /dev/null +++ b/conf/nginx/pages/404.html @@ -0,0 +1,12 @@ + + + + + + + 404 Not Found + + +

404 Not Found

+ + diff --git a/conf/nginx/pages/500.html b/conf/nginx/pages/500.html new file mode 100644 index 000000000..02645e68b --- /dev/null +++ b/conf/nginx/pages/500.html @@ -0,0 +1,12 @@ + + + + + + + 500 Internal Server Error + + +

500 Internal Server Error

+ + diff --git a/conf/nginx/ssl-dhparams.pem b/conf/nginx/ssl-dhparams.pem new file mode 100644 index 000000000..9b182b720 --- /dev/null +++ b/conf/nginx/ssl-dhparams.pem @@ -0,0 +1,8 @@ +-----BEGIN DH PARAMETERS----- +MIIBCAKCAQEA//////////+t+FRYortKmq/cViAnPTzx2LnFg84tNpWp4TZBFGQz ++8yTnc4kmz75fS/jY2MMddj2gbICrsRhetPfHtXV/WVhJDP1H18GbtCFY2VVPe0a +87VXE15/V8k1mE8McODmi3fipona8+/och3xWKE2rec1MKzKT0g6eXq8CrGCsyT7 +YdEIqUuyyOP7uWrat2DX9GgdT0Kj3jlN9K5W7edjcrsZCwenyO4KbXCeAvzhzffi +7MA0BM0oNC9hkXL+nOmFg/+OTxIy7vKBg8P+OxtMb61zO7X8vC7CIAXFjvGDfRaD +ssbzSibBsu/6iGtCOGEoXJf//////////wIBAg== +-----END DH PARAMETERS----- diff --git a/conf/nginx/templates/default.conf.template b/conf/nginx/templates/default.conf.template new file mode 100644 index 000000000..a96f9c83f --- /dev/null +++ b/conf/nginx/templates/default.conf.template @@ -0,0 +1,113 @@ +log_format upstreamlog '[$time_local] $remote_addr - $remote_user ' + 'to: $upstream_addr "$request" $status $body_bytes_sent ' + '"$http_referer" "$http_user_agent"' + 'rt=$request_time uct="$upstream_connect_time" uht="$upstream_header_time" urt="$upstream_response_time"'; + +upstream django { + server app:8000 fail_timeout=0; +} + +server { + listen 80; + server_name ${QFIELDCLOUD_HOST}; + + # cerbot + location /.well-known/acme-challenge/ { + root /var/www/certbot; + } + + # prevent access by IP + if ($http_host != "${QFIELDCLOUD_HOST}") { + return 444; + } + + # use $WEB_HTTPS_PORT in the configuration, since it might be different from 443 (e.g. localhost) + location / { + return 302 https://$host:${WEB_HTTPS_PORT}$request_uri; + } +} + +server { + listen 443 ssl; + + ssl_certificate certs/${QFIELDCLOUD_HOST}.pem; + ssl_certificate_key certs/${QFIELDCLOUD_HOST}-key.pem; + + server_name ${QFIELDCLOUD_HOST}; + client_max_body_size 10G; + keepalive_timeout 5; + + # path for static files (only needed for serving local staticfiles) + root /var/www/html/; + + # upgrade to https + error_page 497 https://$host:${WEB_HTTPS_PORT}$request_uri; + + # prevent access by IP + if ($http_host != "${QFIELDCLOUD_HOST}") { + return 444; + } + + # deny annoying bot + deny 34.215.13.216; + + # checks for static file, if not found proxy to app + location / { + try_files $uri @proxy_to_app; + } + + location @proxy_to_app { + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header Host $http_host; + + proxy_read_timeout 300; + proxy_send_timeout 300; + + proxy_redirect off; + proxy_pass http://django; + } + + location /storage-download/ { + # Only allow internal redirects + internal; + + access_log /var/log/nginx/access.log upstreamlog; + set $redirect_uri "$upstream_http_redirect_uri"; + + # required DNS + resolver 8.8.8.8; + + # Stops the local disk from being written to (just forwards data through) + proxy_max_temp_file_size 0; + proxy_buffering off; + + # does not work with S3 otherwise + proxy_ssl_server_name on; + + # remove the authorization and the cookie headers + proxy_set_header Authorization ''; + proxy_set_header Cookie ''; + + # hide Object Storage related headers + proxy_hide_header Access-Control-Allow-Credentials; + proxy_hide_header Access-Control-Allow-Headers; + proxy_hide_header Access-Control-Allow-Methods; + proxy_hide_header Access-Control-Allow-Origin; + proxy_hide_header Access-Control-Expose-Headers; + proxy_hide_header X-Amz-Meta-Sha256sum; + proxy_hide_header X-Amz-Req-Time-Micros; + proxy_hide_header X-Amz-Request-Id; + proxy_hide_header X-Amz-Storage-Class; + proxy_hide_header 
X-Amz-Version-Id; + + proxy_intercept_errors on; + + proxy_pass $redirect_uri; + + error_page 404 =404 /pages/404.html; + error_page 403 =403 /pages/403.html; + error_page 401 402 405 406 407 408 409 410 411 412 413 414 415 416 417 500 501 502 503 504 505 =500 /pages/500.html; + } + +} diff --git a/dev-start-docker-up-and-sass.sh b/dev-start-docker-up-and-sass.sh deleted file mode 100755 index 78a85debc..000000000 --- a/dev-start-docker-up-and-sass.sh +++ /dev/null @@ -1,2 +0,0 @@ -# following command starts up local containers and then starts sass processer which compiles scss files to css. -docker-compose up -d && sass --watch docker-app/qfieldcloud/core/web/staticfiles/scss/qfieldcloud.scss docker-app/qfieldcloud/core/web/staticfiles/css/qfieldcloud.css diff --git a/docker-app/Dockerfile b/docker-app/Dockerfile index 4f3ca8e12..09e5994a8 100644 --- a/docker-app/Dockerfile +++ b/docker-app/Dockerfile @@ -14,12 +14,16 @@ ENV PYTHONUNBUFFERED 1 # install dependencies RUN apt update \ - && apt install -y netcat-openbsd + && apt install -y netcat python3-gdal # install dependencies COPY ./requirements.txt . RUN pip install -r requirements.txt +# install debug dependencies +ARG DEBUG_BUILD +RUN if [ "$DEBUG_BUILD" = "1" ]; then pip install debugpy; fi + # copy project COPY . . diff --git a/docker-app/qfieldcloud/authentication/models.py b/docker-app/qfieldcloud/authentication/models.py index 71ebd31d7..8fceb6980 100644 --- a/docker-app/qfieldcloud/authentication/models.py +++ b/docker-app/qfieldcloud/authentication/models.py @@ -40,17 +40,22 @@ def guess_client_type(user_agent: str) -> ClientType: if user_agent.startswith("cli|"): return AuthToken.ClientType.CLI - if re.search(r" QGIS/[34]\d{4}$", user_agent): + if re.search(r" QGIS/[34]\d{4}(\/.*)?$", user_agent): return AuthToken.ClientType.QFIELDSYNC if re.search( - r"Mozilla/5.0 .+ AppleWebKit/\d+.\d+ (KHTML, like Gecko)", user_agent + r"Mozilla\/5.0 .+(AppleWebKit\/\d+.\d+ \(KHTML, like Gecko\)|Firefox\/[\d\.]+)", + user_agent, ): return AuthToken.ClientType.BROWSER return AuthToken.ClientType.UNKNOWN - single_token_clients = [ClientType.QFIELD, ClientType.QFIELDSYNC] + single_token_clients = [ + ClientType.QFIELD, + ClientType.QFIELDSYNC, + ClientType.UNKNOWN, + ] user = models.ForeignKey( get_user_model(), diff --git a/docker-app/qfieldcloud/authentication/tests/__init__.py b/docker-app/qfieldcloud/authentication/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/docker-app/qfieldcloud/authentication/tests/test_authentication.py b/docker-app/qfieldcloud/authentication/tests/test_authentication.py new file mode 100644 index 000000000..a916133c5 --- /dev/null +++ b/docker-app/qfieldcloud/authentication/tests/test_authentication.py @@ -0,0 +1,204 @@ +import logging + +from django.utils.timezone import datetime, now +from qfieldcloud.authentication.models import AuthToken +from qfieldcloud.core.models import User +from rest_framework.test import APITransactionTestCase + +logging.disable(logging.CRITICAL) + + +class QfcTestCase(APITransactionTestCase): + def setUp(self): + # Create a user + self.user1 = User.objects.create_user(username="user1", password="abc123") + + def assertTokenMatch(self, token, payload): + expires_at = payload.pop("expires_at") + avatar_url = payload.pop("avatar_url") + self.assertDictEqual( + payload, + { + "token": token.key, + # "expires_at": tokens[0].expires_at.isoformat(), + "username": token.user.username, + "email": "", + "first_name": "", + "full_name": "", + "last_name": "", + 
"user_type": "1", + }, + ) + self.assertTrue(datetime.fromisoformat(expires_at) == token.expires_at) + self.assertTrue(datetime.fromisoformat(expires_at) > now()) + self.assertTrue(avatar_url is None or avatar_url.startswith("http")) + self.assertTrue( + avatar_url is None + or avatar_url.endswith( + f"/api/v1/files/public/users/{token.user.username}/avatar.svg" + ) + ) + + def login(self, username, password, user_agent=""): + response = self.client.post( + "/api/v1/auth/login/", + { + "username": username, + "password": password, + }, + HTTP_USER_AGENT=user_agent, + ) + + self.assertEqual(response.status_code, 200) + + return response + + def test_login_logout(self): + response = self.login("user1", "abc123") + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertEquals(len(tokens), 1) + self.assertTokenMatch(tokens[0], response.json()) + self.assertGreater(tokens[0].expires_at, now()) + + # set auth token + self.client.credentials(HTTP_AUTHORIZATION="Token " + tokens[0].key) + + # logout + response = self.client.post("/api/v1/auth/logout/") + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertEqual(response.status_code, 200) + + self.assertEquals(len(tokens), 1) + self.assertLess(tokens[0].expires_at, now()) + + def test_multiple_logins(self): + # first single active token login + response = self.login("user1", "abc123", "Mozilla/5.0 QGIS/32203") + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertEquals(len(tokens), 1) + self.assertTokenMatch(tokens[0], response.json()) + + # second single active token login + response = self.login("user1", "abc123", "Mozilla/5.0 QGIS/32203") + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertEquals(len(tokens), 2) + self.assertTokenMatch(tokens[0], response.json()) + self.assertNotEqual(tokens[0], tokens[1]) + self.assertGreater(tokens[0].expires_at, now()) + self.assertLess(tokens[1].expires_at, now()) + + # first single active token login + response = self.login("user1", "abc123", "sdk|py|dev python-requests|2.26.0") + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertEquals(len(tokens), 3) + self.assertTokenMatch(tokens[0], response.json()) + + # second single active token login + response = self.login("user1", "abc123", "sdk|py|dev python-requests|2.26.0") + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertEquals(len(tokens), 4) + self.assertTokenMatch(tokens[0], response.json()) + self.assertNotEqual(tokens[0], tokens[1]) + self.assertGreater(tokens[0].expires_at, now()) + self.assertGreater(tokens[1].expires_at, now()) + + def test_client_type(self): + # QFIELDSYNC login + response = self.login("user1", "abc123", "Mozilla/5.0 QGIS/32203") + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertTokenMatch(tokens[0], response.json()) + self.assertEqual(tokens[0].client_type, AuthToken.ClientType.QFIELDSYNC) + + response = self.login( + "user1", "abc123", "Mozilla/5.0 QGIS/32400/Ubuntu 20.04.4 LTS" + ) + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertTokenMatch(tokens[0], response.json()) + self.assertEqual(tokens[0].client_type, AuthToken.ClientType.QFIELDSYNC) + + # SDK login + response = self.login("user1", "abc123", "sdk|py|dev python-requests|2.26.0") + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertTokenMatch(tokens[0], response.json()) + self.assertEqual(tokens[0].client_type, AuthToken.ClientType.SDK) + 
+ # BROWSER login + response = self.login( + "user1", + "abc123", + "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:97.0) Gecko/20100101 Firefox/97.0", + ) + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertTokenMatch(tokens[0], response.json()) + self.assertEqual(tokens[0].client_type, AuthToken.ClientType.BROWSER) + + response = self.login( + "user1", + "abc123", + "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.105 Safari/537.36", + ) + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertTokenMatch(tokens[0], response.json()) + self.assertEqual(tokens[0].client_type, AuthToken.ClientType.BROWSER) + + # QFIELD login + response = self.login( + "user1", + "abc123", + "qfield|1.0.0|local - dev|3515ce8cba0f0e0abb92e06bf30a00531810656f| QGIS/31900", + ) + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertTokenMatch(tokens[0], response.json()) + self.assertEqual(tokens[0].client_type, AuthToken.ClientType.QFIELD) + + # UNKNOWN login + response = self.login("user1", "abc123", "Слава Україні!") + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertTokenMatch(tokens[0], response.json()) + self.assertEqual(tokens[0].client_type, AuthToken.ClientType.UNKNOWN) + + def test_last_used_at(self): + response = self.login("user1", "abc123") + + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertEquals(len(tokens), 1) + self.assertTokenMatch(tokens[0], response.json()) + self.assertIsNone(tokens[0].last_used_at) + + # set auth token + self.client.credentials(HTTP_AUTHORIZATION="Token " + tokens[0].key) + + # first token usage + response = self.client.get(f"/api/v1/users/{self.user1.username}/") + + self.assertEqual(response.status_code, 200) + + tokens = self.user1.auth_tokens.order_by("-created_at").all() + first_used_at = tokens[0].last_used_at + + self.assertEquals(len(tokens), 1) + + # second token usage + response = self.client.get(f"/api/v1/users/{self.user1.username}/") + + self.assertEqual(response.status_code, 200) + + tokens = self.user1.auth_tokens.order_by("-created_at").all() + second_used_at = tokens[0].last_used_at + + self.assertEquals(len(tokens), 1) + self.assertLess(first_used_at, second_used_at) diff --git a/docker-app/qfieldcloud/authentication/views.py b/docker-app/qfieldcloud/authentication/views.py index 8993490bc..cc5baeff8 100644 --- a/docker-app/qfieldcloud/authentication/views.py +++ b/docker-app/qfieldcloud/authentication/views.py @@ -88,7 +88,7 @@ def post(self, request, *args, **kwargs): def logout(self, request): try: now = timezone.now() - request.user.auth_token.filter(expired_at__gt=now).update(expired_at=now) + request.user.auth_tokens.filter(expires_at__gt=now).update(expires_at=now) except (AttributeError, ObjectDoesNotExist): pass diff --git a/docker-app/qfieldcloud/core/admin.py b/docker-app/qfieldcloud/core/admin.py index 8d9b2d22f..e8c24e8e6 100644 --- a/docker-app/qfieldcloud/core/admin.py +++ b/docker-app/qfieldcloud/core/admin.py @@ -16,10 +16,10 @@ ApplyJob, ApplyJobDelta, Delta, - ExportJob, Geodb, Organization, OrganizationMember, + PackageJob, ProcessProjectfileJob, Project, ProjectCollaborator, @@ -274,8 +274,28 @@ class ProjectAdmin(admin.ModelAdmin): "created_at", "updated_at", ) - fields = ("name", "description", "is_public", "owner", "storage_size") - readonly_fields = ("storage_size",) + fields = ( + "id", + "name", + "description", + "is_public", + "owner", + "storage_size", + 
"created_at", + "updated_at", + "data_last_updated_at", + "data_last_packaged_at", + "project_details__pre", + ) + readonly_fields = ( + "id", + "storage_size", + "created_at", + "updated_at", + "data_last_updated_at", + "data_last_packaged_at", + "project_details__pre", + ) inlines = (ProjectCollaboratorInline,) search_fields = ( "id", @@ -283,6 +303,12 @@ class ProjectAdmin(admin.ModelAdmin): "owner__username__iexact", ) + def project_details__pre(self, instance): + if instance.project_details is None: + return "" + + return format_pre_json(instance.project_details) + class DeltaInline(admin.TabularInline): model = ApplyJob.deltas_to_apply.through @@ -489,7 +515,7 @@ def response_change(self, request, delta): return super().response_change(request, delta) -class ExportJobAdmin(admin.ModelAdmin): +class PackageJobAdmin(admin.ModelAdmin): list_display = ( "id", "project__owner", @@ -740,7 +766,7 @@ def save_model(self, request, obj, form, change): admin.site.register(Project, ProjectAdmin) admin.site.register(Delta, DeltaAdmin) admin.site.register(ApplyJob, ApplyJobAdmin) -admin.site.register(ExportJob, ExportJobAdmin) +admin.site.register(PackageJob, PackageJobAdmin) admin.site.register(ProcessProjectfileJob, ProcessProjectfileJobAdmin) admin.site.register(Geodb, GeodbAdmin) diff --git a/docker-app/qfieldcloud/core/cron.py b/docker-app/qfieldcloud/core/cron.py index 9d63ec854..26e598c55 100644 --- a/docker-app/qfieldcloud/core/cron.py +++ b/docker-app/qfieldcloud/core/cron.py @@ -1,12 +1,22 @@ import logging +import os +from datetime import timedelta +from django.conf import settings +from django.utils import timezone from django_cron import CronJobBase, Schedule from invitations.utils import get_invitation_model +from sentry_sdk import capture_message +from ..core.models import Job from .invitations_utils import send_invitation logger = logging.getLogger(__name__) +QGIS_CONTAINER_NAME = os.environ.get("QGIS_CONTAINER_NAME", None) + +assert QGIS_CONTAINER_NAME + class DeleteExpiredInvitationsJob(CronJobBase): schedule = Schedule(run_every_mins=60) @@ -35,3 +45,34 @@ def do(self): logger.info( f'Resend {len(invitation_emails)} previously failed invitation(s) to: {", ".join(invitation_emails)}' ) + + +class SetTerminatedWorkersToFinalStatusJob(CronJobBase): + # arbitrary number 3 here, it just feel a good number since the configuration is 10 mins + schedule = Schedule(run_every_mins=3) + code = "qfieldcloud.set_terminated_workers_to_final_status" + + def do(self): + jobs = Job.objects.filter( + status__in=[Job.Status.QUEUED, Job.Status.STARTED], + # add extra seconds just to make sure a properly finished job properly updated the status. + started_at__lt=timezone.now() + - timedelta(seconds=settings.WORKER_TIMEOUT_S + 10), + ) + + for job in jobs: + capture_message( + f'Job "{job.id}" was with status "{job.status}", but worker container no longer exists. Job unexpectedly terminated.' 
+ ) + + jobs.update( + status=Job.Status.FAILED, + finished_at=timezone.now(), + feedback={ + "error_stack": "", + "error": "Job unexpectedly terminated.", + "error_origin": "worker_wrapper", + "container_exit_code": -2, + }, + output="Job unexpectedly terminated.", + ) diff --git a/docker-app/qfieldcloud/core/exceptions.py b/docker-app/qfieldcloud/core/exceptions.py index 895b49aa8..3c9c96d19 100644 --- a/docker-app/qfieldcloud/core/exceptions.py +++ b/docker-app/qfieldcloud/core/exceptions.py @@ -126,11 +126,11 @@ class InvalidJobError(QFieldCloudException): status_code = status.HTTP_400_BAD_REQUEST -class QGISExportError(QFieldCloudException): - """Raised when the QGIS export of a project fails""" +class QGISPackageError(QFieldCloudException): + """Raised when the QGIS package of a project fails""" - code = "qgis_export_error" - message = "QGIS export failed" + code = "qgis_package_error" + message = "QGIS package failed" status_code = status.HTTP_500_INTERNAL_SERVER_ERROR if "Unable to open file with QGIS" in message: diff --git a/docker-app/qfieldcloud/core/logging/filters.py b/docker-app/qfieldcloud/core/logging/filters.py deleted file mode 100644 index 056d2a05a..000000000 --- a/docker-app/qfieldcloud/core/logging/filters.py +++ /dev/null @@ -1,29 +0,0 @@ -import logging - -from django.http import HttpRequest - - -def skip_logging(func): - def wrapper(instance, *args, **kwargs): - request = instance.request - - if not isinstance(request, HttpRequest): - request = request._request - - request.skip_logging = True - - return func(instance, *args, **kwargs) - - return wrapper - - -class SkipLoggingFilter(logging.Filter): - def filter(self, record): - return not getattr(record, "skip_logging", False) - - def extra_from_record(self, record): - """Returns `extra` dict you passed to logger. - The `extra` keyword argument is used to populate the `__dict__` of - the `LogRecord`. 
- """ - return {attr_name: record.__dict__[attr_name] for attr_name in record.__dict__} diff --git a/docker-app/qfieldcloud/core/logging/formatters.py b/docker-app/qfieldcloud/core/logging/formatters.py index 81d4a0ebd..cbe18fbbb 100644 --- a/docker-app/qfieldcloud/core/logging/formatters.py +++ b/docker-app/qfieldcloud/core/logging/formatters.py @@ -1,8 +1,3 @@ -import datetime -import json -import logging -import traceback - import json_log_formatter from django.core.handlers.wsgi import WSGIRequest from django.core.serializers.json import DjangoJSONEncoder @@ -30,73 +25,6 @@ def to_json(self, record): return self.json_lib.dumps(record, cls=JsonEncoder) -class CustomisedRequestHumanFormatter(logging.Formatter): - def format(self, record): - record.getMessage() - extra = self.extra_from_record(record) - - created = extra.get("created") - if created: - created = datetime.datetime.fromtimestamp(created) - - request_headers = "\n" - for header, value in extra.get("request_headers", {}).items(): - request_headers += f" {header}: {value}\n" - - response_headers = "\n" - for header, value in extra.get("response_headers", {}).items(): - response_headers += f" {header}: {value}\n" - - request_body = ( - extra.get("request_body", "NO_REQUEST_BODY") or "EMPTY_REQUEST_BODY" - ) - if not isinstance(request_body, str): - request_body = json.dumps(request_body, indent=2, cls=JsonEncoder) - - response_body = ( - extra.get("response_body", "NO_RESPONSE_BODY") or "EMPTY_RESPONSE_BODY" - ) - if not isinstance(response_body, str): - response_body = json.dumps(response_body, indent=2, cls=JsonEncoder) - - python_exception = "" - if extra.get("exception"): - exception = extra.get("exception") - tb1 = traceback.TracebackException.from_exception(exception) - exception_str = " ".join(tb1.format()) - python_exception = f"""Exception (ERROR): - {exception_str} -""" - - return f""" -================================================================================ -| HTTP Request -================================================================================ -Request: {extra.get("request_method", "UNKNOWN_REQUEST_METHOD")} {extra.get("request_path", "UNKNOWN_REQUEST_PATH")} {extra.get("status_code", "UNKNOWN_STATUS_CODE")} -Time: {created}; relative - {extra.get("relativeCreated", "UNKNOWN_RELATIVE_CREATED")}; runtime - {extra.get("run_time", "UNKNOWN_RUN_TIME")} -Context: PID #{extra.get("process", "UNKNOWN_PID")}; thread #{extra.get("thread", "UNKNOWN_THREAD")} ({extra.get("threadName", "UNKNOWN_THREAD_NAME")}) -Request headers: {request_headers} -Request files: {", ".join(extra.get("files", [])) or "NO_FILES"} -Request payload: -------------------------------------------------------------------------------S -{request_body} -------------------------------------------------------------------------------E -{python_exception} -Response headers: {response_headers} -Response payload: -------------------------------------------------------------------------------S -{response_body} -------------------------------------------------------------------------------E - """ - - def extra_from_record(self, record): - """Returns `extra` dict you passed to logger. - The `extra` keyword argument is used to populate the `__dict__` of - the `LogRecord`. 
- """ - return {attr_name: record.__dict__[attr_name] for attr_name in record.__dict__} - - def json_default(obj): if isinstance(obj, WSGIRequest): return str(obj) diff --git a/docker-app/qfieldcloud/core/management/commands/dequeue.py b/docker-app/qfieldcloud/core/management/commands/dequeue.py index ef8204519..16c879986 100644 --- a/docker-app/qfieldcloud/core/management/commands/dequeue.py +++ b/docker-app/qfieldcloud/core/management/commands/dequeue.py @@ -2,6 +2,8 @@ import signal from time import sleep +from django.conf import settings +from django.contrib.contenttypes.models import ContentType from django.core.management.base import BaseCommand from django.db import transaction from django.db.models import Count, Q @@ -9,7 +11,7 @@ from qfieldcloud.core.utils2.db import use_test_db_if_exists from worker_wrapper.wrapper import ( DeltaApplyJobRun, - ExportJobRun, + PackageJobRun, ProcessProjectfileJobRun, ) @@ -41,6 +43,11 @@ def handle(self, *args, **options): while killer.alive: with use_test_db_if_exists(): + # the worker-wrapper caches outdated ContentType ids during tests since + # the worker-wrapper and the tests reside in different containers + if settings.DATABASES["default"]["NAME"].startswith("test_"): + ContentType.objects.clear_cache() + queued_job = None with transaction.atomic(): @@ -105,7 +112,7 @@ def run(self, job_id, *args, **options): def _run(self, job: Job): job_run_classes = { - Job.Type.EXPORT: ExportJobRun, + Job.Type.PACKAGE: PackageJobRun, Job.Type.DELTA_APPLY: DeltaApplyJobRun, Job.Type.PROCESS_PROJECTFILE: ProcessProjectfileJobRun, } diff --git a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py new file mode 100644 index 000000000..496814065 --- /dev/null +++ b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py @@ -0,0 +1,43 @@ +from django.core.management.base import BaseCommand, CommandError +from qfieldcloud.core.models import Project +from qfieldcloud.core.utils2 import storage + + +class Command(BaseCommand): + """Runs purge_old_file_versions as a management command""" + + help = storage.purge_old_file_versions.__doc__ + + PROMPT_TXT = "This will purge old files for all projects. Rerun with --force, or type 'yes' to continue, or 'no' to cancel: " + + def add_arguments(self, parser): + parser.add_argument( + "--projects", + type=str, + help="Comma separated list of ids of projects to prune. 
If unset, will purge all projects", + ) + parser.add_argument( + "--force", + action="store_true", + help="Prevent confirmation prompt when purging all projects", + ) + + def handle(self, *args, **options): + + # Determine project ids to work on + proj_ids = options.get("projects") + + # Get the affected projects + if not proj_ids: + if options.get("force") is not True and input(Command.PROMPT_TXT) != "yes": + raise CommandError("Collecting static files cancelled.") + proj_instances = Project.objects.all() + else: + proj_instances = Project.objects.filter(pk__in=proj_ids.split(",")) + + # Iterate through projects + proj_instances = proj_instances.prefetch_related("owner__useraccount") + for proj_instance in proj_instances: + storage.purge_old_file_versions(proj_instance) + + print("done !") diff --git a/docker-app/qfieldcloud/core/migrations/0049_auto_20211117_1843.py b/docker-app/qfieldcloud/core/migrations/0049_auto_20211117_1843.py new file mode 100644 index 000000000..c842fd287 --- /dev/null +++ b/docker-app/qfieldcloud/core/migrations/0049_auto_20211117_1843.py @@ -0,0 +1,45 @@ +# Generated by Django 3.2.9 on 2021-11-17 17:43 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +def fill_in_last_apply_attempt_at(apps, schema_editor): + # Old values in output field of delta table where string instead of json + Delta = apps.get_model("core", "Delta") + ApplyJobDelta = apps.get_model("core", "ApplyJobDelta") + + for delta in Delta.objects.all(): + jobs_qs = ApplyJobDelta.objects.filter(delta=delta) + + if jobs_qs.count(): + job_delta = jobs_qs.latest("apply_job__started_at") + delta.last_apply_attempt_at = job_delta.apply_job.started_at + delta.last_apply_attempt_by = job_delta.apply_job.created_by + delta.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0048_useraccount_notifs_frequency"), + ] + + operations = [ + migrations.AddField( + model_name="delta", + name="last_apply_attempt_at", + field=models.DateTimeField(null=True), + ), + migrations.AddField( + model_name="delta", + name="last_apply_attempt_by", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), + migrations.RunPython(fill_in_last_apply_attempt_at, migrations.RunPython.noop), + ] diff --git a/docker-app/qfieldcloud/core/migrations/0050_auto_20211118_1150.py b/docker-app/qfieldcloud/core/migrations/0050_auto_20211118_1150.py new file mode 100644 index 000000000..6576284d7 --- /dev/null +++ b/docker-app/qfieldcloud/core/migrations/0050_auto_20211118_1150.py @@ -0,0 +1,50 @@ +# Generated by Django 3.2.8 on 2021-10-27 09:50 + +from django.db import migrations, models + + +def rename_export_to_package(apps, schema_editor): + Job = apps.get_model("core", "Job") + Job.objects.filter(type="export").update(type="package") + + +def rename_package_to_export(apps, schema_editor): + Job = apps.get_model("core", "Job") + Job.objects.filter(type="package").update(type="export") + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0049_auto_20211117_1843"), + ] + + operations = [ + migrations.AddField( + model_name="project", + name="data_last_packaged_at", + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name="project", + name="data_last_updated_at", + field=models.DateTimeField(blank=True, null=True), + ), + migrations.RenameModel( + old_name="ExportJob", + new_name="PackageJob", + ), + 
migrations.AlterField( + model_name="job", + name="type", + field=models.CharField( + choices=[ + ("package", "Package"), + ("delta_apply", "Delta Apply"), + ("process_projectfile", "Process QGIS Project File"), + ], + max_length=32, + ), + ), + migrations.RunPython(rename_export_to_package, rename_package_to_export), + ] diff --git a/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py b/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py new file mode 100644 index 000000000..d3ec31356 --- /dev/null +++ b/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py @@ -0,0 +1,128 @@ +# Generated by Django 3.2.9 on 2021-11-25 03:44 + +import django.contrib.gis.db.models.fields +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0050_auto_20211118_1150"), + ] + + operations = [ + migrations.AlterModelOptions( + name="packagejob", + options={ + "verbose_name": "Job: package", + "verbose_name_plural": "Jobs: package", + }, + ), + migrations.AddField( + model_name="delta", + name="new_geom", + field=django.contrib.gis.db.models.fields.GeometryField( + dim=4, null=True, srid=4326 + ), + ), + migrations.AddField( + model_name="delta", + name="old_geom", + field=django.contrib.gis.db.models.fields.GeometryField( + dim=4, null=True, srid=4326 + ), + ), + migrations.RunSQL( + r""" + WITH subquery AS ( + SELECT + id, + CASE + WHEN jsonb_extract_path_text(content, 'localLayerCrs') ~ '^EPSG:\d{1,10}$' + THEN + REGEXP_REPLACE(jsonb_extract_path_text(content, 'localLayerCrs'), '\D*', '', 'g')::int + ELSE + NULL + END AS srid + FROM core_delta + ) + UPDATE core_delta + SET + old_geom = + ST_Transform( + ST_SetSRID( + ST_Force2D( + ST_GeomFromText( + REPLACE( jsonb_extract_path_text(core_delta.content, 'old', 'geometry'), 'nan', '0' ) + ) + ), + subquery.srid + ), + 4326 + ), + new_geom = + ST_Transform( + ST_SetSRID( + ST_Force2D( + ST_GeomFromText( + REPLACE( jsonb_extract_path_text(core_delta.content, 'new', 'geometry'), 'nan', '0' ) + ) + ), + subquery.srid + ), + 4326 + ) + FROM subquery + WHERE core_delta.id = subquery.id + """, + migrations.RunSQL.noop, + ), + migrations.RunSQL( + r""" + CREATE FUNCTION core_delta_geom_trigger_func() + RETURNS trigger + AS + $$ + DECLARE + srid int; + BEGIN + SELECT CASE + WHEN jsonb_extract_path_text(NEW.content, 'localLayerCrs') ~ '^EPSG:\d{1,10}$' + THEN + REGEXP_REPLACE(jsonb_extract_path_text(NEW.content, 'localLayerCrs'), '\D*', '', 'g')::int + ELSE + NULL + END INTO srid; + NEW.old_geom := ST_Transform( ST_SetSRID( ST_Force2D( REPLACE( ST_GeomFromText( jsonb_extract_path_text(NEW.content, 'old', 'geometry') ), 'nan', '0' ) ), srid ), 4326 ); + NEW.new_geom := ST_Transform( ST_SetSRID( ST_Force2D( REPLACE( ST_GeomFromText( jsonb_extract_path_text(NEW.content, 'new', 'geometry') ), 'nan', '0' ) ), srid ), 4326 ); + RETURN NEW; + END; + $$ + LANGUAGE PLPGSQL + """, + r""" + DROP FUNCTION core_delta_geom_trigger_func(); + """, + ), + migrations.RunSQL( + r""" + CREATE TRIGGER core_delta_geom_update_trigger BEFORE UPDATE ON core_delta + FOR EACH ROW + WHEN (OLD.content IS DISTINCT FROM NEW.content) + EXECUTE FUNCTION core_delta_geom_trigger_func() + """, + r""" + DROP TRIGGER core_delta_geom_update_trigger ON core_delta; + """, + ), + migrations.RunSQL( + r""" + CREATE TRIGGER core_delta_geom_insert_trigger BEFORE INSERT ON core_delta + FOR EACH ROW + EXECUTE FUNCTION core_delta_geom_trigger_func() + """, + r""" + DROP TRIGGER core_delta_geom_insert_trigger ON core_delta + 
""", + ), + ] diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py index 289356e31..d959130a7 100644 --- a/docker-app/qfieldcloud/core/models.py +++ b/docker-app/qfieldcloud/core/models.py @@ -4,24 +4,23 @@ import uuid from datetime import timedelta from enum import Enum -from typing import Any, Iterable, Type +from typing import Iterable, List import qfieldcloud.core.utils2.storage +from auditlog.registry import auditlog from django.contrib.auth.models import AbstractUser, UserManager +from django.contrib.gis.db import models from django.core.exceptions import ValidationError from django.core.validators import RegexValidator -from django.db import models from django.db.models import Case, Exists, OuterRef, Q from django.db.models import Value as V from django.db.models import When from django.db.models.aggregates import Count from django.db.models.fields.json import JSONField -from django.db.models.signals import post_delete, post_save, pre_delete -from django.dispatch import receiver from django.urls import reverse_lazy from django.utils.translation import gettext as _ +from model_utils.managers import InheritanceManager from qfieldcloud.core import geodb_utils, utils, validators -from qfieldcloud.core.utils import get_s3_object_url from timezone_field import TimeZoneField # http://springmeblog.com/2018/how-to-implement-multiple-user-types-with-django/ @@ -343,7 +342,7 @@ def is_team(self): @property def full_name(self) -> str: - return f"{self.first_name} {self.last_name}" + return f"{self.first_name} {self.last_name}".strip() @property def username_with_full_name(self) -> str: @@ -358,12 +357,16 @@ def username_with_full_name(self) -> str: def has_geodb(self) -> bool: return hasattr(self, "geodb") + def save(self, *args, **kwargs): + created = self._state.adding + super().save(*args, **kwargs) + if created: + UserAccount.objects.create(user=self) -# Automatically create a UserAccount instance when a user is created. -@receiver(post_save, sender=User) -def create_account_for_user(sender, instance, created, **kwargs): - if created: - UserAccount.objects.create(user=instance) + def delete(self, *args, **kwargs): + if self.user_type != User.TYPE_TEAM: + qfieldcloud.core.utils2.storage.remove_user_avatar(self) + super().delete(*args, **kwargs) class UserAccount(models.Model): @@ -418,12 +421,15 @@ class UserAccount(models.Model): @property def avatar_url(self): if self.avatar_uri: - return get_s3_object_url(self.avatar_uri) + return reverse_lazy( + "public_files", + kwargs={"filename": self.avatar_uri}, + ) else: return None def __str__(self): - return self.TYPE_CHOICES[self.account_type][1] + return self.get_account_type_display() class Geodb(models.Model): @@ -486,6 +492,18 @@ def __str__(self): self.user.username, self.dbname, self.username ) + def save(self, *args, **kwargs): + created = self._state.adding + super().save(*args, **kwargs) + # Automatically create a role and database when a Geodb object is created. + if created: + geodb_utils.create_role_and_db(self) + + def delete(self, *args, **kwargs): + super().delete(*args, **kwargs) + # Automatically delete role and database when a Geodb object is deleted. + geodb_utils.delete_db_and_role(self.dbname, self.username) + class OrganizationQueryset(models.QuerySet): """Adds of_user(user) method to the organization's querysets, allowing to filter only organization related to that user. 
@@ -566,18 +584,6 @@ def with_roles(self, user): return self.get_queryset().with_roles(user) -# Automatically create a role and database when a Geodb object is created. -@receiver(post_save, sender=Geodb) -def create_geodb(sender, instance, created, **kwargs): - if created: - geodb_utils.create_role_and_db(instance) - - -@receiver(post_delete, sender=Geodb) -def delete_geodb(sender, instance, **kwargs): - geodb_utils.delete_db_and_role(instance.dbname, instance.username) - - class Organization(User): objects = OrganizationManager() @@ -597,21 +603,6 @@ def save(self, *args, **kwargs): return super().save(*args, **kwargs) -@receiver(post_save, sender=Organization) -def create_account_for_organization(sender, instance, created, **kwargs): - if created: - UserAccount.objects.create(user=instance) - - -@receiver(pre_delete, sender=User) -@receiver(pre_delete, sender=Organization) -def delete_user(sender: Type[User], instance: User, **kwargs: Any) -> None: - if instance.user_type == User.TYPE_TEAM: - return - - qfieldcloud.core.utils2.storage.remove_user_avatar(instance) - - class OrganizationMember(models.Model): class Roles(models.TextChoices): ADMIN = "admin", _("Admin") @@ -697,7 +688,10 @@ class Meta: ) def clean(self) -> None: - if not self.team.team_organization.members.filter(member=self.member): + if ( + self.team.team_organization.members.filter(member=self.member).count() == 0 + and self.team.team_organization.organization_owner != self.member + ): raise ValidationError( _("Cannot add team member that is not an organization member.") ) @@ -819,6 +813,12 @@ class Project(models.Model): The owner of a project is an Organization. """ + # NOTE the status is NOT stored in the db, because it might be refactored + class Status(models.TextChoices): + OK = "ok", _("Ok") + BUSY = "busy", _("Busy") + FAILED = "failed", _("Failed") + objects = ProjectQueryset.as_manager() _cache_files_count = None @@ -866,6 +866,11 @@ class Meta: ) created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) + + # NOTE we can track only the file based layers, WFS, WMS, PostGIS etc are impossible to track + data_last_updated_at = models.DateTimeField(blank=True, null=True) + data_last_packaged_at = models.DateTimeField(blank=True, null=True) + overwrite_conflicts = models.BooleanField( default=True, help_text=_( @@ -879,7 +884,10 @@ class Meta: @property def thumbnail_url(self): if self.thumbnail_uri: - return get_s3_object_url(self.thumbnail_uri) + return reverse_lazy( + "project_metafiles", + kwargs={"projectid": self.id, "filename": self.thumbnail_uri[51:]}, + ) else: return None @@ -893,10 +901,34 @@ def __str__(self): return self.name + " (" + str(self.id) + ")" + " owner: " + self.owner.username def storage_size(self): + """Retrieves the storage size from S3""" return utils.get_s3_project_size(self.id) @property - def private(self): + def staticfile_dirs(self) -> List[str]: + """Returns a list of configured staticfile dirs for the project. + + Staticfile dir is a special directory in the QField infrastructure that holds static files + such as images, pdf etc. By default "DCIM" is considered a staticfile directory. + + TODO this function expects whether `staticfile_dirs` key in project_details. However, + neither the extraction from the projectfile, nor the configuration in QFieldSync are implemented. + + Returns: + List[str]: A list configured staticfile dirs for the project. 
+ """ + staticfile_dirs = [] + + if self.project_details and self.project_details.get("staticfile_dirs"): + staticfile_dirs = self.project_details.get("staticfile_dirs", []) + + if not staticfile_dirs: + staticfile_dirs = ["DCIM"] + + return staticfile_dirs + + @property + def private(self) -> bool: # still used in the project serializer return not self.is_public @@ -915,11 +947,65 @@ def files_count(self): def users(self): return User.objects.for_project(self) + @property + def has_online_vector_data(self) -> bool: + # it's safer to assume there is an online vector layer + if not self.project_details: + return True + + layers_by_id = self.project_details.get("layers_by_id") + + # it's safer to assume there is an online vector layer + if layers_by_id is None: + return True + + has_online_vector_layers = False + + for layer_data in layers_by_id.values(): + if layer_data.get("type_name") == "VectorLayer" and not layer_data.get( + "filename", "" + ): + has_online_vector_layers = True + break + + return has_online_vector_layers + + @property + def can_repackage(self) -> bool: + return True + + @property + def needs_repackaging(self) -> bool: + if ( + not self.has_online_vector_data + and self.data_last_updated_at + and self.data_last_packaged_at + ): + # if all vector layers are file based and have been packaged after the last update, it is safe to say there are no modifications + return self.data_last_packaged_at < self.data_last_updated_at + else: + # if the project has online vector layers (PostGIS/WFS/etc) we cannot be sure if there are modification or not, so better say there are + return True + + @property + def status(self) -> Status: + # NOTE the status is NOT stored in the db, because it might be refactored + if ( + Job.objects.filter( + project=self, status__in=[Job.Status.QUEUED, Job.Status.STARTED] + ).count() + > 0 + ): + return Project.Status.BUSY + elif not self.project_filename: + return Project.Status.FAILED + else: + return Project.Status.OK -@receiver(pre_delete, sender=Project) -def delete_project(sender: Type[Project], instance: Project, **kwargs: Any) -> None: - if instance.thumbnail_uri: - qfieldcloud.core.utils2.storage.remove_project_thumbail(instance) + def delete(self, *args, **kwargs): + if self.thumbnail_uri: + qfieldcloud.core.utils2.storage.remove_project_thumbail(self) + super().delete(*args, **kwargs) class ProjectCollaborator(models.Model): @@ -1011,6 +1097,12 @@ class Status(models.TextChoices): ) last_feedback = JSONField(null=True) last_modified_pk = models.TextField(null=True) + last_apply_attempt_at = models.DateTimeField(null=True) + last_apply_attempt_by = models.ForeignKey( + User, + on_delete=models.CASCADE, + null=True, + ) created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) created_by = models.ForeignKey( @@ -1018,6 +1110,8 @@ class Status(models.TextChoices): on_delete=models.CASCADE, related_name="uploaded_deltas", ) + old_geom = models.GeometryField(null=True, srid=4326, dim=4) + new_geom = models.GeometryField(null=True, srid=4326, dim=4) def __str__(self): return str(self.id) + ", project: " + str(self.project.id) @@ -1051,8 +1145,11 @@ def method(self): class Job(models.Model): + + objects = InheritanceManager() + class Type(models.TextChoices): - EXPORT = "export", _("Export") + PACKAGE = "package", _("Package") DELTA_APPLY = "delta_apply", _("Delta Apply") PROCESS_PROJECTFILE = "process_projectfile", _("Process QGIS Project File") @@ -1087,14 +1184,14 @@ def short_id(self): return 
str(self.id)[0:8] -class ExportJob(Job): +class PackageJob(Job): def save(self, *args, **kwargs): - self.type = self.Type.EXPORT + self.type = self.Type.PACKAGE return super().save(*args, **kwargs) class Meta: - verbose_name = "Job: export" - verbose_name_plural = "Jobs: export" + verbose_name = "Job: package" + verbose_name_plural = "Jobs: package" class ProcessProjectfileJob(Job): @@ -1140,3 +1237,17 @@ class ApplyJobDelta(models.Model): def __str__(self): return f"{self.apply_job_id}:{self.delta_id}" + + +auditlog.register(User) +auditlog.register(UserAccount) +auditlog.register(Organization) +auditlog.register(OrganizationMember) +auditlog.register(Team) +auditlog.register(TeamMember) +auditlog.register(Project) +auditlog.register(ProjectCollaborator) +auditlog.register(Delta) +auditlog.register(ProcessProjectfileJob) +auditlog.register(PackageJob) +auditlog.register(ApplyJob) diff --git a/docker-app/qfieldcloud/core/permissions_utils.py b/docker-app/qfieldcloud/core/permissions_utils.py index a7640d103..27100efda 100644 --- a/docker-app/qfieldcloud/core/permissions_utils.py +++ b/docker-app/qfieldcloud/core/permissions_utils.py @@ -1,5 +1,6 @@ from typing import List, Union +from deprecated import deprecated from qfieldcloud.core.models import ( Delta, Organization, @@ -202,6 +203,18 @@ def can_read_deltas(user: QfcUser, project: Project) -> bool: ) +def can_apply_pending_deltas_for_project(user: QfcUser, project: Project) -> bool: + return user_has_project_roles( + user, + project, + [ + ProjectCollaborator.Roles.ADMIN, + ProjectCollaborator.Roles.MANAGER, + ], + ) + + +@deprecated("Use `can_set_delta_status_for_project` instead") def can_apply_deltas(user: QfcUser, project: Project) -> bool: return user_has_project_roles( user, @@ -215,6 +228,7 @@ def can_apply_deltas(user: QfcUser, project: Project) -> bool: ) +@deprecated("Use `can_set_delta_status_for_project` instead") def can_overwrite_deltas(user: QfcUser, project: Project) -> bool: return user_has_project_roles( user, @@ -227,6 +241,32 @@ def can_overwrite_deltas(user: QfcUser, project: Project) -> bool: ) +def can_set_delta_status_for_project(user: QfcUser, project: Project) -> bool: + return user_has_project_roles( + user, + project, + [ + ProjectCollaborator.Roles.ADMIN, + ProjectCollaborator.Roles.MANAGER, + ], + ) + + +def can_set_delta_status(user: QfcUser, delta: Delta) -> bool: + if not can_set_delta_status_for_project(user, delta.project): + return False + + if delta.last_status not in ( + Delta.Status.PENDING, + Delta.Status.CONFLICT, + Delta.Status.NOT_APPLIED, + Delta.Status.ERROR, + ): + return False + + return True + + def can_create_delta(user: QfcUser, delta: Delta) -> bool: """Whether the user can store given delta.""" project: Project = delta.project @@ -249,6 +289,7 @@ def can_create_delta(user: QfcUser, delta: Delta) -> bool: return False +@deprecated("Use `can_set_delta_status` instead") def can_retry_delta(user: QfcUser, delta: Delta) -> bool: if not can_apply_deltas(user, delta.project): return False @@ -263,6 +304,7 @@ def can_retry_delta(user: QfcUser, delta: Delta) -> bool: return True +@deprecated("Use `can_set_delta_status` instead") def can_overwrite_delta(user: QfcUser, delta: Delta) -> bool: if not can_overwrite_deltas(user, delta.project): return False @@ -273,6 +315,7 @@ def can_overwrite_delta(user: QfcUser, delta: Delta) -> bool: return True +@deprecated("Use `can_set_delta_status` instead") def can_ignore_delta(user: QfcUser, delta: Delta) -> bool: if not can_apply_deltas(user, 
delta.project): return False @@ -362,7 +405,7 @@ def can_delete_collaborators(user: QfcUser, project: Project) -> bool: ) -def can_read_exportations(user: QfcUser, project: Project) -> bool: +def can_read_packages(user: QfcUser, project: Project) -> bool: return user_has_project_roles( user, project, diff --git a/docker-app/qfieldcloud/core/rest_utils.py b/docker-app/qfieldcloud/core/rest_utils.py index e5683088e..d6aceb9a7 100644 --- a/docker-app/qfieldcloud/core/rest_utils.py +++ b/docker-app/qfieldcloud/core/rest_utils.py @@ -1,5 +1,6 @@ import logging +from django.conf import settings from django.core import exceptions from qfieldcloud.core import exceptions as qfieldcloud_exceptions from rest_framework import exceptions as rest_exceptions @@ -9,6 +10,7 @@ def exception_handler(exc, context): + if isinstance(exc, qfieldcloud_exceptions.QFieldCloudException): pass elif isinstance(exc, rest_exceptions.AuthenticationFailed): @@ -24,19 +26,26 @@ def exception_handler(exc, context): elif isinstance(exc, exceptions.ValidationError): exc = qfieldcloud_exceptions.ValidationError(detail=str(exc)) else: + # When running tests, we rethrow the exception, so we get a full trace to + # help with debugging + if settings.IN_TEST_SUITE: + raise exc + logging.exception(exc) exc = qfieldcloud_exceptions.QFieldCloudException(detail=str(exc)) body = { "code": exc.code, "message": exc.message, - "debug": { + } + + if settings.DEBUG: + body["debug"] = { "view": str(context["view"]), "args": context["args"], "kwargs": context["kwargs"], "request": str(context["request"]), "detail": exc.detail, - }, - } + } logging.exception(exc) diff --git a/docker-app/qfieldcloud/core/serializers.py b/docker-app/qfieldcloud/core/serializers.py index cddd7c0fe..83788a925 100644 --- a/docker-app/qfieldcloud/core/serializers.py +++ b/docker-app/qfieldcloud/core/serializers.py @@ -1,11 +1,18 @@ +import os +from typing import Optional + from django.contrib.auth import get_user_model +from django.contrib.sites.models import Site from qfieldcloud.authentication.models import AuthToken +from qfieldcloud.core import exceptions from qfieldcloud.core.models import ( + ApplyJob, Delta, - ExportJob, Job, Organization, OrganizationMember, + PackageJob, + ProcessProjectfileJob, Project, ProjectCollaborator, Team, @@ -16,6 +23,15 @@ User = get_user_model() +def get_avatar_url(user: User) -> Optional[str]: + if hasattr(user, "useraccount") and user.useraccount.avatar_url: + site = Site.objects.get_current() + port = os.environ.get("WEB_HTTPS_PORT") + port = f":{port}" if port != "443" else "" + return f"https://{site.domain}{port}{user.useraccount.avatar_url}" + return None + + class UserSerializer: class Meta: model = User @@ -62,6 +78,11 @@ class Meta: "is_public", "created_at", "updated_at", + "data_last_packaged_at", + "data_last_updated_at", + "can_repackage", + "needs_repackaging", + "status", "user_role", "user_role_origin", ) @@ -72,7 +93,7 @@ class CompleteUserSerializer(serializers.ModelSerializer): avatar_url = serializers.SerializerMethodField() def get_avatar_url(self, obj): - return obj.useraccount.avatar_url if hasattr(obj, "useraccount") else None + return get_avatar_url(obj) class Meta: model = User @@ -93,7 +114,7 @@ class PublicInfoUserSerializer(serializers.ModelSerializer): username_display = serializers.SerializerMethodField() def get_avatar_url(self, obj): - return obj.useraccount.avatar_url if hasattr(obj, "useraccount") else None + return get_avatar_url(obj) def get_username_display(self, obj): if obj.user_type == 
obj.TYPE_TEAM: @@ -131,7 +152,7 @@ def get_members(self, obj): ] def get_avatar_url(self, obj): - return obj.useraccount.avatar_url if hasattr(obj, "useraccount") else None + return get_avatar_url(obj) class Meta: model = Organization @@ -167,12 +188,12 @@ class Meta: class TokenSerializer(serializers.ModelSerializer): - username = serializers.StringRelatedField(source="user") + username = serializers.CharField(source="user.username") expires_at = serializers.DateTimeField() - user_type = serializers.StringRelatedField(source="user") - first_name = serializers.StringRelatedField(source="user") - last_name = serializers.StringRelatedField(source="user") - full_name = serializers.StringRelatedField(source="user") + user_type = serializers.CharField(source="user.user_type") + first_name = serializers.CharField(source="user.first_name") + last_name = serializers.CharField(source="user.last_name") + full_name = serializers.CharField(source="user.full_name") token = serializers.CharField(source="key") email = serializers.SerializerMethodField() avatar_url = serializers.SerializerMethodField() @@ -181,11 +202,7 @@ def get_email(self, obj): return obj.user.email def get_avatar_url(self, obj): - return ( - obj.user.useraccount.avatar_url - if hasattr(obj.user, "useraccount") - else None - ) + return get_avatar_url(obj.user) class Meta: model = AuthToken @@ -265,6 +282,7 @@ class Meta: class ExportJobSerializer(serializers.ModelSerializer): + # TODO layers used to hold information about layer validity. No longer needed. layers = serializers.SerializerMethodField() status = serializers.SerializerMethodField(initial="STATUS_ERROR") @@ -281,10 +299,15 @@ def get_layers(self, obj): if not obj.feedback: return None - steps = obj.feedback.get("steps", []) + if obj.status != Job.Status.FINISHED: + return None - if len(steps) > 2 and steps[1].get("stage", 1) == 2: - return steps[1]["outputs"]["layer_checks"] + if obj.feedback.get("feedback_version") == "2.0": + return obj.feedback["outputs"]["qgis_layers_data"]["layers_by_id"] + else: + steps = obj.feedback.get("steps", []) + if len(steps) > 2 and steps[1].get("stage", 1) == 2: + return steps[1]["outputs"]["layer_checks"] return None @@ -305,5 +328,126 @@ def get_status(self, obj): return "STATUS_ERROR" class Meta: - model = ExportJob + model = PackageJob fields = ("status", "layers", "output") + + +class JobMixin: + project_id = serializers.PrimaryKeyRelatedField(queryset=Project.objects.all()) + + def to_internal_value(self, data): + internal_data = super().to_internal_value(data) + internal_data["created_by"] = self.context["request"].user + internal_data["project"] = Project.objects.get(pk=data.get("project_id")) + + return internal_data + + def check_create_new_job(self): + ModelClass: Job = self.Meta.model + last_active_job = ( + ModelClass.objects.filter( + project=self.initial_data.get("project_id"), + status__in=[Job.Status.PENDING, Job.Status.QUEUED, Job.Status.STARTED], + ) + .only("id") + .order_by("-started_at", "-created_at") + .last() + ) + + # check if there are other jobs already active + if last_active_job: + raise exceptions.APIError("Job of this type is already running.") + + class Meta: + model = PackageJob + fields = ( + "id", + "created_at", + "created_by", + "finished_at", + "project_id", + "started_at", + "status", + "type", + "updated_at", + "feedback", + "output", + ) + + read_only_fields = ( + "id", + "created_at", + "created_by", + "finished_at", + "started_at", + "status", + "updated_at", + "feedback", + "output", + ) + + 
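+
+# NOTE (illustrative sketch, not part of this module): clients create these jobs via the
+# `jobs` endpoint registered in `urls.py`, roughly:
+#
+#     POST /api/v1/jobs/
+#     {"project_id": "<project-uuid>", "type": "package"}
+#
+# `JobMixin.check_create_new_job()` raises an APIError when a job of the same type is
+# already pending, queued or started for that project, so repeated POSTs do not queue
+# duplicate jobs.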
+class PackageJobSerializer(JobMixin, serializers.ModelSerializer): + def check_create_new_job(self): + super().check_create_new_job() + internal_value = self.to_internal_value(self.initial_data) + + if not internal_value["project"].project_filename: + raise exceptions.NoQGISProjectError() + + class Meta(JobMixin.Meta): + model = PackageJob + + +class ApplyJobSerializer(JobMixin, serializers.ModelSerializer): + class Meta(JobMixin.Meta): + model = ApplyJob + + +class ProcessProjectfileJobSerializer(JobMixin, serializers.ModelSerializer): + class Meta(JobMixin.Meta): + model = ProcessProjectfileJob + + +class JobSerializer(serializers.ModelSerializer): + def check_create_new_job(self): + return True + + def get_fields(self, *args, **kwargs): + fields = super().get_fields(*args, **kwargs) + request = self.context.get("request") + + if request and "job_id" not in request.parser_context.get("kwargs", {}): + fields.pop("output", None) + fields.pop("feedback", None) + fields.pop("layers", None) + + return fields + + class Meta: + model = Job + fields = ( + "id", + "created_at", + "created_by", + "finished_at", + "project_id", + "started_at", + "status", + "type", + "updated_at", + "feedback", + "output", + ) + read_only_fields = ( + "id", + "created_at", + "created_by", + "finished_at", + "started_at", + "status", + "updated_at", + "feedback", + "output", + ) + order_by = "-created_at" diff --git a/docker-app/qfieldcloud/core/tests/test_delta.py b/docker-app/qfieldcloud/core/tests/test_delta.py index b8bdd4ae6..6347dd2b4 100644 --- a/docker-app/qfieldcloud/core/tests/test_delta.py +++ b/docker-app/qfieldcloud/core/tests/test_delta.py @@ -4,12 +4,11 @@ import time import fiona -import requests import rest_framework -from django.http.response import HttpResponseRedirect +from django.http.response import FileResponse, HttpResponse from qfieldcloud.authentication.models import AuthToken from qfieldcloud.core import utils -from qfieldcloud.core.models import Project, ProjectCollaborator, User +from qfieldcloud.core.models import Job, Project, ProjectCollaborator, User from rest_framework import status from rest_framework.test import APITransactionTestCase @@ -19,9 +18,6 @@ class QfcTestCase(APITransactionTestCase): - - DJANGO_BASE_URL = "http://localhost:8000/api/v1/" - def setUp(self): # Create a user self.user1 = User.objects.create_user(username="user1", password="abc123") @@ -55,22 +51,35 @@ def setUp(self): role=ProjectCollaborator.Roles.ADMIN, ) - def tearDown(self): - # Remove credentials - self.client.credentials() + def fail(self, msg: str, job: Job = None): + if job: + msg += f"\n\nOutput:\n================\n{job.output}\n================" + + if job.feedback: + if "error_stack" in job.feedback: + msg += "\n\nError:\n================" + for single_error_stack in job.feedback["error_stack"]: + msg += "\n" + msg += single_error_stack - @classmethod - def tearDownClass(cls): - # Remove all projects avoiding bulk delete in order to use - # the overridden delete() function in the model - for p in Project.objects.all(): - bucket = utils.get_s3_bucket() - prefix = utils.safe_join(f"projects/{p.id}/") - bucket.objects.filter(Prefix=prefix).delete() + msg += f" {job.feedback['error']}\n================" - p.delete() + feedback = json.dumps(job.feedback, indent=2, sort_keys=True) + msg += f"\n\nFeedback:\n================\n{feedback}\n================" + else: + msg += "\n\nFeedback: None" - User.objects.all().delete() + super().fail(msg) + + def assertHttpOk(self, response: HttpResponse): + try: + 
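+            # prefer the parsed JSON body in the assertion message; the except branch
+            # below falls back to the raw response content when the body is not JSON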
self.assertTrue( + rest_framework.status.is_success(response.status_code), response.json() + ) + except Exception: + self.assertTrue( + rest_framework.status.is_success(response.status_code), response.content + ) def upload_project_files(self, project) -> Project: # Verify the original geojson file @@ -563,14 +572,13 @@ def test_change_and_delete_pushed_only_features(self): def get_file_contents(self, project, filename): response = self.client.get(f"/api/v1/files/{project.id}/{filename}/") - self.assertIsInstance(response, HttpResponseRedirect) - - response = requests.get(response.url) - self.assertTrue(status.is_success(response.status_code)) self.assertEqual(get_filename(response), filename) - return response.content + if isinstance(response, FileResponse): + return b"".join(response.streaming_content) + else: + return response.content def upload_deltas(self, project, delta_filename): delta_file = testdata_path(f"delta/deltas/{delta_filename}") @@ -626,11 +634,15 @@ def upload_and_check_deltas( self.assertIn(payload[idx]["status"], status) self.assertEqual(payload[idx]["created_by"], created_by) + job = Job.objects.filter(project=self.project1).latest("updated_at") + for _ in range(10): time.sleep(2) response = self.client.get(uri) + self.assertHttpOk(response) + payload = response.json() payload = sorted(payload, key=lambda k: k["id"]) @@ -641,7 +653,7 @@ def upload_and_check_deltas( break if payload[idx]["status"] in failing_status: - self.fail(f"Got failing status {payload[idx]['status']}") + self.fail(f"Got failing status {payload[idx]['status']}", job=job) return delta_id, status, created_by = final_value @@ -652,4 +664,4 @@ def upload_and_check_deltas( self.assertEqual(payload[idx]["created_by"], created_by) return - self.fail("Worker didn't finish") + self.fail("Worker didn't finish", job=job) diff --git a/docker-app/qfieldcloud/core/tests/test_geodb.py b/docker-app/qfieldcloud/core/tests/test_geodb.py index 65bf7c95d..c0c3ffd61 100644 --- a/docker-app/qfieldcloud/core/tests/test_geodb.py +++ b/docker-app/qfieldcloud/core/tests/test_geodb.py @@ -45,6 +45,3 @@ def test_create_db(self): ) conn.commit() - - def tearDown(self): - User.objects.all().delete() diff --git a/docker-app/qfieldcloud/core/tests/test_organization.py b/docker-app/qfieldcloud/core/tests/test_organization.py index e7c35694b..d08538682 100644 --- a/docker-app/qfieldcloud/core/tests/test_organization.py +++ b/docker-app/qfieldcloud/core/tests/test_organization.py @@ -30,10 +30,6 @@ def setUp(self): organization_owner=self.user1, ) - def tearDown(self): - # Remove credentials - self.client.credentials() - def test_list_members(self): # Set user2 as member of organization1 diff --git a/docker-app/qfieldcloud/core/tests/test_packages.py b/docker-app/qfieldcloud/core/tests/test_packages.py new file mode 100644 index 000000000..509cbe0db --- /dev/null +++ b/docker-app/qfieldcloud/core/tests/test_packages.py @@ -0,0 +1,462 @@ +import json +import logging +import os +import tempfile +import time +from typing import List, Tuple + +import psycopg2 +from django.http import FileResponse +from django.utils import timezone +from qfieldcloud.authentication.models import AuthToken +from qfieldcloud.core.geodb_utils import delete_db_and_role +from qfieldcloud.core.models import Geodb, Job, Project, User +from rest_framework import status +from rest_framework.test import APITransactionTestCase + +from .utils import testdata_path + +logging.disable(logging.CRITICAL) + + +class QfcTestCase(APITransactionTestCase): + def setUp(self): 
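+        # Fixture: two users, an auth token for user1, a private project owned by
+        # user1, and a fresh "test" geodb plus a direct psycopg2 connection so the
+        # tests can create online (PostGIS) layers.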
+ # Create a user + self.user1 = User.objects.create_user(username="user1", password="abc123") + + self.user2 = User.objects.create_user(username="user2", password="abc123") + + self.token1 = AuthToken.objects.get_or_create(user=self.user1)[0] + + # Create a project + self.project1 = Project.objects.create( + name="project1", is_public=False, owner=self.user1 + ) + + try: + delete_db_and_role("test", self.user1.username) + except Exception: + pass + + self.geodb = Geodb.objects.create( + user=self.user1, + dbname="test", + hostname="geodb", + port=5432, + ) + + self.conn = psycopg2.connect( + dbname="test", + user=os.environ.get("GEODB_USER"), + password=os.environ.get("GEODB_PASSWORD"), + host="geodb", + port=5432, + ) + + def tearDown(self): + self.conn.close() + + def upload_files( + self, + token: str, + project: Project, + files: List[Tuple[str, str]], + ): + self.client.credentials(HTTP_AUTHORIZATION=f"Token {token}") + for local_filename, remote_filename in files: + if not local_filename: + continue + + file = testdata_path(local_filename) + response = self.client.post( + f"/api/v1/files/{project.id}/{remote_filename}/", + {"file": open(file, "rb")}, + format="multipart", + ) + self.assertTrue(status.is_success(response.status_code)) + + def wait_for_project_ok_status(self, project: Project, wait_s: int = 30): + jobs = Job.objects.filter(project=project).exclude( + status__in=[Job.Status.FAILED, Job.Status.FINISHED] + ) + + if jobs.count() == 0: + return + + has_no_pending_jobs = False + for _ in range(wait_s): + if ( + Job.objects.filter(project=project, status=Job.Status.PENDING).count() + == 0 + ): + has_no_pending_jobs = True + + time.sleep(1) + + if not has_no_pending_jobs: + self.fail(f"Still pending jobs after waiting for {wait_s} seconds") + + for _ in range(wait_s): + project.refresh_from_db() + if project.status == Project.Status.OK: + return + if project.status == Project.Status.FAILED: + self.fail("Waited for ok status, but got failed") + return + + time.sleep(1) + + self.fail(f"Waited for ok status for {wait_s} seconds") + + def upload_files_and_check_package( + self, + token: str, + project: Project, + files: List[Tuple[str, str]], + expected_files: List[str], + job_create_error: Tuple[int, str] = None, + tempdir: str = None, + invalid_layers: List[str] = [], + ): + self.client.credentials(HTTP_AUTHORIZATION=f"Token {token}") + + self.upload_files(token, project, files) + + before_started_ts = timezone.now() + + response = self.client.post( + "/api/v1/jobs/", + { + "project_id": project.id, + "type": Job.Type.PACKAGE, + }, + ) + + if job_create_error: + self.assertEqual(response.status_code, job_create_error[0]) + self.assertEqual(response.json()["code"], job_create_error[1]) + return + else: + self.assertTrue(status.is_success(response.status_code)) + + job_id = response.json().get("id") + + # Wait for the worker to finish + for _ in range(20): + time.sleep(3) + response = self.client.get(f"/api/v1/jobs/{job_id}/") + payload = response.json() + + if payload["status"] == Job.Status.FINISHED: + response = self.client.get(f"/api/v1/packages/{project.id}/latest/") + package_payload = response.json() + + self.assertLess( + package_payload["packaged_at"], timezone.now().isoformat() + ) + self.assertGreater( + package_payload["packaged_at"], + before_started_ts.isoformat(), + ) + + sorted_downloaded_files = [ + f["name"] + for f in sorted(package_payload["files"], key=lambda k: k["name"]) + ] + sorted_expected_files = sorted(expected_files) + + 
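+                # compare name-sorted lists so the assertion does not depend on the
+                # order in which the package files are listed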
self.assertListEqual(sorted_downloaded_files, sorted_expected_files) + + if tempdir: + for filename in expected_files: + response = self.client.get( + f"/api/v1/packages/{self.project1.id}/latest/files/project_qfield.qgs/" + ) + local_file = os.path.join(tempdir, filename) + + self.assertIsInstance(response, FileResponse) + + with open(local_file, "wb") as f: + for chunk in response.streaming_content: + f.write(chunk) + + for layer_id in package_payload["layers"]: + layer_data = package_payload["layers"][layer_id] + + if layer_id in invalid_layers: + self.assertFalse(layer_data["is_valid"], layer_id) + else: + self.assertTrue(layer_data["is_valid"], layer_id) + + return + elif payload["status"] == Job.Status.FAILED: + print( + "Job feedback:", + json.dumps( + Job.objects.get(id=job_id).feedback, sort_keys=True, indent=2 + ), + ) + self.fail("Worker failed with error") + + self.fail("Worker didn't finish") + + def test_list_files_for_qfield(self): + cur = self.conn.cursor() + cur.execute("CREATE TABLE point (id integer, geometry geometry(point, 2056))") + self.conn.commit() + cur.execute( + "INSERT INTO point(id, geometry) VALUES(1, ST_GeomFromText('POINT(2725505 1121435)', 2056))" + ) + self.conn.commit() + + self.upload_files_and_check_package( + token=self.token1.key, + project=self.project1, + files=[ + ("delta/project2.qgs", "project.qgs"), + ("delta/points.geojson", "points.geojson"), + ], + expected_files=["data.gpkg", "project_qfield.qgs"], + ) + + def test_list_files_missing_qgis_project_file(self): + self.upload_files_and_check_package( + token=self.token1.key, + project=self.project1, + files=[ + ("delta/points.geojson", "points.geojson"), + ], + job_create_error=(400, "no_qgis_project"), + expected_files=[], + ) + + def test_project_never_packaged(self): + self.upload_files( + token=self.token1.key, + project=self.project1, + files=[ + ("delta/project2.qgs", "project.qgs"), + ], + ) + + response = self.client.get(f"/api/v1/packages/{self.project1.id}/latest/") + + self.assertEqual(response.status_code, 400) + self.assertEqual(response.json()["code"], "invalid_job") + + def test_download_file_for_qfield(self): + tempdir = tempfile.mkdtemp() + + self.upload_files_and_check_package( + token=self.token1.key, + project=self.project1, + files=[ + ("delta/nonspatial.csv", "nonspatial.csv"), + ("delta/testdata.gpkg", "testdata.gpkg"), + ("delta/points.geojson", "points.geojson"), + ("delta/polygons.geojson", "polygons.geojson"), + ("delta/project.qgs", "project.qgs"), + ], + expected_files=[ + "data.gpkg", + "project_qfield.qgs", + ], + tempdir=tempdir, + ) + + local_file = os.path.join(tempdir, "project_qfield.qgs") + with open(local_file, "r") as f: + self.assertEqual( + f.readline().strip(), + "", + ) + + def test_list_files_for_qfield_broken_file(self): + self.upload_files( + token=self.token1.key, + project=self.project1, + files=[ + ("delta/broken.qgs", "broken.qgs"), + ], + ) + + response = self.client.post( + "/api/v1/jobs/", + { + "project_id": self.project1.id, + "type": Job.Type.PACKAGE, + }, + ) + + self.assertTrue(status.is_success(response.status_code)) + job_id = response.json().get("id") + + # Wait for the worker to finish + for _ in range(10): + time.sleep(3) + response = self.client.get( + f"/api/v1/jobs/{job_id}/", + ) + if response.json()["status"] == "failed": + return + + self.fail("Worker didn't finish") + + def test_downloaded_file_has_canvas_name(self): + tempdir = tempfile.mkdtemp() + + self.upload_files_and_check_package( + token=self.token1.key, + 
project=self.project1, + files=[ + ("delta/nonspatial.csv", "nonspatial.csv"), + ("delta/testdata.gpkg", "testdata.gpkg"), + ("delta/points.geojson", "points.geojson"), + ("delta/polygons.geojson", "polygons.geojson"), + ("delta/project.qgs", "project.qgs"), + ], + expected_files=[ + "data.gpkg", + "project_qfield.qgs", + ], + tempdir=tempdir, + ) + + local_file = os.path.join(tempdir, "project_qfield.qgs") + with open(local_file, "r") as f: + for line in f: + if 'name="theMapCanvas"' in line: + return + + def test_download_project_with_broken_layer_datasources(self): + self.upload_files_and_check_package( + token=self.token1.key, + project=self.project1, + files=[ + ("delta/points.geojson", "points.geojson"), + ( + "delta/project_broken_datasource.qgs", + "project_broken_datasource.qgs", + ), + ], + expected_files=[ + "data.gpkg", + "project_broken_datasource_qfield.qgs", + ], + invalid_layers=["surfacestructure_35131bca_337c_483b_b09e_1cf77b1dfb16"], + ) + + def test_needs_repackaging_without_online_vector(self): + self.project1.refresh_from_db() + # newly uploaded project should always need to be packaged at least once + self.assertTrue(self.project1.needs_repackaging) + + self.upload_files_and_check_package( + token=self.token1.key, + project=self.project1, + files=[ + ("delta/nonspatial.csv", "nonspatial.csv"), + ("delta/testdata.gpkg", "testdata.gpkg"), + ("delta/points.geojson", "points.geojson"), + ("delta/polygons.geojson", "polygons.geojson"), + ("delta/project.qgs", "project.qgs"), + ], + expected_files=[ + "data.gpkg", + "project_qfield.qgs", + ], + ) + + self.project1.refresh_from_db() + # no longer needs repackaging since geopackage layers cannot change without deltas/reupload + self.assertFalse(self.project1.needs_repackaging) + + self.upload_files( + self.token1.key, + self.project1, + files=[ + ("delta/nonspatial.csv", "nonspatial.csv"), + ], + ) + + self.project1.refresh_from_db() + # a layer file changed, so we need to repackage + self.assertTrue(self.project1.needs_repackaging) + + def test_needs_repackaging_with_online_vector(self): + cur = self.conn.cursor() + cur.execute("CREATE TABLE point (id integer, geometry geometry(point, 2056))") + self.conn.commit() + cur.execute( + "INSERT INTO point(id, geometry) VALUES(1, ST_GeomFromText('POINT(2725505 1121435)', 2056))" + ) + self.conn.commit() + + self.project1.refresh_from_db() + # newly uploaded project should always need to be packaged at least once + self.assertTrue(self.project1.needs_repackaging) + + self.upload_files_and_check_package( + token=self.token1.key, + project=self.project1, + files=[ + ("delta/project2.qgs", "project.qgs"), + ("delta/points.geojson", "points.geojson"), + ], + expected_files=["data.gpkg", "project_qfield.qgs"], + ) + + self.project1.refresh_from_db() + # projects with online vector layer should always show as it needs repackaging + self.assertTrue(self.project1.needs_repackaging) + + def test_has_online_vector_data(self): + cur = self.conn.cursor() + cur.execute("CREATE TABLE point (id integer, geometry geometry(point, 2056))") + self.conn.commit() + + self.upload_files( + self.token1.key, + self.project1, + files=[ + ("delta/project2.qgs", "project.qgs"), + ], + ) + + self.wait_for_project_ok_status(self.project1) + + self.project1.refresh_from_db() + + self.assertTrue(self.project1.has_online_vector_data) + + def test_has_no_online_vector_data(self): + self.upload_files( + self.token1.key, + self.project1, + files=[ + ("delta/project.qgs", "project.qgs"), + ], + ) + + 
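+        # uploading the QGIS project file triggers background processing; wait until
+        # the project reports an OK status before reading the extracted details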
self.wait_for_project_ok_status(self.project1) + + self.project1.refresh_from_db() + + self.assertTrue(self.project1.has_online_vector_data) + + def test_filename_with_whitespace(self): + self.upload_files_and_check_package( + token=self.token1.key, + project=self.project1, + files=[ + ("delta/nonspatial.csv", "nonspatial.csv"), + ("delta/testdata.gpkg", "testdata.gpkg"), + ("delta/points.geojson", "points.geojson"), + ("delta/polygons.geojson", "polygons.geojson"), + ("delta/project.qgs", "project.qgs"), + ], + expected_files=[ + "data.gpkg", + "project_qfield.qgs", + ], + ) diff --git a/docker-app/qfieldcloud/core/tests/test_permission.py b/docker-app/qfieldcloud/core/tests/test_permission.py index 7d3cf1570..847cf5bea 100644 --- a/docker-app/qfieldcloud/core/tests/test_permission.py +++ b/docker-app/qfieldcloud/core/tests/test_permission.py @@ -34,16 +34,6 @@ def setUp(self): ) self.project1.save() - def tearDown(self): - # Remove all projects avoiding bulk delete in order to use - # the overrided delete() function in the model - for p in Project.objects.all(): - p.delete() - - User.objects.all().delete() - # Remove credentials - self.client.credentials() - def test_collaborator_project_takeover(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) diff --git a/docker-app/qfieldcloud/core/tests/test_project.py b/docker-app/qfieldcloud/core/tests/test_project.py index fde2e0ff0..c7c361974 100644 --- a/docker-app/qfieldcloud/core/tests/test_project.py +++ b/docker-app/qfieldcloud/core/tests/test_project.py @@ -22,17 +22,6 @@ def setUp(self): self.user3 = User.objects.create_user(username="user3", password="abc123") self.token3 = AuthToken.objects.get_or_create(user=self.user3)[0] - def tearDown(self): - # Remove all projects avoiding bulk delete in order to use - # the overrided delete() function in the model - for p in Project.objects.all(): - p.delete() - - User.objects.all().delete() - - # Remove credentials - self.client.credentials() - def test_create_project(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) response = self.client.post( diff --git a/docker-app/qfieldcloud/core/tests/test_qfield_file.py b/docker-app/qfieldcloud/core/tests/test_qfield_file.py index 54c6565ac..f20500b7e 100644 --- a/docker-app/qfieldcloud/core/tests/test_qfield_file.py +++ b/docker-app/qfieldcloud/core/tests/test_qfield_file.py @@ -1,3 +1,4 @@ +import json import logging import os import tempfile @@ -5,10 +6,10 @@ import psycopg2 import requests -from django.http.response import HttpResponseRedirect +from django.http.response import HttpResponse, HttpResponseRedirect from qfieldcloud.authentication.models import AuthToken from qfieldcloud.core.geodb_utils import delete_db_and_role -from qfieldcloud.core.models import Geodb, Project, User +from qfieldcloud.core.models import Geodb, Job, PackageJob, Project, User from rest_framework import status from rest_framework.test import APITransactionTestCase @@ -54,14 +55,31 @@ def setUp(self): def tearDown(self): self.conn.close() - # Remove all projects avoiding bulk delete in order to use - # the overrided delete() function in the model - for p in Project.objects.all(): - p.delete() + def fail(self, msg: str, job: Job = None): + if job: + msg += f"\n\nOutput:\n================\n{job.output}\n================" - User.objects.all().delete() - # Remove credentials - self.client.credentials() + if job.feedback: + if "error_stack" in job.feedback: + msg += "\n\nError:\n================" + for single_error_stack in 
job.feedback["error_stack"]: + msg += "\n" + msg += single_error_stack + + msg += f" {job.feedback['error']}\n================" + + feedback = json.dumps(job.feedback, indent=2, sort_keys=True) + msg += f"\n\nFeedback:\n================\n{feedback}\n================" + else: + msg += "\n\nFeedback: None" + + super().fail(msg) + + def assertHttpOk(self, response: HttpResponse): + try: + self.assertTrue(status.is_success(response.status_code), response.json()) + except Exception: + self.assertTrue(status.is_success(response.status_code), response.content) def test_list_files_for_qfield(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) @@ -181,7 +199,10 @@ def test_download_file_for_qfield(self): response = self.client.get( "/api/v1/qfield-files/export/{}/".format(self.project1.id), ) + + self.assertHttpOk(response) payload = response.json() + if payload["status"] == "STATUS_EXPORTED": response = self.client.get( f"/api/v1/qfield-files/{self.project1.id}/project_qfield.qgs/" @@ -209,7 +230,10 @@ def test_download_file_for_qfield(self): ) return elif payload["status"] == "STATUS_ERROR": - self.fail("Worker failed with error") + self.fail( + "Worker failed with error", + job=PackageJob.objects.filter(project=self.project1).last(), + ) self.fail("Worker didn't finish") @@ -237,10 +261,15 @@ def test_list_files_for_qfield_broken_file(self): response = self.client.get( "/api/v1/qfield-files/export/{}/".format(self.project1.id), ) + + self.assertHttpOk(response) + if response.json()["status"] == "STATUS_ERROR": return - self.fail("Worker didn't finish") + self.fail( + "Worker didn't finish", job=Job.objects.filter(project=self.project1).last() + ) def test_downloaded_file_has_canvas_name(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) @@ -307,6 +336,9 @@ def test_downloaded_file_has_canvas_name(self): for line in f: if 'name="theMapCanvas"' in line: return + self.fail( + 'Worker failed, missing .qgs XML attribute: name="theMapCanvas"' + ) elif payload["status"] == "STATUS_ERROR": self.fail("Worker failed with error") @@ -351,6 +383,8 @@ def test_download_project_with_broken_layer_datasources(self): "/api/v1/qfield-files/{}/".format(self.project1.id), ) + self.assertHttpOk(response) + export_payload = response.json() layer_ok = export_payload["layers"][ "points_c2784cf9_c9c3_45f6_9ce5_98a6047e4d6c" @@ -363,7 +397,10 @@ def test_download_project_with_broken_layer_datasources(self): self.assertFalse(layer_failed["valid"], layer_failed["status"]) return elif payload["status"] == "STATUS_ERROR": - self.fail("Worker failed with error") + self.fail( + "Worker failed with error", + job=Job.objects.filter(project=self.project1).last(), + ) self.fail("Worker didn't finish") diff --git a/docker-app/qfieldcloud/core/tests/test_qgis_file.py b/docker-app/qfieldcloud/core/tests/test_qgis_file.py index 634904c27..0caa98dc6 100644 --- a/docker-app/qfieldcloud/core/tests/test_qgis_file.py +++ b/docker-app/qfieldcloud/core/tests/test_qgis_file.py @@ -1,13 +1,14 @@ -import filecmp +import io import logging import tempfile import time +from pathlib import PurePath -import requests -from django.http.response import HttpResponseRedirect +from django.core.management import call_command +from django.http import FileResponse from qfieldcloud.authentication.models import AuthToken from qfieldcloud.core import utils -from qfieldcloud.core.models import Project, User +from qfieldcloud.core.models import Project, User, UserAccount from rest_framework import status from 
rest_framework.test import APITransactionTestCase @@ -33,19 +34,20 @@ def setUp(self): ) self.project1.save() - def tearDown(self): - # Remove all projects avoiding bulk delete in order to use - # the overrided delete() function in the model - for p in Project.objects.all(): - bucket = utils.get_s3_bucket() - prefix = utils.safe_join("projects/{}/".format(p.id)) - bucket.objects.filter(Prefix=prefix).delete() + def get_file_contents(self, project, filename, version=None): + qs = "" + if version: + qs = f"?version={version}" - p.delete() + response = self.client.get(f"/api/v1/files/{project.id}/{filename}/{qs}") - User.objects.all().delete() - # Remove credentials - self.client.credentials() + self.assertTrue(status.is_success(response.status_code)) + self.assertEqual(get_filename(response), PurePath(filename).name) + + if isinstance(response, FileResponse): + return b"".join(response.streaming_content) + else: + return response.content def test_push_file(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) @@ -83,24 +85,10 @@ def test_push_download_file(self): self.assertTrue(status.is_success(response.status_code)) self.assertEqual(Project.objects.get(pk=self.project1.pk).files_count, 1) - # Pull the file - response = self.client.get(f"/api/v1/files/{self.project1.id}/file.txt/") - - self.assertIsInstance(response, HttpResponseRedirect) - - response = requests.get(response.url, stream=True) - - self.assertTrue(status.is_success(response.status_code)) - self.assertEqual(get_filename(response), "file.txt") - - temp_file = tempfile.NamedTemporaryFile() - - with open(temp_file.name, "wb") as f: - for chunk in response.iter_content(): - if chunk: # filter out keep-alive new chunks - f.write(chunk) - - self.assertTrue(filecmp.cmp(temp_file.name, testdata_path("file.txt"))) + self.assertEqual( + self.get_file_contents(self.project1, "file.txt"), + open(testdata_path("file.txt"), "rb").read(), + ) def test_push_download_file_with_path(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) @@ -122,24 +110,17 @@ def test_push_download_file_with_path(self): # Pull the file response = self.client.get( - f"/api/v1/files/{self.project1.id}/foo/bar/file.txt/" + f"/api/v1/files/{self.project1.id}/foo/bar/file.txt/", + stream=True, ) - self.assertIsInstance(response, HttpResponseRedirect) - - response = requests.get(response.url, stream=True) - self.assertTrue(status.is_success(response.status_code)) - self.assertEqual(get_filename(response), "foo/bar/file.txt") - - temp_file = tempfile.NamedTemporaryFile() - - with open(temp_file.name, "wb") as f: - for chunk in response.iter_content(): - if chunk: # filter out keep-alive new chunks - f.write(chunk) + self.assertEqual(get_filename(response), "file.txt") - self.assertTrue(filecmp.cmp(temp_file.name, testdata_path("file.txt"))) + self.assertEqual( + self.get_file_contents(self.project1, "foo/bar/file.txt"), + open(testdata_path("file.txt"), "rb").read(), + ) def test_push_list_file(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) @@ -306,25 +287,15 @@ def test_push_download_specific_version_file(self): self.assertTrue(status.is_success(response.status_code)) self.assertEqual(Project.objects.get(pk=self.project1.pk).files_count, 1) - # Pull the last file (without version parameter) - response = self.client.get(f"/api/v1/files/{self.project1.id}/file.txt/") - - self.assertIsInstance(response, HttpResponseRedirect) - - response = requests.get(response.url, stream=True) - - 
self.assertTrue(status.is_success(response.status_code)) - self.assertEqual(get_filename(response), "file.txt") - - temp_file = tempfile.NamedTemporaryFile() - - with open(temp_file.name, "wb") as f: - for chunk in response.iter_content(): - if chunk: # filter out keep-alive new chunks - f.write(chunk) + self.assertNotEqual( + self.get_file_contents(self.project1, "file.txt"), + open(testdata_path("file.txt"), "rb").read(), + ) - self.assertFalse(filecmp.cmp(temp_file.name, testdata_path("file.txt"))) - self.assertTrue(filecmp.cmp(temp_file.name, testdata_path("file2.txt"))) + self.assertEqual( + self.get_file_contents(self.project1, "file.txt"), + open(testdata_path("file2.txt"), "rb").read(), + ) # List files response = self.client.get("/api/v1/files/{}/".format(self.project1.id)) @@ -335,49 +306,21 @@ def test_push_download_specific_version_file(self): ) # Pull the oldest version - response = self.client.get( - f"/api/v1/files/{self.project1.id}/file.txt/", - {"version": versions[0]["version_id"]}, + self.assertEqual( + self.get_file_contents( + self.project1, "file.txt", versions[0]["version_id"] + ), + open(testdata_path("file.txt"), "rb").read(), ) - self.assertIsInstance(response, HttpResponseRedirect) - - response = requests.get(response.url, stream=True) - - self.assertTrue(status.is_success(response.status_code)) - self.assertEqual(get_filename(response), "file.txt") - - temp_file = tempfile.NamedTemporaryFile() - - with open(temp_file.name, "wb") as f: - for chunk in response.iter_content(): - if chunk: # filter out keep-alive new chunks - f.write(chunk) - - self.assertTrue(filecmp.cmp(temp_file.name, testdata_path("file.txt"))) - # Pull the newest version - response = self.client.get( - f"/api/v1/files/{self.project1.id}/file.txt/", - {"version": versions[1]["version_id"]}, + self.assertEqual( + self.get_file_contents( + self.project1, "file.txt", versions[1]["version_id"] + ), + open(testdata_path("file2.txt"), "rb").read(), ) - self.assertIsInstance(response, HttpResponseRedirect) - - response = requests.get(response.url, stream=True) - - self.assertTrue(status.is_success(response.status_code)) - self.assertEqual(get_filename(response), "file.txt") - - temp_file = tempfile.NamedTemporaryFile() - - with open(temp_file.name, "wb") as f: - for chunk in response.iter_content(): - if chunk: # filter out keep-alive new chunks - f.write(chunk) - - self.assertTrue(filecmp.cmp(temp_file.name, testdata_path("file2.txt"))) - def test_push_delete_file(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) @@ -562,3 +505,96 @@ def test_upload_10mb_file(self): self.assertEqual("bigfile.big", response.json()[0]["name"]) self.assertGreater(response.json()[0]["size"], 10000000) self.assertLess(response.json()[0]["size"], 11000000) + + def test_purge_old_versions_command(self): + """This tests manual purging of old versions with the management command""" + + self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) + + def count_versions(): + """counts the versions in first file of project1""" + file = list(self.project1.files)[0] + return len(file.versions) + + def read_version(n): + """returns the content of version in first file of project1""" + file = list(self.project1.files)[0] + return file.versions[n]._data.get()["Body"].read().decode() + + # Create 20 versions (direct upload to s3) + bucket = utils.get_s3_bucket() + key = f"projects/{self.project1.id}/files/file.txt/" + for i in range(20): + test_file = io.BytesIO(f"v{i}".encode()) + 
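+            # each upload to the same key adds a new object version (assuming the
+            # bucket has versioning enabled), producing 20 versions to purge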
bucket.upload_fileobj(test_file, key) + + # Ensure it worked + self.assertEqual(count_versions(), 20) + self.assertEqual(read_version(0), "v19") + self.assertEqual(read_version(19), "v0") + + # Run management command on other project should have no effect + other = Project.objects.create(name="other", owner=self.user1) + call_command("purge_old_file_versions", "--force", "--projects", other.pk) + self.assertEqual(count_versions(), 20) + + # Run management command should leave 3 + call_command("purge_old_file_versions", "--force") + self.assertEqual(count_versions(), 3) + self.assertEqual(read_version(0), "v19") + self.assertEqual(read_version(2), "v17") + + # Run management command is idempotent + call_command("purge_old_file_versions", "--force") + self.assertEqual(count_versions(), 3) + self.assertEqual(read_version(0), "v19") + self.assertEqual(read_version(2), "v17") + + def test_purge_old_versions(self): + """This tests automated purging of old versions when uploading files""" + + self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) + + apipath = f"/api/v1/files/{self.project1.id}/file.txt/" + + def count_versions(): + """counts the versions in first file of project1""" + file = list(self.project1.files)[0] + return len(file.versions) + + def read_version(n): + """returns the content of version in first file of project1""" + file = list(self.project1.files)[0] + return file.versions[n]._data.get()["Body"].read().decode() + + # As PRO account, 10 version should be kept out of 20 + self.user1.useraccount.account_type = UserAccount.TYPE_PRO + self.user1.useraccount.save() + for i in range(20): + test_file = io.StringIO(f"v{i}") + self.client.post(apipath, {"file": test_file}, format="multipart") + self.assertEqual(count_versions(), 10) + self.assertEqual(read_version(0), "v19") + self.assertEqual(read_version(9), "v10") + + # As COMMUNITY account, 3 version should be kept + self.user1.useraccount.account_type = UserAccount.TYPE_COMMUNITY + self.user1.useraccount.save() + + # But first we check that uploading to another project doesn't affect a projct + otherproj = Project.objects.create(name="other", owner=self.user1) + otherpath = f"/api/v1/files/{otherproj.id}/file.txt/" + self.client.post(otherpath, {"file": io.StringIO("v1")}, format="multipart") + self.assertEqual(count_versions(), 10) + self.assertEqual(read_version(0), "v19") + self.assertEqual(read_version(9), "v10") + + # As COMMUNITY account, 3 version should be kept out of 20 new ones + self.user1.useraccount.account_type = UserAccount.TYPE_COMMUNITY + self.user1.useraccount.save() + for i in range(20, 40): + test_file = io.StringIO(f"v{i}") + self.client.post(apipath, {"file": test_file}, format="multipart") + self.assertEqual(count_versions(), 3) + self.assertEqual(read_version(0), "v39") + self.assertEqual(read_version(2), "v37") diff --git a/docker-app/qfieldcloud/core/tests/test_queryset.py b/docker-app/qfieldcloud/core/tests/test_queryset.py index 7ba8e1db2..a1bf44491 100644 --- a/docker-app/qfieldcloud/core/tests/test_queryset.py +++ b/docker-app/qfieldcloud/core/tests/test_queryset.py @@ -115,10 +115,6 @@ def setUp(self): role=ProjectCollaborator.Roles.EDITOR, ) - def tearDown(self): - # Remove credentials - self.client.credentials() - def test_get_users(self): # should get all the available users queryset = querysets_utils.get_users("") diff --git a/docker-app/qfieldcloud/core/tests/test_user.py b/docker-app/qfieldcloud/core/tests/test_user.py index 9e9c60091..59380a3f9 100644 --- 
a/docker-app/qfieldcloud/core/tests/test_user.py +++ b/docker-app/qfieldcloud/core/tests/test_user.py @@ -67,11 +67,8 @@ def setUp(self): is_public=True, ).save() - def tearDown(self): - # Remove credentials - self.client.credentials() - def test_login(self): + response = self.client.post( "/api/v1/auth/login/", {"username": "user1", "password": "abc123"} ) diff --git a/docker-app/qfieldcloud/core/urls.py b/docker-app/qfieldcloud/core/urls.py index 9ec369c9e..63c2df070 100644 --- a/docker-app/qfieldcloud/core/urls.py +++ b/docker-app/qfieldcloud/core/urls.py @@ -3,7 +3,9 @@ collaborators_views, deltas_views, files_views, + jobs_views, members_views, + package_views, projects_views, qfield_files_views, status_views, @@ -13,6 +15,29 @@ router = DefaultRouter() router.register(r"projects", projects_views.ProjectViewSet, basename="project") +router.register(r"jobs", jobs_views.JobViewSet, basename="jobs") + +""" +TODO future URL refactor +projects/ +projects/ +projects// +projects//files/ +projects//files// +projects//jobs/ +projects//jobs// +projects//packages/ +projects//packages/latest/files/ +projects//packages/latest/files// +projects//deltas/ +projects//deltas// +projects//collaborators/ +organizations/ +organizations// +organizations//members/ +organizations//teams/ +organizations//teams//members/ +""" urlpatterns = [ path("projects/public/", projects_views.PublicProjectsListView.as_view()), @@ -37,13 +62,32 @@ files_views.DownloadPushDeleteFileView.as_view(), name="project_file_download", ), + path( + "files/meta//", + files_views.ProjectMetafilesView.as_view(), + name="project_metafiles", + ), + path( + "files/public/", + files_views.PublicFilesView.as_view(), + name="public_files", + ), + path( + "packages//latest/", + package_views.LatestPackageView.as_view(), + ), + path( + "packages//latest/files//", + package_views.LatestPackageDownloadFilesView.as_view(), + ), path("qfield-files//", qfield_files_views.ListFilesView.as_view()), path( "qfield-files///", qfield_files_views.DownloadFileView.as_view(), ), path( - "qfield-files/export//", qfield_files_views.ExportView.as_view() + "qfield-files/export//", + qfield_files_views.PackageView.as_view(), ), path("members//", members_views.ListCreateMembersView.as_view()), path( diff --git a/docker-app/qfieldcloud/core/utils.py b/docker-app/qfieldcloud/core/utils.py index 6e8ab5b01..96c26b7f9 100644 --- a/docker-app/qfieldcloud/core/utils.py +++ b/docker-app/qfieldcloud/core/utils.py @@ -5,7 +5,7 @@ import posixpath from datetime import datetime from pathlib import PurePath -from typing import IO, Iterable, List, NamedTuple, Optional, TypedDict, Union +from typing import IO, Iterable, List, NamedTuple, Optional, Union import boto3 import jsonschema @@ -23,10 +23,11 @@ class S3PrefixPath(NamedTuple): class S3Object(NamedTuple): - Key: str - LastModified: datetime - Size: int - ETag: str + name: str + key: str + last_modified: datetime + size: int + etag: str class S3ObjectVersion: @@ -62,6 +63,10 @@ def e_tag(self) -> str: def is_latest(self) -> bool: return self._data.is_latest + @property + def display(self) -> str: + return self.last_modified.strftime("v%Y%m%d%H%M%S") + class S3ObjectWithVersions(NamedTuple): latest: S3ObjectVersion @@ -247,37 +252,38 @@ def get_deltafile_schema_validator() -> jsonschema.Draft7Validator: def get_s3_project_size(project_id: str) -> int: - """Return the size in MiB of the project on the storage, included the - exported files""" + """Return the size in MiB of the project on the storage, including the + 
exported files and their versions""" bucket = get_s3_bucket() - prefix = "projects/{}/".format(project_id) - total_size = 0 + prefix = f"projects/{project_id}/" - for obj in bucket.objects.filter(Prefix=prefix): - total_size += obj.size + total_size = 0 + for version in bucket.object_versions.filter(Prefix=prefix): + total_size += version.size or 0 return round(total_size / (1024 * 1024), 3) -class ProjectFileVersion(TypedDict): - name: str - size: int - sha256: str - last_modified: datetime - is_latest: bool +def get_project_files_with_versions(project_id: str) -> Iterable[S3ObjectWithVersions]: + """Returns a list of files and their versions. + Args: + project_id (str): the project id -class ProjectFile(TypedDict): - name: str - size: int - sha256: str - last_modified: datetime - versions: List[ProjectFileVersion] + Returns: + Iterable[S3ObjectWithVersions]: the list of files + """ + bucket = get_s3_bucket() + prefix = f"projects/{project_id}/files/" + return list_files_with_versions(bucket, prefix, strip_prefix=True) -def get_project_files_with_versions(project_id: str) -> Iterable[S3ObjectWithVersions]: + +def get_project_file_with_versions( + project_id: str, filename: str +) -> Optional[S3ObjectWithVersions]: """Returns a list of files and their versions. Args: @@ -287,9 +293,29 @@ def get_project_files_with_versions(project_id: str) -> Iterable[S3ObjectWithVer Iterable[S3ObjectWithVersions]: the list of files """ bucket = get_s3_bucket() - prefix = f"projects/{project_id}/files/" + prefix = f"projects/{project_id}/files/{filename}" + files = [ + f + for f in list_files_with_versions(bucket, prefix, strip_prefix=True) + if f.latest.key == prefix + ] - return list_files_with_versions(bucket, prefix, strip_prefix=True) + return files[0] if files else None + + +def get_project_package_files(project_id: str) -> Iterable[S3Object]: + """Returns a list of package files. 
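+
+    Package files are the artifacts produced by a package job and are stored under
+    the project's `export/` prefix on the object storage.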
+ + Args: + project_id (str): the project id + + Returns: + Iterable[S3ObjectWithVersions]: the list of package files + """ + bucket = get_s3_bucket() + prefix = f"projects/{project_id}/export/" + + return list_files(bucket, prefix, strip_prefix=True) def get_project_files_count(project_id: str) -> int: @@ -301,6 +327,15 @@ def get_project_files_count(project_id: str) -> int: return len(files) +def get_project_package_files_count(project_id: str) -> int: + """Returns the number of package files within a project.""" + bucket = get_s3_bucket() + prefix = f"projects/{project_id}/export/" + files = list(bucket.objects.filter(Prefix=prefix)) + + return len(files) + + def get_s3_object_url( key: str, bucket: mypy_boto3_s3.service_resource.Bucket = get_s3_bucket() ) -> str: @@ -313,7 +348,29 @@ def get_s3_object_url( Returns: str: URL """ - return f"{settings.STORAGE_ENDPOINT_URL_EXTERNAL}/{bucket.name}/{key}" + return f"{settings.STORAGE_ENDPOINT_URL}/{bucket.name}/{key}" + + +def list_files( + bucket: mypy_boto3_s3.service_resource.Bucket, + prefix: str, + strip_prefix: bool = True, +) -> Iterable[S3Object]: + """Iterator that lists a bucket's objects under prefix.""" + for f in bucket.objects.filter(Prefix=prefix): + if strip_prefix: + start_idx = len(prefix) + name = f.key[start_idx:] + else: + name = f.key + + yield S3Object( + name=name, + key=f.key, + last_modified=f.last_modified, + size=f.size, + etag=f.e_tag, + ) def list_versions( diff --git a/docker-app/qfieldcloud/core/utils2/__init__.py b/docker-app/qfieldcloud/core/utils2/__init__.py new file mode 100644 index 000000000..dfd92040c --- /dev/null +++ b/docker-app/qfieldcloud/core/utils2/__init__.py @@ -0,0 +1,6 @@ +import qfieldcloud.core.utils2.audit as audit +import qfieldcloud.core.utils2.db as db +import qfieldcloud.core.utils2.jobs as jobs +import qfieldcloud.core.utils2.storage as storage + +__all__ = ["audit", "db", "jobs", "storage"] diff --git a/docker-app/qfieldcloud/core/utils2/audit.py b/docker-app/qfieldcloud/core/utils2/audit.py new file mode 100644 index 000000000..9922b6f5e --- /dev/null +++ b/docker-app/qfieldcloud/core/utils2/audit.py @@ -0,0 +1,37 @@ +import json +from typing import Any, Dict, List, Union + +from auditlog.models import LogEntry +from django.contrib.auth.models import User +from django_currentuser.middleware import get_current_authenticated_user + + +def audit( + instance, + action: LogEntry.Action, + changes: Union[Dict[str, Any], List[Any], str] = None, + actor: User = None, + remote_addr: str = None, + additional_data: Any = None, +): + changes_json = None + + try: + if changes is not None: + changes_json = json.dumps(changes) + except Exception: + changes_json = json.dumps(str(changes)) + + if actor is None: + actor = get_current_authenticated_user() + + actor_id = actor.pk if actor else None + + return LogEntry.objects.log_create( + instance, + action=action, + changes=changes_json, + actor_id=actor_id, + remote_addr=remote_addr, + additional_data=additional_data, + ) diff --git a/docker-app/qfieldcloud/core/utils2/jobs.py b/docker-app/qfieldcloud/core/utils2/jobs.py index 455ec0674..4d4bb40dd 100644 --- a/docker-app/qfieldcloud/core/utils2/jobs.py +++ b/docker-app/qfieldcloud/core/utils2/jobs.py @@ -1,34 +1,40 @@ import logging -from typing import Optional +from typing import List, Optional -from qfieldcloud.core.models import ApplyJob, Delta, Job +import qfieldcloud.core.models as models +from django.db.models import Q +from qfieldcloud.core import exceptions logger = 
logging.getLogger(__name__) def apply_deltas( - project, user, project_file, overwrite_conflicts, delta_ids=None -) -> Optional[ApplyJob]: + project: "models.Project", + user: "models.User", + project_file: str, + overwrite_conflicts: bool, + delta_ids: List[str] = None, +) -> Optional["models.ApplyJob"]: """Apply a deltas""" logger.info( f"Requested apply_deltas on {project} with {project_file}; overwrite_conflicts: {overwrite_conflicts}; delta_ids: {delta_ids}" ) - apply_jobs = ApplyJob.objects.filter( + apply_jobs = models.ApplyJob.objects.filter( project=project, status=[ - Job.Status.PENDING, - Job.Status.QUEUED, + models.Job.Status.PENDING, + models.Job.Status.QUEUED, ], ) if len(apply_jobs) > 0: return apply_jobs[0] - pending_deltas = Delta.objects.filter( + pending_deltas = models.Delta.objects.filter( project=project, - last_status=Delta.Status.PENDING, + last_status=models.Delta.Status.PENDING, ) if delta_ids is not None: @@ -37,10 +43,52 @@ def apply_deltas( if len(pending_deltas) == 0: return None - apply_job = ApplyJob.objects.create( + apply_job = models.ApplyJob.objects.create( project=project, created_by=user, overwrite_conflicts=overwrite_conflicts, ) return apply_job + + +def repackage(project: "models.Project", user: "models.User") -> "models.PackageJob": + """Returns an unfinished or freshly created package job. + + Checks if there is already an unfinished package job and returns it, + or creates a new package job and returns it. + """ + if not project.project_filename: + raise exceptions.NoQGISProjectError() + + # Check if active package job already exists + query = Q(project=project) & ( + Q(status=models.PackageJob.Status.PENDING) + | Q(status=models.PackageJob.Status.QUEUED) + | Q(status=models.PackageJob.Status.STARTED) + ) + + if models.PackageJob.objects.filter(query).count(): + return models.PackageJob.objects.get(query) + + package_job = models.PackageJob.objects.create(project=project, created_by=user) + + return package_job + + +def repackage_if_needed( + project: "models.Project", user: "models.User" +) -> "models.PackageJob": + if not project.project_filename: + raise exceptions.NoQGISProjectError() + + if project.needs_repackaging: + package_job = repackage(project, user) + else: + package_job = ( + models.PackageJob.objects.filter(project=project) + .order_by("started_at") + .get() + ) + + return package_job diff --git a/docker-app/qfieldcloud/core/utils2/storage.py b/docker-app/qfieldcloud/core/utils2/storage.py index c238502ff..aeeb28d45 100644 --- a/docker-app/qfieldcloud/core/utils2/storage.py +++ b/docker-app/qfieldcloud/core/utils2/storage.py @@ -1,8 +1,104 @@ from __future__ import annotations -from typing import IO +import logging +import os +from pathlib import PurePath +from typing import IO, List +import qfieldcloud.core.models import qfieldcloud.core.utils +from django.conf import settings +from django.core.files.base import ContentFile +from django.http import FileResponse, HttpRequest +from django.http.response import HttpResponse, HttpResponseBase + +logger = logging.getLogger(__name__) + +QFIELDCLOUD_HOST = os.environ.get("QFIELDCLOUD_HOST", None) +WEB_HTTPS_PORT = os.environ.get("WEB_HTTPS_PORT", None) + + +def staticfile_prefix(project: "Project", filename: str) -> str: # noqa: F821 + """Returns the staticfile dir where the file belongs to or empty string if it does not. 
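+
+    For example, with the default configuration ("DCIM" is the only staticfile dir),
+    "DCIM/photo.jpg" resolves to "DCIM" while "layers.gpkg" resolves to "".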
+ + Args: + project (Project): project to check + filename (str): filename to check + + Returns: + str: the staticfile dir or empty string if no match found + """ + for staticfile_dir in project.staticfile_dirs: + if filename.startswith(staticfile_dir): + return staticfile_dir + + return "" + + +def file_response( + request: HttpRequest, + key: str, + presigned: bool = False, + expires: int = 60, + version: str = None, + as_attachment: bool = False, +) -> HttpResponseBase: + url = "" + filename = PurePath(key).name + extra_params = {} + + if version is not None: + extra_params["VersionId"] = version + + # check if we are in NGINX proxy + http_host = request.META.get("HTTP_HOST", "") + https_port = http_host.split(":")[-1] if ":" in http_host else "443" + + if https_port == WEB_HTTPS_PORT and not settings.IN_TEST_SUITE: + if presigned: + if as_attachment: + extra_params["ResponseContentType"] = "application/force-download" + extra_params[ + "ResponseContentDisposition" + ] = f'attachment;filename="{filename}"' + + url = qfieldcloud.core.utils.get_s3_client().generate_presigned_url( + "get_object", + Params={ + **extra_params, + "Key": key, + "Bucket": qfieldcloud.core.utils.get_s3_bucket().name, + }, + ExpiresIn=expires, + HttpMethod="GET", + ) + else: + url = qfieldcloud.core.utils.get_s3_object_url(key) + + # Let's NGINX handle the redirect to the storage and streaming the file contents back to the client + response = HttpResponse() + response["X-Accel-Redirect"] = "/storage-download/" + response["redirect_uri"] = url + + return response + elif settings.DEBUG or settings.IN_TEST_SUITE: + return_file = ContentFile(b"") + qfieldcloud.core.utils.get_s3_bucket().download_fileobj( + key, + return_file, + extra_params, + ) + + return FileResponse( + return_file.open(), + as_attachment=as_attachment, + filename=filename, + content_type="text/html", + ) + + raise Exception( + "Expected to either run behind nginx proxy, debug mode or within a test suite." + ) def upload_user_avatar(user: "User", file: IO, mimetype: str) -> str: # noqa: F821 @@ -96,3 +192,100 @@ def remove_project_thumbail(project: "Project") -> None: # noqa: F821 bucket = qfieldcloud.core.utils.get_s3_bucket() key = project.thumbnail_uri bucket.object_versions.filter(Prefix=key).delete() + + +def purge_old_file_versions(project: "Project") -> None: # noqa: F821 + """ + Deletes old versions of all files in the given project. Will keep __3__ + versions for COMMUNITY user accounts, and __10__ versions for PRO user + accounts + """ + + logger.info(f"Cleaning up old files for {project}") + + # Determine account type + account_type = project.owner.useraccount.account_type + if account_type == qfieldcloud.core.models.UserAccount.TYPE_COMMUNITY: + keep_count = 3 + elif account_type == qfieldcloud.core.models.UserAccount.TYPE_PRO: + keep_count = 10 + else: + raise NotImplementedError(f"Unknown account type {account_type}") + + logger.debug(f"Keeping {keep_count} versions") + + # Process file by file + for file in qfieldcloud.core.utils.get_project_files_with_versions(project.pk): + + # Skip the newest N + old_versions_to_purge = sorted( + file.versions, key=lambda v: v.last_modified, reverse=True + )[keep_count:] + + # Debug print + logger.debug( + f'Purging {len(old_versions_to_purge)} out of {len(file.versions)} old versions for "{file.latest.name}"...' 
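Note on file_response() defined above: behind the NGINX proxy it answers with an internal X-Accel-Redirect so NGINX streams the object from storage, while in DEBUG or test runs it downloads the object itself. A hedged sketch of how a view is expected to call it, loosely mirroring DownloadPushDeleteFileView.get() further down in files_views.py; the view function below is illustrative only:

    from django.http import HttpRequest
    from django.http.response import HttpResponseBase
    from qfieldcloud.core import utils
    from qfieldcloud.core.utils2 import storage

    def download_project_file(
        request: HttpRequest, projectid: str, filename: str
    ) -> HttpResponseBase:
        # Build the storage key and let file_response() decide between an
        # NGINX internal redirect and a direct (debug/test) download.
        key = utils.safe_join(f"projects/{projectid}/files/", filename)
        return storage.file_response(
            request,
            key,
            presigned=True,
            expires=600,
            version=request.GET.get("version"),
            as_attachment=True,
        )
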
+ ) + + # Remove the N oldest + for old_version in old_versions_to_purge: + if old_version.is_latest: + # This is not supposed to happen, as versions were sorted above, + # but leaving it here as a security measure in case version + # ordering changes for some reason. + raise Exception("Trying to delete latest version") + # TODO: any way to batch those ? will probaby get slow on production + old_version._data.delete() + # TODO: audit ? take implementation from files_views.py:211 + + +def delete_file_version( + project: "Project", # noqa: F821 + filename: str, + version_id: str, + include_older: bool = False, +) -> List[qfieldcloud.core.utils.S3ObjectVersion]: + """Deletes a specific version of given file. + + Args: + project (Project): project the file belongs to + filename (str): filename the version belongs to + version_id (str): version id to delete + include_older (bool, optional): when True, versions older than the passed `version` will also be deleted. If the version_id is the latest version of a file, this parameter will treated as False. Defaults to False. + + Returns: + int: the number of versions deleted + """ + file = qfieldcloud.core.utils.get_project_file_with_versions(project.id, filename) + + if not file: + raise Exception("No file with such name in the given project found") + + if file.latest.id == version_id: + include_older = False + + versions_to_delete = [] + + for file_version in file.versions: + if file_version.id == version_id: + versions_to_delete.append(file_version) + + if include_older: + continue + else: + break + + if versions_to_delete: + assert ( + include_older + ), "We should continue to loop only if `include_older` is True" + assert ( + versions_to_delete[-1].last_modified > file_version.last_modified + ), "Assert the other versions are really older than the requested one" + + versions_to_delete.append(file_version) + + for file_version in versions_to_delete: + file_version._data.delete() + + return versions_to_delete diff --git a/docker-app/qfieldcloud/core/views/files_views.py b/docker-app/qfieldcloud/core/views/files_views.py index eaa1d62b5..efe22498e 100644 --- a/docker-app/qfieldcloud/core/views/files_views.py +++ b/docker-app/qfieldcloud/core/views/files_views.py @@ -1,8 +1,12 @@ from pathlib import PurePath -from django.http.response import HttpResponseRedirect +import qfieldcloud.core.utils2 as utils2 +from django.utils import timezone from qfieldcloud.core import exceptions, permissions_utils, utils from qfieldcloud.core.models import ProcessProjectfileJob, Project +from qfieldcloud.core.utils import S3ObjectVersion, get_project_file_with_versions +from qfieldcloud.core.utils2.audit import LogEntry, audit +from qfieldcloud.core.utils2.storage import purge_old_file_versions, staticfile_prefix from rest_framework import permissions, status, views from rest_framework.parsers import MultiPartParser from rest_framework.response import Response @@ -65,6 +69,7 @@ def get(self, request, projectid): "version_id": version.version_id, "last_modified": last_modified, "is_latest": version.is_latest, + "display": S3ObjectVersion(version.key, version).display, } ) @@ -102,27 +107,19 @@ class DownloadPushDeleteFileView(views.APIView): def get(self, request, projectid, filename): Project.objects.get(id=projectid) - extra_args = {} + version = None if "version" in self.request.query_params: version = self.request.query_params["version"] - extra_args["VersionId"] = version - - filekey = utils.safe_join("projects/{}/files/".format(projectid), filename) - - url = 
utils.get_s3_client().generate_presigned_url( - "get_object", - Params={ - **extra_args, - "Key": filekey, - "Bucket": utils.get_s3_bucket().name, - "ResponseContentType": "application/force-download", - "ResponseContentDisposition": f'attachment;filename="{filename}"', - }, - ExpiresIn=600, - HttpMethod="GET", - ) - return HttpResponseRedirect(url) + key = utils.safe_join("projects/{}/files/".format(projectid), filename) + return utils2.storage.file_response( + request, + key, + presigned=True, + expires=600, + version=version, + as_attachment=True, + ) def post(self, request, projectid, filename, format=None): project = Project.objects.get(id=projectid) @@ -130,23 +127,19 @@ def post(self, request, projectid, filename, format=None): if "file" not in request.data: raise exceptions.EmptyContentError() + is_qgis_project_file = utils.is_qgis_project_file(filename) # check only one qgs/qgz file per project - if utils.is_qgis_project_file(filename): - if project.project_filename is not None and PurePath(filename) != PurePath( - project.project_filename - ): - raise exceptions.MultipleProjectsError( - "Only one QGIS project per project allowed" - ) - else: - project.project_filename = filename - project.save() - ProcessProjectfileJob.objects.create( - project=project, created_by=self.request.user - ) + if ( + is_qgis_project_file + and project.project_filename is not None + and PurePath(filename) != PurePath(project.project_filename) + ): + raise exceptions.MultipleProjectsError( + "Only one QGIS project per project allowed" + ) request_file = request.FILES.get("file") - + old_object = get_project_file_with_versions(project.id, filename) sha256sum = utils.get_sha256(request_file) bucket = utils.get_s3_bucket() @@ -155,6 +148,39 @@ def post(self, request, projectid, filename, format=None): bucket.upload_fileobj(request_file, key, ExtraArgs={"Metadata": metadata}) + new_object = get_project_file_with_versions(project.id, filename) + + assert new_object + + if staticfile_prefix(project, filename) == "" and ( + is_qgis_project_file or project.project_filename is not None + ): + if is_qgis_project_file: + project.project_filename = filename + + ProcessProjectfileJob.objects.create( + project=project, created_by=self.request.user + ) + + project.data_last_updated_at = timezone.now() + project.save() + + if old_object: + audit( + project, + LogEntry.Action.UPDATE, + changes={filename: [old_object.latest.e_tag, new_object.latest.e_tag]}, + ) + else: + audit( + project, + LogEntry.Action.CREATE, + changes={filename: [None, new_object.latest.e_tag]}, + ) + + # Delete the old file versions + purge_old_file_versions(project) + return Response(status=status.HTTP_201_CREATED) def delete(self, request, projectid, filename): @@ -162,10 +188,40 @@ def delete(self, request, projectid, filename): key = utils.safe_join(f"projects/{projectid}/files/", filename) bucket = utils.get_s3_bucket() + old_object = get_project_file_with_versions(project.id, filename) + + assert old_object + bucket.object_versions.filter(Prefix=key).delete() if utils.is_qgis_project_file(filename): project.project_filename = None project.save() + audit( + project, + LogEntry.Action.DELETE, + changes={filename: [old_object.latest.e_tag, None]}, + ) + return Response(status=status.HTTP_200_OK) + + +class ProjectMetafilesView(views.APIView): + parser_classes = [MultiPartParser] + permission_classes = [ + permissions.IsAuthenticated, + DownloadPushDeleteFileViewPermissions, + ] + + def get(self, request, projectid, filename): + key = 
utils.safe_join("projects/{}/meta/".format(projectid), filename) + return utils2.storage.file_response(request, key, presigned=True) + + +class PublicFilesView(views.APIView): + parser_classes = [MultiPartParser] + permission_classes = [] + + def get(self, request, filename): + return utils2.storage.file_response(request, filename) diff --git a/docker-app/qfieldcloud/core/views/jobs_views.py b/docker-app/qfieldcloud/core/views/jobs_views.py new file mode 100644 index 000000000..68a0c53d5 --- /dev/null +++ b/docker-app/qfieldcloud/core/views/jobs_views.py @@ -0,0 +1,73 @@ +from django.core.exceptions import ObjectDoesNotExist +from qfieldcloud.core import permissions_utils, serializers +from qfieldcloud.core.models import Job, Project +from rest_framework import generics, permissions, viewsets +from rest_framework.response import Response +from rest_framework.status import HTTP_201_CREATED + + +class JobPermissions(permissions.BasePermission): + def has_permission(self, request, view): + project_id = permissions_utils.get_param_from_request(request, "project_id") + + try: + project = Project.objects.get(id=project_id) + except ObjectDoesNotExist: + return False + + return permissions_utils.can_read_files(request.user, project) + + +class JobViewSet(viewsets.ReadOnlyModelViewSet): + + serializer_class = serializers.JobSerializer + lookup_url_kwarg = "job_id" + permission_classes = [permissions.IsAuthenticated] + + def get_serializer_by_job_type(self, job_type, *args, **kwargs): + if job_type == Job.Type.DELTA_APPLY: + return serializers.ApplyJobSerializer(*args, **kwargs) + elif job_type == Job.Type.PACKAGE: + return serializers.PackageJobSerializer(*args, **kwargs) + elif job_type == Job.Type.PROCESS_PROJECTFILE: + return serializers.ProcessProjectfileJobSerializer(*args, **kwargs) + else: + raise NotImplementedError(f'Unknown job type "{job_type}"') + + def get_serializer(self, *args, **kwargs): + kwargs.setdefault("context", self.get_serializer_context()) + + if self.action in ("create"): + job_type = kwargs["data"]["type"] + + return self.get_serializer_by_job_type(job_type, *args, **kwargs) + + if self.action in ("retrieve",): + job_type = args[0].type + + return self.get_serializer_by_job_type(job_type, *args, **kwargs) + + return serializers.JobSerializer(*args, **kwargs) + + def create(self, request, *args, **kwargs): + serializer = self.get_serializer(data=request.data) + + if bool(int(request.data.get("force", 0))): + serializer.is_valid(raise_exception=True) + serializer.save() + else: + serializer.is_valid(raise_exception=True) + serializer.check_create_new_job() + serializer.save() + + return Response(serializer.data, status=HTTP_201_CREATED) + + def get_queryset(self): + qs = Job.objects.select_subclasses() + + if self.action == "list": + project_id = self.request.data.get("project_id") + project = generics.get_object_or_404(Project, pk=project_id) + qs = qs.filter(project=project) + + return qs diff --git a/docker-app/qfieldcloud/core/views/package_views.py b/docker-app/qfieldcloud/core/views/package_views.py new file mode 100644 index 000000000..805023672 --- /dev/null +++ b/docker-app/qfieldcloud/core/views/package_views.py @@ -0,0 +1,102 @@ +import qfieldcloud.core.utils2 as utils2 +from django.core.exceptions import ObjectDoesNotExist +from qfieldcloud.core import exceptions, permissions_utils, utils +from qfieldcloud.core.models import PackageJob, Project +from qfieldcloud.core.utils import check_s3_key, get_project_package_files +from rest_framework import permissions, 
views +from rest_framework.response import Response + + +class PackageViewPermissions(permissions.BasePermission): + def has_permission(self, request, view): + try: + project_id = request.parser_context["kwargs"].get("project_id") + project = Project.objects.get(pk=project_id) + return permissions_utils.can_read_project(request.user, project) + except ObjectDoesNotExist: + return False + + +class LatestPackageView(views.APIView): + + permission_classes = [permissions.IsAuthenticated, PackageViewPermissions] + + def get(self, request, project_id): + """Get last project package status and file list.""" + project = Project.objects.get(id=project_id) + last_job = ( + PackageJob.objects.filter( + project=project, status=PackageJob.Status.FINISHED + ) + .order_by("started_at") + .last() + ) + + # Check if the project was packaged at least once + if not last_job: + raise exceptions.InvalidJobError( + "Packaging has never been triggered or successful for this project." + ) + + files = [] + for f in get_project_package_files(project_id): + files.append( + { + "name": f.name, + "size": f.size, + "last_modified": f.last_modified, + "sha256": check_s3_key(f.key), + } + ) + + if not files: + raise exceptions.InvalidJobError("Empty project package.") + + if last_job.feedback.get("feedback_version") == "2.0": + layers = last_job.feedback["outputs"]["qgis_layers_data"]["layers_by_id"] + else: + steps = last_job.feedback.get("steps", []) + layers = ( + steps[1]["outputs"]["layer_checks"] + if len(steps) > 2 and steps[1].get("stage", 1) == 2 + else None + ) + + return Response( + { + "files": files, + "layers": layers, + "status": last_job.status, + "package_id": last_job.pk, + "packaged_at": last_job.project.data_last_packaged_at, + "data_last_updated_at": last_job.project.data_last_updated_at, + } + ) + + +class LatestPackageDownloadFilesView(views.APIView): + + permission_classes = [permissions.IsAuthenticated, PackageViewPermissions] + + def get(self, request, project_id, filename): + """Download package file. + + Raises: + exceptions.InvalidJobError: [description] + """ + project = Project.objects.get(id=project_id) + last_job = PackageJob.objects.filter( + project=project, status=PackageJob.Status.FINISHED + ).latest("started_at") + + # Check if the project was packaged at least once + if not last_job: + raise exceptions.InvalidJobError( + "Packaging has never been triggered or successful for this project." 
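Note on the feedback handling in the package views: both LatestPackageView here and ListFilesView in qfield_files_views.py branch on the packaging feedback format. A small illustrative helper (not part of the codebase) summarizing the two layouts that the views read:

    from typing import Optional

    def layers_from_feedback(feedback: dict) -> Optional[dict]:
        """Return layer data from a package job's feedback, for either format."""
        if feedback.get("feedback_version") == "2.0":
            # New workflow-based feedback: layer data lives under a named output.
            return feedback["outputs"]["qgis_layers_data"]["layers_by_id"]

        # Legacy feedback: layer checks are stored in the second step's outputs.
        steps = feedback.get("steps", [])
        if len(steps) > 2 and steps[1].get("stage", 1) == 2:
            return steps[1]["outputs"]["layer_checks"]

        return None
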
+ ) + + key = utils.safe_join("projects/{}/export/".format(project_id), filename) + + return utils2.storage.file_response( + request, key, presigned=True, expires=600, as_attachment=True + ) diff --git a/docker-app/qfieldcloud/core/views/qfield_files_views.py b/docker-app/qfieldcloud/core/views/qfield_files_views.py index fba873b38..bd60611a3 100644 --- a/docker-app/qfieldcloud/core/views/qfield_files_views.py +++ b/docker-app/qfieldcloud/core/views/qfield_files_views.py @@ -6,12 +6,12 @@ from django.utils.decorators import method_decorator from drf_yasg.utils import swagger_auto_schema from qfieldcloud.core import exceptions, permissions_utils, serializers, utils -from qfieldcloud.core.models import ExportJob, Project +from qfieldcloud.core.models import PackageJob, Project from rest_framework import permissions, views from rest_framework.response import Response -class ExportViewPermissions(permissions.BasePermission): +class PackageViewPermissions(permissions.BasePermission): def has_permission(self, request, view): projectid = permissions_utils.get_param_from_request(request, "projectid") try: @@ -25,20 +25,20 @@ def has_permission(self, request, view): @method_decorator( name="post", decorator=swagger_auto_schema( - operation_description="Launch QField export project", - operation_id="Launch qfield export", + operation_description="Launch QField packaging project", + operation_id="Launch qfield packaging", ), ) @method_decorator( name="get", decorator=swagger_auto_schema( - operation_description="Get QField export status", - operation_id="Get qfield export status", + operation_description="Get QField packaging status", + operation_id="Get qfield packaging status", ), ) -class ExportView(views.APIView): +class PackageView(views.APIView): - permission_classes = [permissions.IsAuthenticated, ExportViewPermissions] + permission_classes = [permissions.IsAuthenticated, PackageViewPermissions] def post(self, request, projectid): @@ -47,22 +47,32 @@ def post(self, request, projectid): if not project_obj.project_filename: raise exceptions.NoQGISProjectError() - # Check if active export job already exists + # Check if active packaging job already exists # TODO: !!!!!!!!!!!! 
cache results for some minutes query = Q(project=project_obj) & ( - Q(status=ExportJob.Status.PENDING) - | Q(status=ExportJob.Status.QUEUED) - | Q(status=ExportJob.Status.STARTED) + Q(status=PackageJob.Status.PENDING) + | Q(status=PackageJob.Status.QUEUED) + | Q(status=PackageJob.Status.STARTED) ) # NOTE uncomment to enforce job creation - # ExportJob.objects.filter(query).delete() + # PackageJob.objects.filter(query).delete() - if ExportJob.objects.filter(query).exists(): - serializer = serializers.ExportJobSerializer(ExportJob.objects.get(query)) + if not project_obj.needs_repackaging: + export_job = ( + PackageJob.objects.filter(status=PackageJob.Status.FINISHED) + .filter(project=project_obj) + .latest("started_at") + ) + if export_job: + serializer = serializers.ExportJobSerializer(export_job) + return Response(serializer.data) + + if PackageJob.objects.filter(query).exists(): + serializer = serializers.ExportJobSerializer(PackageJob.objects.get(query)) return Response(serializer.data) - export_job = ExportJob.objects.create( + export_job = PackageJob.objects.create( project=project_obj, created_by=self.request.user ) @@ -75,7 +85,7 @@ def get(self, request, projectid): project_obj = Project.objects.get(id=projectid) export_job = ( - ExportJob.objects.filter(project=project_obj).order_by("updated_at").last() + PackageJob.objects.filter(project=project_obj).order_by("updated_at").last() ) serializer = serializers.ExportJobSerializer(export_job) @@ -91,23 +101,23 @@ def get(self, request, projectid): ) class ListFilesView(views.APIView): - permission_classes = [permissions.IsAuthenticated, ExportViewPermissions] + permission_classes = [permissions.IsAuthenticated, PackageViewPermissions] def get(self, request, projectid): project_obj = Project.objects.get(id=projectid) # Check if the project was exported at least once - if not ExportJob.objects.filter( - project=project_obj, status=ExportJob.Status.FINISHED + if not PackageJob.objects.filter( + project=project_obj, status=PackageJob.Status.FINISHED ): raise exceptions.InvalidJobError( "Project files have not been exported for the provided project id" ) export_job = ( - ExportJob.objects.filter( - project=project_obj, status=ExportJob.Status.FINISHED + PackageJob.objects.filter( + project=project_obj, status=PackageJob.Status.FINISHED ) .order_by("updated_at") .last() @@ -141,12 +151,19 @@ def get(self, request, projectid): } ) - steps = export_job.feedback.get("steps", []) - layers = ( - steps[1]["outputs"]["layer_checks"] - if len(steps) > 2 and steps[1].get("stage", 1) == 2 - else None - ) + if export_job.feedback.get("feedback_version") == "2.0": + layers = export_job.feedback["outputs"]["qgis_layers_data"]["layers_by_id"] + + for data in layers.values(): + data["valid"] = data["is_valid"] + data["status"] = data["error_code"] + else: + steps = export_job.feedback.get("steps", []) + layers = ( + steps[1]["outputs"]["layer_checks"] + if len(steps) > 2 and steps[1].get("stage", 1) == 2 + else None + ) return Response( { @@ -167,16 +184,16 @@ def get(self, request, projectid): ) class DownloadFileView(views.APIView): - permission_classes = [permissions.IsAuthenticated, ExportViewPermissions] + permission_classes = [permissions.IsAuthenticated, PackageViewPermissions] def get(self, request, projectid, filename): project_obj = Project.objects.get(id=projectid) # Check if the project was exported at least once - if not ExportJob.objects.filter( + if not PackageJob.objects.filter( project=project_obj, - status=ExportJob.Status.FINISHED, + 
status=PackageJob.Status.FINISHED, ): raise exceptions.InvalidJobError( "Project files have not been exported for the provided project id" diff --git a/docker-app/qfieldcloud/core/views/status_views.py b/docker-app/qfieldcloud/core/views/status_views.py index 5e3cc8c19..92577bc15 100644 --- a/docker-app/qfieldcloud/core/views/status_views.py +++ b/docker-app/qfieldcloud/core/views/status_views.py @@ -3,7 +3,6 @@ from django.utils.decorators import method_decorator from drf_yasg.utils import swagger_auto_schema from qfieldcloud.core import geodb_utils, utils -from qfieldcloud.core.logging.filters import skip_logging from rest_framework import status, views from rest_framework.permissions import AllowAny from rest_framework.response import Response @@ -19,7 +18,6 @@ class APIStatusView(views.APIView): permission_classes = [AllowAny] - @skip_logging def get(self, request): # Try to get the status from the cache results = cache.get("status_results", {}) diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py index 116f0609f..c8f3d0890 100644 --- a/docker-app/qfieldcloud/settings.py +++ b/docker-app/qfieldcloud/settings.py @@ -48,6 +48,7 @@ "django.contrib.admin", "django.contrib.auth", "django.contrib.contenttypes", + "django.contrib.gis", "django.contrib.sessions", "django.contrib.messages", "django.contrib.staticfiles", @@ -69,6 +70,7 @@ "invitations", "django_cron", "timezone_field", + "auditlog", # Local "qfieldcloud.core", "qfieldcloud.notifs", @@ -88,7 +90,7 @@ "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", "django_currentuser.middleware.ThreadLocalUserMiddleware", - "qfieldcloud.core.middleware.request_response_log.RequestResponseLogMiddleware", + "auditlog.middleware.AuditlogMiddleware", "qfieldcloud.core.middleware.timezone.TimezoneMiddleware", "axes.middleware.AxesMiddleware", ] @@ -97,6 +99,7 @@ "qfieldcloud.notifs.cron.SendNotificationsJob", # "qfieldcloud.core.cron.DeleteExpiredInvitationsJob", "qfieldcloud.core.cron.ResendFailedInvitationsJob", + "qfieldcloud.core.cron.SetTerminatedWorkersToFinalStatusJob", ] ROOT_URLCONF = "qfieldcloud.urls" @@ -128,12 +131,13 @@ DATABASES = { "default": { - "ENGINE": "django.db.backends.postgresql", + "ENGINE": "django.contrib.gis.db.backends.postgis", "NAME": os.environ.get("SQL_DATABASE"), "USER": os.environ.get("SQL_USER"), "PASSWORD": os.environ.get("SQL_PASSWORD"), "HOST": os.environ.get("SQL_HOST"), "PORT": os.environ.get("SQL_PORT"), + "OPTIONS": {"sslmode": os.environ.get("SQL_SSLMODE")}, } } @@ -188,7 +192,6 @@ STORAGE_BUCKET_NAME = os.environ.get("STORAGE_BUCKET_NAME") STORAGE_REGION_NAME = os.environ.get("STORAGE_REGION_NAME") STORAGE_ENDPOINT_URL = os.environ.get("STORAGE_ENDPOINT_URL") -STORAGE_ENDPOINT_URL_EXTERNAL = os.environ.get("STORAGE_ENDPOINT_URL_EXTERNAL") AUTH_USER_MODEL = "core.User" @@ -273,59 +276,40 @@ # Django invitations configurations # https://github.com/bee-keeper/django-invitations#additional-configuration INVITATIONS_INVITATION_EXPIRY = 365 # integer in days, 0 disables invitations -INVITATIONS_INVITATION_ONLY = True +INVITATIONS_INVITATION_ONLY = False INVITATIONS_ACCEPT_INVITE_AFTER_SIGNUP = True INVITATIONS_GONE_ON_ACCEPT_ERROR = False +TEST_RUNNER = "qfieldcloud.testing.QfcTestSuiteRunner" + LOGLEVEL = os.environ.get("LOGLEVEL", "DEBUG").upper() LOGGING = { "version": 1, - "disable_existing_loggers": True, + "disable_existing_loggers": False, "formatters": { - "request.human": { - "()": 
"qfieldcloud.core.logging.formatters.CustomisedRequestHumanFormatter", - }, "json": { "()": "qfieldcloud.core.logging.formatters.CustomisedJSONFormatter", }, }, - "filters": { - "skip_logging": { - "()": "qfieldcloud.core.logging.filters.SkipLoggingFilter", - }, - }, "handlers": { "console.json": { "class": "logging.StreamHandler", "formatter": "json", }, - "console.human": { - "class": "logging.StreamHandler", - "formatter": "request.human", - }, }, "root": { "handlers": ["console.json"], "level": "INFO", }, - "loggers": { - "qfieldcloud.request_response_log": { - "level": LOGLEVEL, - "filters": [ - "skip_logging", - ], - "handlers": [ - # TODO enable console.json once it is clear how we do store the json logs - # 'console.json', - "console.human", - ], - "propagate": False, - }, - }, } DEFAULT_AUTO_FIELD = "django.db.models.AutoField" +# Whether we are currently running tests +# NOTE automatically set when running tests, don't change manually! +IN_TEST_SUITE = False QFIELDCLOUD_TOKEN_SERIALIZER = "qfieldcloud.core.serializers.TokenSerializer" QFIELDCLOUD_USER_SERIALIZER = "qfieldcloud.core.serializers.CompleteUserSerializer" + +WORKER_TIMEOUT_S = int(os.environ.get("QFIELDCLOUD_WORKER_TIMEOUT_S", 60)) diff --git a/docker-app/qfieldcloud/testing.py b/docker-app/qfieldcloud/testing.py new file mode 100644 index 000000000..138cc54fa --- /dev/null +++ b/docker-app/qfieldcloud/testing.py @@ -0,0 +1,8 @@ +from django.conf import settings +from django.test.runner import DiscoverRunner + + +class QfcTestSuiteRunner(DiscoverRunner): + def __init__(self, *args, **kwargs): + settings.IN_TEST_SUITE = True + super().__init__(*args, **kwargs) diff --git a/docker-app/requirements.txt b/docker-app/requirements.txt index afd8f66ad..618596930 100644 --- a/docker-app/requirements.txt +++ b/docker-app/requirements.txt @@ -1,28 +1,88 @@ -Django>=3.2.11,<3.3 -djangorestframework>=3.12.2,<3.13 -markdown>=3.3.3,<3.4 -django-filter>=21.1,<22 -gunicorn>=20.1,<20.2 -psycopg2-binary>=2.8.6,<2.9 -django-allauth>=0.44.0,<0.45 -pyyaml==5.4 -drf-yasg>=1.20.0,<1.21 +asgiref==3.4.1 +attrs==21.2.0 +beautifulsoup4==4.10.0 +boto3==1.18.65 +boto3-stubs==1.20.26 +botocore==1.21.65 +botocore-stubs==1.23.26 +certifi==2021.10.8 +cffi==1.15.0 +charset-normalizer==2.0.9 +click==8.0.3 +click-plugins==1.1.1 +cligj==0.7.2 +coreapi==2.3.3 +coreschema==0.0.4 coverage==5.3 -boto3>=1.18,<1.19 -boto3-stubs[s3]>=1.18.46 -django-storages>=1.11,<1.12 -sentry-sdk -jsonschema>=3.2.0,<3.3 -django-tables2>=2.4,<2.5 -django-bootstrap4>=3.0,<4.0 -django-cron==0.5 -django-invitations>=1.9.3,<1.10 -redis==3.5.3 -JSON-log-formatter>=0.3.0<0.4.0 -docker>=4.2,<4.3 -fiona>=1.8.20<2.0.0 -django-notifications-hq==1.6.0 +cryptography==36.0.1 +defusedxml==0.7.1 +Deprecated==1.2.13 +Django==3.2.12 +django-allauth==0.44.0 +django-auditlog==1.0a1 +django-axes==5.28.0 +django-bootstrap4==3.0.1 +django-classy-tags==3.0.1 +django-common-helpers==0.9.2 +django-cron==0.5.0 django-currentuser==0.5.3 -django-axes>=5.26.0,<6.0.0 -mkdocs>=1.2.3 -django-timezone-field>=4.2.1 +django-filter==21.1 +django-invitations==1.9.3 +django-ipware==4.0.2 +django-jsonfield==1.4.1 +django-model-utils==4.2.0 +django-notifications-hq==1.6.0 +django-storages==1.11.1 +django-tables2==2.4.1 +django-timezone-field==4.2.1 +djangorestframework==3.12.4 +docker==4.2.2 +drf-yasg==1.20.0 +Fiona==1.8.20 +ghp-import==2.0.2 +gunicorn==20.1.0 +idna==3.3 +importlib-metadata==4.10.0 +inflection==0.5.1 +itypes==1.2.0 +Jinja2==3.0.3 +jmespath==0.10.0 +JSON-log-formatter==0.5.0 +jsonfield==3.1.0 
+jsonschema==3.2.0 +Markdown==3.3.6 +MarkupSafe==2.0.1 +mergedeep==1.3.4 +mkdocs==1.2.3 +munch==2.5.0 +mypy-boto3-s3==1.20.17 +oauthlib==3.1.1 +packaging==21.3 +psycopg2-binary==2.8.6 +pycparser==2.21 +PyJWT==2.3.0 +pyparsing==3.0.6 +pyrsistent==0.18.0 +python-dateutil==2.8.2 +python3-openid==3.2.0 +pytz==2021.3 +PyYAML==5.4 +pyyaml-env-tag==0.1 +redis==3.5.3 +requests==2.26.0 +requests-oauthlib==1.3.0 +ruamel.yaml==0.17.17 +ruamel.yaml.clib==0.2.6 +s3transfer==0.5.0 +sentry-sdk==1.5.1 +six==1.16.0 +soupsieve==2.3.1 +sqlparse==0.4.2 +swapper==1.3.0 +typing-extensions==4.0.1 +uritemplate==4.1.1 +urllib3==1.26.7 +watchdog==2.1.6 +websocket-client==1.2.3 +wrapt==1.13.3 +zipp==3.6.0 diff --git a/docker-app/wait_for_services.py b/docker-app/wait_for_services.py index b855087cf..6324f70e4 100644 --- a/docker-app/wait_for_services.py +++ b/docker-app/wait_for_services.py @@ -19,7 +19,9 @@ def wait_for_postgres(): "dbname": os.environ.get("SQL_DATABASE"), "user": os.environ.get("SQL_USER"), "password": os.environ.get("SQL_PASSWORD"), - "host": "db", + "host": os.environ.get("SQL_HOST"), + "port": os.environ.get("SQL_PORT"), + "sslmode": os.environ.get("SQL_SSLMODE"), } start_time = time() while time() - start_time < TIMEOUT: diff --git a/docker-app/worker_wrapper/wrapper.py b/docker-app/worker_wrapper/wrapper.py index 1f3a9012e..25768f3d5 100644 --- a/docker-app/worker_wrapper/wrapper.py +++ b/docker-app/worker_wrapper/wrapper.py @@ -11,15 +11,17 @@ import docker import qfieldcloud.core.utils2.storage import requests +from django.conf import settings from django.db import transaction from django.forms.models import model_to_dict from django.utils import timezone +from docker.models.containers import Container from qfieldcloud.core.models import ( ApplyJob, ApplyJobDelta, Delta, - ExportJob, Job, + PackageJob, ProcessProjectfileJob, ) from qfieldcloud.core.utils import get_qgis_project_file @@ -39,7 +41,7 @@ class QgisException(Exception): class JobRun: - container_timeout_secs = 3 * 60 + container_timeout_secs = settings.WORKER_TIMEOUT_S job_class = Job command = [] @@ -204,7 +206,7 @@ def _run_docker( logger.info(f"Execute: {' '.join(command)}") volumes.append(f"{TRANSFORMATION_GRIDS_VOLUME_NAME}:/transformation_grids:ro") - container = client.containers.run( + container: Container = client.containers.run( # type:ignore QGIS_CONTAINER_NAME, command, environment={ @@ -225,6 +227,8 @@ def _run_docker( detach=True, ) + logger.info(f"Starting worker {container.id} ...") + response = {"StatusCode": TIMEOUT_ERROR_EXIT_CODE} try: @@ -243,9 +247,20 @@ def _run_docker( return response["StatusCode"], logs -class ExportJobRun(JobRun): - job_class = ExportJob - command = ["export", "%(project__id)s", "%(project__project_filename)s"] +class PackageJobRun(JobRun): + job_class = PackageJob + command = ["package", "%(project__id)s", "%(project__project_filename)s"] + data_last_packaged_at = None + + def before_docker_run(self) -> None: + # at the start of docker we assume we make the snapshot of the data + self.data_last_packaged_at = timezone.now() + + def after_docker_run(self) -> None: + # only successfully finished packaging jobs should update the Project.data_last_packaged_at + if self.job.status == Job.Status.FINISHED: + self.job.project.data_last_packaged_at = self.data_last_packaged_at + self.job.project.save() class DeltaApplyJobRun(JobRun): @@ -312,7 +327,8 @@ def before_docker_run(self) -> None: json.dump(deltafile_contents, f) def after_docker_run(self) -> None: - delta_feedback = 
self.job.feedback["steps"][1]["outputs"]["delta_feedback"] + delta_feedback = self.job.feedback["outputs"]["apply_deltas"]["delta_feedback"] + is_data_modified = False for feedback in delta_feedback: delta_id = feedback["delta_id"] @@ -321,17 +337,22 @@ def after_docker_run(self) -> None: if status == "status_applied": status = Delta.Status.APPLIED + is_data_modified = True elif status == "status_conflict": status = Delta.Status.CONFLICT elif status == "status_apply_failed": status = Delta.Status.NOT_APPLIED else: status = Delta.Status.ERROR + # not certain what happened + is_data_modified = True Delta.objects.filter(pk=delta_id).update( last_status=status, last_feedback=feedback, last_modified_pk=modified_pk, + last_apply_attempt_at=self.job.started_at, + last_apply_attempt_by=self.job.created_by, ) ApplyJobDelta.objects.filter( @@ -343,6 +364,10 @@ def after_docker_run(self) -> None: modified_pk=modified_pk, ) + if is_data_modified: + self.job.project.data_last_updated_at = timezone.now() + self.job.project.save() + def after_docker_exception(self) -> None: Delta.objects.filter( id__in=self.delta_ids, @@ -376,9 +401,9 @@ def get_context(self, *args) -> Dict[str, Any]: def after_docker_run(self) -> None: project = self.job.project - - project_details = self.job.feedback["steps"][3]["outputs"]["project_details"] - project.project_details = project_details + project.project_details = self.job.feedback["outputs"]["project_details"][ + "project_details" + ] thumbnail_filename = self.shared_tempdir.joinpath("thumbnail.png") with open(thumbnail_filename, "rb") as f: diff --git a/docker-caddy/Caddyfile b/docker-caddy/Caddyfile deleted file mode 100644 index 7fe2c19a4..000000000 --- a/docker-caddy/Caddyfile +++ /dev/null @@ -1,27 +0,0 @@ -{ - acme_ca {$CADDY_ACME_CA} - email info@opengis.ch -} - -http://{$QFIELDCLOUD_HOST} https://{$QFIELDCLOUD_HOST} { - log { - level debug - format json - } - - root * /home/app/web/ - file_server - - # Caddy imports. Currently imports the minio configuration from `Caddyfile.proxy-minio`. - # PROD Since we are using a remote S3, `CADDY_IMPORT_GLOB` should be an empty glob ("(*(N))"). - # DEV Use local minio. The file is mounted in docker-compose and CADDY_IMPORT_GLOB is set to the filename. 
- import {$CADDY_IMPORT_GLOB} - - @notStatic { - not { - path /staticfiles/* - path /mediafiles/* - } - } - reverse_proxy @notStatic app:8000 -} diff --git a/docker-caddy/Caddyfile.proxy-minio b/docker-caddy/Caddyfile.proxy-minio deleted file mode 100644 index c109e66cd..000000000 --- a/docker-caddy/Caddyfile.proxy-minio +++ /dev/null @@ -1,12 +0,0 @@ -# This is to be included in the Caddyfile when using local minio S3 (development/testing) -# On production, an empty file should be included instead - - - handle_path /minio/* { - rewrite * {path} - reverse_proxy { - to s3:9000 - header_up Host {upstream_hostport} - header_up X-Forwarded-Host {host} - } - } diff --git a/docker-caddy/Dockerfile b/docker-caddy/Dockerfile deleted file mode 100644 index 2c94bf454..000000000 --- a/docker-caddy/Dockerfile +++ /dev/null @@ -1,4 +0,0 @@ -FROM caddy:2.1.0-alpine - -ADD ./Caddyfile /etc/caddy/Caddyfile -RUN touch /etc/caddy/Caddyfile.proxy-minio diff --git a/docker-compose.dockerhub.yml b/docker-compose.dockerhub.yml index 0a894fd9c..1acf9fe9b 100644 --- a/docker-compose.dockerhub.yml +++ b/docker-compose.dockerhub.yml @@ -56,22 +56,6 @@ services: depends_on: - db - web: - image: opengischprivate/qfieldcloud-caddy:latest - restart: unless-stopped - environment: - QFIELDCLOUD_HOST: ${QFIELDCLOUD_HOST} - CADDY_ACME_CA: ${CADDY_ACME_CA} - CADDY_IMPORT_GLOB: ${CADDY_IMPORT_GLOB} - ACME_AGREE: "true" - volumes: - - static_volume:/home/app/web/staticfiles/ - - media_volume:/home/app/web/mediafiles/ - - caddy_data:/data - ports: - - ${WEB_HTTP_PORT}:80 - - ${WEB_HTTPS_PORT}:443 - qgis: image: opengischprivate/qfieldcloud-qgis:latest tty: true @@ -90,4 +74,3 @@ volumes: postgres_data: static_volume: media_volume: - caddy_data: diff --git a/docker-compose.override.dev.yml b/docker-compose.override.dev.yml index 0e60c7995..0676ce03b 100644 --- a/docker-compose.override.dev.yml +++ b/docker-compose.override.dev.yml @@ -5,6 +5,9 @@ services: app: depends_on: - geodb + build: + args: + DEBUG_BUILD: ${DEBUG} geodb: image: postgis/postgis:12-3.0 diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml index 7ca4b9169..f3d073d0b 100644 --- a/docker-compose.override.local.yml +++ b/docker-compose.override.local.yml @@ -3,28 +3,37 @@ version: '3.7' services: app: + build: + args: + - DEBUG_BUILD=1 ports: - # allow direct access without caddy - - "8000:8000" + # allow direct access without nginx + - "5001:8000" volumes: # mount the source for live reload - ./docker-app/qfieldcloud:/usr/src/app/qfieldcloud environment: # run flush, migrate and collectstatic in entrypoint DEBUG: 1 - STORAGE_ENDPOINT_URL_EXTERNAL: ${STORAGE_ENDPOINT_URL_EXTERNAL} - STORAGE_BROWSER_PORT: ${STORAGE_BROWSER_PORT} command: python3 manage.py runserver 0.0.0.0:8000 depends_on: + - db - geodb - - s3 + - minio - smtp4dev worker_wrapper: + build: + args: + - DEBUG_BUILD=1 volumes: # mount the source for live reload - ./docker-app/qfieldcloud:/usr/src/app/qfieldcloud - ./docker-app/worker_wrapper:/usr/src/app/worker_wrapper + depends_on: + - db + - redis + - app smtp4dev: image: rnwood/smtp4dev:v3 @@ -42,6 +51,18 @@ services: # Specifies the server hostname. Used in auto-generated TLS certificate if enabled. 
- ServerOptions__HostName=smtp4dev + db: + image: postgis/postgis:13-3.1-alpine + restart: unless-stopped + environment: + POSTGRES_DB: ${POSTGRES_DB} + POSTGRES_USER: ${POSTGRES_USER} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} + volumes: + - postgres_data:/var/lib/postgresql/data/ + ports: + - ${HOST_POSTGRES_PORT}:5432 + geodb: image: postgis/postgis:12-3.0 restart: unless-stopped @@ -54,44 +75,39 @@ services: ports: - ${GEODB_PORT}:5432 - web: - environment: - CADDY_IMPORT_GLOB: Caddyfile.proxy-minio - volumes: - # Include config for reverse proxying caddy - - ./docker-caddy/Caddyfile.proxy-minio:/etc/caddy/Caddyfile.proxy-minio - - s3: + minio: image: minio/minio:RELEASE.2021-07-27T02-40-15Z restart: unless-stopped volumes: - - s3_data1:/data1 - - s3_data2:/data2 - - s3_data3:/data3 - - s3_data4:/data4 + - minio_data1:/data1 + - minio_data2:/data2 + - minio_data3:/data3 + - minio_data4:/data4 environment: MINIO_ROOT_USER: ${STORAGE_ACCESS_KEY_ID} MINIO_ROOT_PASSWORD: ${STORAGE_SECRET_ACCESS_KEY} - MINIO_BROWSER_REDIRECT_URL: http://${QFIELDCLOUD_HOST}:${STORAGE_BROWSER_PORT} + MINIO_BROWSER_REDIRECT_URL: http://${QFIELDCLOUD_HOST}:${MINIO_BROWSER_PORT} command: server /data{1...4} --console-address :9001 healthcheck: test: [ - "CMD", - "curl", - "-A", - "Mozilla/5.0 (X11; Linux x86_64; rv:30.0) Gecko/20100101 Firefox/30.0", - "-f", - "${STORAGE_ENDPOINT_URL}/minio/index.html"] + "CMD", + "curl", + "-A", + "Mozilla/5.0 (X11; Linux x86_64; rv:30.0) Gecko/20100101 Firefox/30.0", + "-f", + "${STORAGE_ENDPOINT_URL}/minio/index.html" + ] interval: 5s timeout: 20s retries: 5 ports: - - ${STORAGE_BROWSER_PORT}:9001 + - ${MINIO_BROWSER_PORT}:9001 + - ${MINIO_API_PORT}:9000 createbuckets: image: minio/mc depends_on: - s3: + minio: condition: service_healthy entrypoint: > /bin/sh -c " @@ -103,9 +119,10 @@ services: " volumes: + postgres_data: geodb_data: smtp4dev_data: - s3_data1: - s3_data2: - s3_data3: - s3_data4: + minio_data1: + minio_data2: + minio_data3: + minio_data4: diff --git a/docker-compose.override.prod.yml b/docker-compose.override.prod.yml deleted file mode 100644 index 1e5a90c5a..000000000 --- a/docker-compose.override.prod.yml +++ /dev/null @@ -1 +0,0 @@ -version: '3.7' diff --git a/docker-compose.yml b/docker-compose.yml index ec26a6c8c..172a5c6e7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -8,18 +8,6 @@ x-logging: max-file: "10" services: - db: - image: postgres:11 - restart: unless-stopped - environment: - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_USER: ${POSTGRES_USER} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - volumes: - - postgres_data:/var/lib/postgresql/data/ - ports: - - ${HOST_POSTGRES_PORT}:5432 - logging: *default-logging app: &default-django build: @@ -37,7 +25,7 @@ services: - static_volume:/usr/src/app/staticfiles - media_volume:/usr/src/app/mediafiles/ environment: - DJANGO_ALLOWED_HOSTS: ${QFIELDCLOUD_HOST} + DJANGO_ALLOWED_HOSTS: ${DJANGO_ALLOWED_HOSTS} DJANGO_SETTINGS_MODULE: ${DJANGO_SETTINGS_MODULE} SECRET_KEY: ${SECRET_KEY} DEBUG: ${DEBUG} @@ -46,12 +34,12 @@ services: SQL_PASSWORD: ${POSTGRES_PASSWORD} SQL_HOST: ${POSTGRES_HOST} SQL_PORT: ${POSTGRES_PORT} + SQL_SSLMODE: ${POSTGRES_SSLMODE} STORAGE_ACCESS_KEY_ID: ${STORAGE_ACCESS_KEY_ID} STORAGE_SECRET_ACCESS_KEY: ${STORAGE_SECRET_ACCESS_KEY} STORAGE_BUCKET_NAME: ${STORAGE_BUCKET_NAME} STORAGE_REGION_NAME: ${STORAGE_REGION_NAME} STORAGE_ENDPOINT_URL: ${STORAGE_ENDPOINT_URL} - STORAGE_ENDPOINT_URL_EXTERNAL: ${STORAGE_ENDPOINT_URL} QFIELDCLOUD_DEFAULT_NETWORK: ${QFIELDCLOUD_DEFAULT_NETWORK} 
SENTRY_DSN: ${SENTRY_DSN} SENTRY_SERVER_NAME: ${QFIELDCLOUD_HOST} @@ -78,9 +66,9 @@ services: QFIELDCLOUD_ADMIN_URI: ${QFIELDCLOUD_ADMIN_URI} WEB_HTTP_PORT: ${WEB_HTTP_PORT} WEB_HTTPS_PORT: ${WEB_HTTPS_PORT} + QFIELDCLOUD_WORKER_TIMEOUT_S: ${QFIELDCLOUD_WORKER_TIMEOUT_S} TRANSFORMATION_GRIDS_VOLUME_NAME: ${COMPOSE_PROJECT_NAME}_transformation_grids depends_on: - - db - redis logging: driver: "json-file" @@ -92,24 +80,46 @@ services: ofelia.job-exec.runcrons.schedule: 0 * * * * * ofelia.job-exec.runcrons.command: python manage.py runcrons - web: - build: - context: ./docker-caddy + nginx: + image: nginx:stable restart: unless-stopped - environment: - QFIELDCLOUD_HOST: ${QFIELDCLOUD_HOST} - CADDY_ACME_CA: ${CADDY_ACME_CA} - CADDY_IMPORT_GLOB: ${CADDY_IMPORT_GLOB} - ACME_AGREE: "true" volumes: - - static_volume:/home/app/web/staticfiles/ - - media_volume:/home/app/web/mediafiles/ - - caddy_data:/data + - static_volume:/var/www/html/staticfiles/ + - media_volume:/var/www/html/mediafiles/ + - ./conf/nginx/pages/:/var/www/html/pages/ + - ./conf/nginx/templates/:/etc/nginx/templates/ + - ./conf/nginx/certs/:/etc/nginx/certs/:ro + - ./conf/nginx/options-ssl-nginx.conf:/etc/nginx/options-ssl-nginx.conf + - ./conf/nginx/ssl-dhparams.pem:/etc/nginx/ssl-dhparams.pem + - certbot_www:/var/www/certbot ports: - ${WEB_HTTP_PORT}:80 - ${WEB_HTTPS_PORT}:443 + environment: + QFIELDCLOUD_HOST: ${QFIELDCLOUD_HOST} + WEB_HTTP_PORT: ${WEB_HTTP_PORT} + WEB_HTTPS_PORT: ${WEB_HTTPS_PORT} + LETSENCRYPT_EMAIL: ${LETSENCRYPT_EMAIL} + LETSENCRYPT_STAGING: ${LETSENCRYPT_STAGING} + LETSENCRYPT_RSA_KEY_SIZE: ${LETSENCRYPT_RSA_KEY_SIZE} logging: *default-logging + mkcert: + image: vishnunair/docker-mkcert + environment: + domain: ${QFIELDCLOUD_HOST} + volumes: + - ./conf/nginx/certs/:/root/.local/share/mkcert/ + command: /bin/sh -c 'mkcert -install && for i in $$(echo $$domain | sed "s/,/ /g"); do [ ! 
-f /root/.local/share/mkcert/$$i.pem ] && mkcert $$i; done && tail -f -n0 /etc/hosts' + + certbot: + image: certbot/certbot + restart: unless-stopped + volumes: + - ./conf/certbot/conf:/etc/letsencrypt + - certbot_www:/var/www/certbot + entrypoint: "/bin/sh -c 'trap exit TERM; while :; do certbot renew; sleep 12h & wait $${!}; done;'" + qgis: build: context: ./docker-qgis @@ -141,7 +151,6 @@ services: - ${TMP_DIRECTORY}:/tmp logging: *default-logging depends_on: - - db - redis - app @@ -162,8 +171,7 @@ services: - transformation_grids:/transformation_grids volumes: - postgres_data: static_volume: media_volume: - caddy_data: transformation_grids: + certbot_www: diff --git a/docker-qgis/Dockerfile b/docker-qgis/Dockerfile index 55f122c5e..dbeb24f4b 100644 --- a/docker-qgis/Dockerfile +++ b/docker-qgis/Dockerfile @@ -1,9 +1,10 @@ -FROM qgis/qgis:final-3_20_2 +FROM qgis/qgis:final-3_22_3 RUN apt-get update && \ DEBIAN_FRONTEND=noninteractive apt-get install -y \ python3-pip \ xvfb \ + iputils-ping \ && apt-get clean COPY ./requirements.txt /tmp/ @@ -27,4 +28,6 @@ ENV LIBC_FATAL_STDERR_=1 ENV LANG=C.UTF-8 ENV PYTHONPATH="/usr/src/app/lib:${PYTHONPATH}" +ENV XDG_RUNTIME_DIR="/run/user/0" + ENTRYPOINT ["/bin/sh", "-c", "/usr/bin/xvfb-run -a \"$@\"", ""] diff --git a/docker-qgis/apply_deltas.py b/docker-qgis/apply_deltas.py index 4742542f9..aecdd2ff3 100755 --- a/docker-qgis/apply_deltas.py +++ b/docker-qgis/apply_deltas.py @@ -193,15 +193,12 @@ def delta_apply( start_app() project = QgsProject.instance() - logging.info(project_filename) - logging.info(delta_filename) + logging.info(f'Loading project file "{project_filename}"...') project.read(str(project_filename)) - logging.info(project.mapLayers()) + logging.info(f'Loading delta file "{delta_filename}"...') delta_file = delta_file_file_loader({"delta_file": delta_filename}) # type: ignore - logging.info(delta_file) - if not delta_file: raise Exception("Missing delta file") diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py index 9076fbff5..24ed8d671 100755 --- a/docker-qgis/entrypoint.py +++ b/docker-qgis/entrypoint.py @@ -6,16 +6,25 @@ import os import tempfile from pathlib import Path, PurePath -from typing import Dict, List +from typing import Dict, Union import boto3 import qfieldcloud.qgis.apply_deltas import qfieldcloud.qgis.process_projectfile from libqfieldsync.offline_converter import ExportType, OfflineConverter from libqfieldsync.project import ProjectConfiguration -from qfieldcloud.qgis.utils import Step +from libqfieldsync.utils.file_utils import get_project_in_folder +from qfieldcloud.qgis.utils import ( + Step, + StepOutput, + WorkDirPath, + Workflow, + get_layers_data, + layers_data_to_string, + start_app, + stop_app, +) from qgis.core import ( - QgsApplication, QgsCoordinateTransform, QgsOfflineEditing, QgsProject, @@ -67,7 +76,19 @@ def _get_sha256sum(filepath): return hasher.hexdigest() -def _download_project_directory(project_id: str, tmpdir: Path = None) -> Path: +def _get_md5sum(filepath): + """Calculate sha256sum of a file""" + BLOCKSIZE = 65536 + hasher = hashlib.md5() + with filepath as f: + buf = f.read(BLOCKSIZE) + while len(buf) > 0: + hasher.update(buf) + buf = f.read(BLOCKSIZE) + return hasher.hexdigest() + + +def _download_project_directory(project_id: str, download_dir: Path = None) -> Path: """Download the files in the project "working" directory from the S3 Storage into a temporary directory. 
Returns the directory path""" @@ -76,12 +97,12 @@ def _download_project_directory(project_id: str, tmpdir: Path = None) -> Path: # Prefix of the working directory on the Storages working_prefix = "/".join(["projects", project_id, "files"]) - if not tmpdir: + if not download_dir: # Create a temporary directory - tmpdir = Path(tempfile.mkdtemp()) + download_dir = Path(tempfile.mkdtemp()) # Create a local working directory - working_dir = tmpdir.joinpath("files") + working_dir = download_dir.joinpath("files") working_dir.mkdir(parents=True) # Download the files @@ -90,29 +111,39 @@ def _download_project_directory(project_id: str, tmpdir: Path = None) -> Path: # Get the path of the file relative to the project directory relative_filename = key_filename.relative_to(*key_filename.parts[:2]) - absolute_filename = tmpdir.joinpath(relative_filename) + absolute_filename = download_dir.joinpath(relative_filename) absolute_filename.parent.mkdir(parents=True, exist_ok=True) + # NOTE the E_TAG already is surrounded by double quotes + logging.info( + f'Downloading file "{obj.key}", size: {obj.size} bytes, md5sum: {obj.e_tag} ' + ) + bucket.download_file(obj.key, str(absolute_filename)) - return tmpdir + return download_dir def _upload_project_directory( project_id: str, local_dir: Path, should_delete: bool = False ) -> None: """Upload the files in the local_dir to the storage""" + stop_app() bucket = _get_s3_bucket() - # either "files" or "export" + # either "files" or "package" subdir = local_dir.parts[-1] prefix = "/".join(["projects", project_id, subdir]) if should_delete: - # Remove existing export directory on the storage + logging.info("Deleting older file versions...") + + # Remove existing package directory on the storage bucket.objects.filter(Prefix=prefix).delete() - # Loop recursively in the local export directory + uploaded_files_count = 0 + + # Loop recursively in the local package directory for elem in Path(local_dir).rglob("*.*"): # Don't upload .qgs~ and .qgz~ files if str(elem).endswith("~"): @@ -125,8 +156,12 @@ def _upload_project_directory( with open(elem, "rb") as e: sha256sum = _get_sha256sum(e) + with open(elem, "rb") as e: + md5sum = _get_md5sum(e) + # Create the key - key = "/".join([prefix, str(elem.relative_to(*elem.parts[:4]))]) + filename = str(elem.relative_to(*elem.parts[:4])) + key = "/".join([prefix, filename]) metadata = {"sha256sum": sha256sum} if should_delete: @@ -138,52 +173,33 @@ def _upload_project_directory( ) # Check if the file is different on the storage + # TODO switch to etag/md5sum comparison if metadata["sha256sum"] != storage_metadata["sha256sum"]: + uploaded_files_count += 1 + logging.info( + f'Uploading file "{key}", size: {elem.stat().st_size} bytes, md5sum: {md5sum}, sha256sum: "{sha256sum}" ' + ) bucket.upload_file(str(elem), key, ExtraArgs={"Metadata": metadata}) + if uploaded_files_count == 0: + logging.info("No files need to be uploaded.") + else: + logging.info(f"{uploaded_files_count} file(s) uploaded.") + -def _call_qfieldsync_exporter(project_filepath: Path, export_dir: Path) -> Dict: - """Call the function of QFieldSync to export a project for QField""" +def _call_qfieldsync_packager(project_filename: Path, package_dir: Path) -> str: + """Call the function of QFieldSync to package a project for QField""" - argvb = list(map(os.fsencode, [""])) - qgis_app = QgsApplication(argvb, True) - qgis_app.initQgis() + start_app() project = QgsProject.instance() - if not project_filepath.exists(): - raise FileNotFoundError(project_filepath) + if not 
project_filename.exists(): + raise FileNotFoundError(project_filename) - if not project.read(str(project_filepath)): - raise Exception(f"Unable to open file with QGIS: {project_filepath}") + if not project.read(str(project_filename)): + raise Exception(f"Unable to open file with QGIS: {project_filename}") layers = project.mapLayers() - # Check if the layers are valid (i.e. if the datasources are available) - layer_checks = {} - for layer in layers.values(): - is_valid = True - status = "ok" - if layer: - if layer.dataProvider(): - if not layer.dataProvider().isValid(): - is_valid = False - status = "invalid_dataprovider" - # there might be another reason why the layer is not valid, other than the data provider - elif not layer.isValid(): - is_valid = False - status = "invalid_layer" - else: - is_valid = False - status = "missing_dataprovider" - else: - is_valid = False - status = "missing_layer" - - layer_checks[layer.id()] = { - "name": layer.name(), - "valid": is_valid, - "status": status, - } - project_config = ProjectConfiguration(project) vl_extent_wkt = QgsRectangle() vl_extent_crs = project.crs().authid() @@ -241,7 +257,7 @@ def _call_qfieldsync_exporter(project_filepath: Path, export_dir: Path) -> Dict: offline_editing = QgsOfflineEditing() offline_converter = OfflineConverter( project, - str(export_dir), + str(package_dir), vl_extent_wkt, vl_extent_crs, offline_editing, @@ -254,170 +270,201 @@ def _call_qfieldsync_exporter(project_filepath: Path, export_dir: Path) -> Dict: offline_converter.project_configuration.create_base_map = False offline_converter.convert() - qgis_app.exitQgis() - - return layer_checks - - -def cmd_export_project(args): - tmpdir = Path(tempfile.mkdtemp()) - exportdir = tmpdir.joinpath("export") - exportdir.mkdir() - - steps: List[Step] = [ - Step( - name="Download Project Directory", - arguments={ - "tmpdir": tmpdir, - "project_id": args.projectid, - }, - arg_names=["project_id", "tmpdir"], - method=_download_project_directory, - return_names=["tmp_project_dir"], - public_returns=["tmp_project_dir"], - ), - Step( - name="Export Project", - arguments={ - "project_filename": tmpdir.joinpath("files", args.project_file), - "exportdir": exportdir, - }, - arg_names=["project_filename", "exportdir"], - return_names=["layer_checks"], - output_names=["layer_checks"], - method=_call_qfieldsync_exporter, - ), - Step( - name="Upload Exported Project", - arguments={ - "project_id": args.projectid, - "exportdir": exportdir, - "should_delete": True, - }, - arg_names=["project_id", "exportdir", "should_delete"], - method=_upload_project_directory, - ), - ] - - qfieldcloud.qgis.utils.run_task( - steps, + packaged_project_filename = get_project_in_folder(str(package_dir)) + if Path(packaged_project_filename).stat().st_size == 0: + raise Exception("The packaged QGIS project file is empty.") + + return packaged_project_filename + + +def _extract_layer_data(project_filename: Union[str, Path]) -> Dict: + start_app() + + project_filename = str(project_filename) + project = QgsProject.instance() + project.read(project_filename) + layers_by_id = get_layers_data(project) + + logging.info( + f"QGIS project layer checks\n{layers_data_to_string(layers_by_id)}", + ) + + return layers_by_id + + +def cmd_package_project(args): + workflow = Workflow( + id="package_project", + name="Package Project", + version="2.0", + description="Packages a QGIS project to be used on QField. 
Converts layers for offline editing if configured.", + steps=[ + Step( + id="download_project_directory", + name="Download Project Directory", + arguments={ + "project_id": args.projectid, + "download_dir": WorkDirPath(mkdir=True), + }, + method=_download_project_directory, + return_names=["tmp_project_dir"], + ), + Step( + id="qgis_layers_data", + name="QGIS Layers Data", + arguments={ + "project_filename": WorkDirPath("files", args.project_file), + }, + method=_extract_layer_data, + return_names=["layers_by_id"], + outputs=["layers_by_id"], + ), + Step( + id="package_project", + name="Package Project", + arguments={ + "project_filename": WorkDirPath("files", args.project_file), + "package_dir": WorkDirPath("export", mkdir=True), + }, + method=_call_qfieldsync_packager, + return_names=["qfield_project_filename"], + ), + Step( + id="qfield_layer_data", + name="Packaged Layers Data", + arguments={ + "project_filename": StepOutput( + "package_project", "qfield_project_filename" + ), + }, + method=_extract_layer_data, + return_names=["layers_by_id"], + outputs=["layers_by_id"], + ), + Step( + id="upload_packaged_project", + name="Upload Packaged Project", + arguments={ + "project_id": args.projectid, + "local_dir": WorkDirPath("export", mkdir=True), + "should_delete": True, + }, + method=_upload_project_directory, + ), + ], + ) + + qfieldcloud.qgis.utils.run_workflow( + workflow, Path("/io/feedback.json"), ) def _apply_delta(args): - tmpdir = Path(tempfile.mkdtemp()) - files_dir = tmpdir.joinpath("files") - steps: List[Step] = [ - Step( - name="Download Project Directory", - arguments={ - "project_id": args.projectid, - "tmpdir": tmpdir, - }, - arg_names=["project_id", "tmpdir"], - method=_download_project_directory, - return_names=["tmp_project_dir"], - public_returns=["tmp_project_dir"], - ), - Step( - name="Apply Deltas", - arguments={ - "project_filename": tmpdir.joinpath("files", args.project_file), - "delta_filename": "/io/deltafile.json", - "inverse": args.inverse, - "overwrite_conflicts": args.overwrite_conflicts, - }, - arg_names=[ - "project_filename", - "delta_filename", - "inverse", - "overwrite_conflicts", - ], - method=qfieldcloud.qgis.apply_deltas.delta_apply, - return_names=["delta_feedback"], - output_names=["delta_feedback"], - ), - Step( - name="Upload Exported Project", - arguments={ - "project_id": args.projectid, - "files_dir": files_dir, - "should_delete": False, - }, - arg_names=["project_id", "files_dir", "should_delete"], - method=_upload_project_directory, - ), - ] - - qfieldcloud.qgis.utils.run_task( - steps, + workflow = Workflow( + id="apply_changes", + name="Apply Changes", + version="2.0", + steps=[ + Step( + id="download_project_directory", + name="Download Project Directory", + arguments={ + "project_id": args.projectid, + "download_dir": WorkDirPath(mkdir=True), + }, + method=_download_project_directory, + return_names=["tmp_project_dir"], + ), + Step( + id="apply_deltas", + name="Apply Deltas", + arguments={ + "project_filename": WorkDirPath("files", args.project_file), + "delta_filename": "/io/deltafile.json", + "inverse": args.inverse, + "overwrite_conflicts": args.overwrite_conflicts, + }, + method=qfieldcloud.qgis.apply_deltas.delta_apply, + return_names=["delta_feedback"], + outputs=["delta_feedback"], + ), + Step( + id="upload_exported_project", + name="Upload Project", + arguments={ + "project_id": args.projectid, + "local_dir": WorkDirPath("files"), + "should_delete": False, + }, + method=_upload_project_directory, + ), + ], + ) + + 
qfieldcloud.qgis.utils.run_workflow( + workflow, Path("/io/feedback.json"), ) def cmd_process_projectfile(args): - project_id = args.projectid - project_file = args.project_file - - tmpdir = Path(tempfile.mkdtemp()) - project_filename = tmpdir.joinpath("files", project_file) - steps: List[Step] = [ - Step( - name="Download Project Directory", - arguments={ - "project_id": project_id, - "tmpdir": tmpdir, - }, - arg_names=["project_id", "tmpdir"], - method=_download_project_directory, - return_names=["tmp_project_dir"], - public_returns=["tmp_project_dir"], - ), - Step( - name="Project Validity Check", - arguments={ - "project_filename": project_filename, - }, - arg_names=["project_filename"], - method=qfieldcloud.qgis.process_projectfile.check_valid_project_file, - ), - Step( - name="Opening Check", - arguments={ - "project_filename": project_filename, - }, - arg_names=["project_filename"], - method=qfieldcloud.qgis.process_projectfile.load_project_file, - return_names=["project"], - public_returns=["project"], - ), - Step( - name="Project Details", - arg_names=["project"], - method=qfieldcloud.qgis.process_projectfile.extract_project_details, - return_names=["project_details"], - output_names=["project_details"], - ), - Step( - name="Layer Validity Check", - arg_names=["project"], - method=qfieldcloud.qgis.process_projectfile.check_layer_validity, - return_names=["layers_summary"], - output_names=["layers_summary"], - ), - Step( - name="Generate Thumbnail Image", - arguments={ - "thumbnail_filename": Path("/io/thumbnail.png"), - }, - arg_names=["project", "thumbnail_filename"], - method=qfieldcloud.qgis.process_projectfile.generate_thumbnail, - ), - ] - - qfieldcloud.qgis.utils.run_task( - steps, + workflow = Workflow( + id="process_projectfile", + name="Process Projectfile", + version="2.0", + steps=[ + Step( + id="download_project_directory", + name="Download Project Directory", + arguments={ + "project_id": args.projectid, + "download_dir": WorkDirPath(mkdir=True), + }, + method=_download_project_directory, + return_names=["tmp_project_dir"], + ), + Step( + id="project_validity_check", + name="Project Validity Check", + arguments={ + "project_filename": WorkDirPath("files", args.project_file), + }, + method=qfieldcloud.qgis.process_projectfile.check_valid_project_file, + ), + Step( + id="opening_check", + name="Opening Check", + arguments={ + "project_filename": WorkDirPath("files", args.project_file), + }, + method=qfieldcloud.qgis.process_projectfile.load_project_file, + return_names=["project"], + ), + Step( + id="project_details", + name="Project Details", + arguments={ + "project": StepOutput("opening_check", "project"), + }, + method=qfieldcloud.qgis.process_projectfile.extract_project_details, + return_names=["project_details"], + outputs=["project_details"], + ), + Step( + id="generate_thumbnail_image", + name="Generate Thumbnail Image", + arguments={ + "project": StepOutput("opening_check", "project"), + "thumbnail_filename": Path("/io/thumbnail.png"), + }, + method=qfieldcloud.qgis.process_projectfile.generate_thumbnail, + ), + ], + ) + + qfieldcloud.qgis.utils.run_workflow( + workflow, Path("/io/feedback.json"), ) @@ -435,10 +482,10 @@ def cmd_process_projectfile(args): subparsers = parser.add_subparsers(dest="cmd") - parser_export = subparsers.add_parser("export", help="Export a project") - parser_export.add_argument("projectid", type=str, help="projectid") - parser_export.add_argument("project_file", type=str, help="QGIS project file path") - 
parser_export.set_defaults(func=cmd_export_project) + parser_package = subparsers.add_parser("package", help="Package a project") + parser_package.add_argument("projectid", type=str, help="projectid") + parser_package.add_argument("project_file", type=str, help="QGIS project file path") + parser_package.set_defaults(func=cmd_package_project) parser_delta = subparsers.add_parser("delta_apply", help="Apply deltafile") parser_delta.add_argument("projectid", type=str, help="projectid") diff --git a/docker-qgis/libqfieldsync b/docker-qgis/libqfieldsync index e45bf4a94..7389735b8 160000 --- a/docker-qgis/libqfieldsync +++ b/docker-qgis/libqfieldsync @@ -1 +1 @@ -Subproject commit e45bf4a94d70fd03cebc5f2d10faaac45d35969b +Subproject commit 7389735b8f270633c0cc5537eeae41a5950e650b diff --git a/docker-qgis/process_projectfile.py b/docker-qgis/process_projectfile.py index e69d67a40..70289cdb1 100644 --- a/docker-qgis/process_projectfile.py +++ b/docker-qgis/process_projectfile.py @@ -1,14 +1,13 @@ import logging import sys from pathlib import Path -from typing import Dict, List +from typing import Dict from xml.etree import ElementTree from qfieldcloud.qgis.utils import ( BaseException, - get_layer_filename, - has_ping, - is_localhost, + get_layers_data, + layers_data_to_string, start_app, ) from qgis.core import QgsMapRendererParallelJob, QgsMapSettings, QgsProject @@ -82,9 +81,11 @@ def extract_project_details(project: QgsProject) -> Dict[str, str]: """Extract project details""" logging.info("Extract project details...") - map_settings = QgsMapSettings() details = {} + logging.info("Reading QGIS project file...") + map_settings = QgsMapSettings() + def on_project_read(doc): r, _success = project.readNumEntry("Gui", "/CanvasColorRedPart", 255) g, _success = project.readNumEntry("Gui", "/CanvasColorGreenPart", 255) @@ -116,74 +117,16 @@ def on_project_read(doc): details["crs"] = project.crs().authid() details["project_name"] = project.title() - return details + logging.info("Extracting layer and datasource details...") + details["layers_by_id"] = get_layers_data(project) + details["ordered_layer_ids"] = list(details["layers_by_id"].keys()) -def check_layer_validity(project: QgsProject) -> List: - logging.info("Check layer and datasource validity...") - - has_invalid_layers = False - layers_summary = [] - - for layer in project.mapLayers().values(): - error = layer.error() - layer_data = { - "id": layer.name(), - "name": layer.name(), - "crs": layer.crs().authid() if layer.crs() else None, - "is_valid": layer.isValid(), - "datasource": layer.dataProvider().uri().uri() - if layer.dataProvider() - else None, - "error_summary": error.summary() if error.messageList() else "", - "error_message": layer.error().message(), - "filename": get_layer_filename(layer), - "provider_error_summary": None, - "provider_error_message": None, - } - layers_summary.append(layer_data) - - if layer_data["is_valid"]: - continue - - has_invalid_layers = True - data_provider = layer.dataProvider() - - if data_provider: - data_provider_error = data_provider.error() - - layer_data["provider_error_summary"] = ( - data_provider_error.summary() - if data_provider_error.messageList() - else "" - ) - layer_data["provider_error_message"] = data_provider_error.message() - - if not layer_data["provider_error_summary"]: - service = data_provider.uri().service() - if service: - layer_data[ - "provider_error_summary" - ] = f'Unable to connect to service "{service}"' - - host = data_provider.uri().host() - port = ( - 
int(data_provider.uri().port()) - if data_provider.uri().port() - else None - ) - if host and (is_localhost(host, port) or has_ping(host)): - layer_data[ - "provider_error_summary" - ] = f'Unable to connect to host "{host}"' - - else: - layer_data["provider_error_summary"] = "No data provider available" - - if has_invalid_layers: - raise InvalidLayersException(layers_summary=layers_summary) - - return layers_summary + logging.info( + f'QGIS project layer checks\n{layers_data_to_string(details["layers_by_id"])}', + ) + + return details def generate_thumbnail(project: QgsProject, thumbnail_filename: Path) -> None: diff --git a/docker-qgis/requirements.txt b/docker-qgis/requirements.txt index 5d28f8f0c..dd819318e 100644 --- a/docker-qgis/requirements.txt +++ b/docker-qgis/requirements.txt @@ -1,4 +1,5 @@ jsonschema>=3.2.0,<3.3 typing-extensions>=3.7.4.3,<3.7.5 boto3>=1.16.28,<1.17 +tabulate==v0.8.9 sentry-sdk diff --git a/docker-qgis/tests/test_qgis.py b/docker-qgis/tests/test_qgis.py index 4f19b1f4f..eb9d1e7b3 100644 --- a/docker-qgis/tests/test_qgis.py +++ b/docker-qgis/tests/test_qgis.py @@ -6,7 +6,7 @@ class QfcTestCase(unittest.TestCase): - def test_export(self): + def test_package(self): project_directory = self.data_directory_path("simple_project") output_directory = tempfile.mkdtemp() @@ -21,7 +21,7 @@ def test_export(self): "qgis", "bash", "-c", - "./entrypoint.sh export /io/project/project.qgs /io/output", + "./entrypoint.sh package /io/project/project.qgs /io/output", ] subprocess.check_call( diff --git a/docker-qgis/utils.py b/docker-qgis/utils.py index 1ef2749a6..57c54da03 100644 --- a/docker-qgis/utils.py +++ b/docker-qgis/utils.py @@ -1,4 +1,5 @@ import atexit +import inspect import json import logging import os @@ -13,6 +14,7 @@ from pathlib import Path from typing import IO, Any, Callable, Dict, List, Optional, Union +from libqfieldsync.layer import LayerSource from qgis.core import ( Qgis, QgsApplication, @@ -22,6 +24,7 @@ QgsProviderRegistry, ) from qgis.PyQt import QtCore, QtGui +from tabulate import tabulate qgs_stderr_logger = logging.getLogger("QGIS_STDERR") qgs_stderr_logger.setLevel(logging.DEBUG) @@ -109,7 +112,9 @@ def start_app(): global QGISAPP if QGISAPP is None: - qgs_stderr_logger.info("Starting QGIS app...") + qgs_stderr_logger.info( + f"Starting QGIS app version {Qgis.versionInt()} ({Qgis.devVersion()})..." + ) argvb = [] # Note: QGIS_PREFIX_PATH is evaluated in QgsApplication - @@ -118,14 +123,13 @@ def start_app(): QGISAPP = QgsApplication(argvb, gui_flag) QtCore.qInstallMessageHandler(_qt_message_handler) - os.environ["QGIS_CUSTOM_CONFIG_PATH"] = tempfile.mkdtemp( - "", "QGIS-PythonTestConfigPath" - ) + os.environ["QGIS_CUSTOM_CONFIG_PATH"] = tempfile.mkdtemp("", "QGIS_CONFIG") QGISAPP.initQgis() QtCore.qInstallMessageHandler(_qt_message_handler) QgsApplication.messageLog().messageReceived.connect(_write_log_message) + # make sure the app is closed, otherwise the container exists with non-zero @atexit.register def exitQgis(): stop_app() @@ -139,33 +143,122 @@ def stop_app(): """ global QGISAPP - QGISAPP.exitQgis() - del QGISAPP + # note that if this function is called from @atexit.register, the globals are cleaned up + if "QGISAPP" not in globals(): + return + + if QGISAPP is not None: + qgs_stderr_logger.info("Stopping QGIS app...") + QGISAPP.exitQgis() + del QGISAPP + + +class WorkflowValidationException(Exception): + ... 
+ + +class Workflow: + def __init__( + self, + id: str, + version: str, + name: str, + steps: List["Step"], + description: str = "", + ): + self.id = id + self.version = version + self.name = name + self.description = description + self.steps = steps + + self.validate() + + def validate(self): + if not self.steps: + raise WorkflowValidationException( + f'The workflow "{self.id}" should contain at least one step.' + ) + + all_step_returns = {} + for step in self.steps: + param_names = [] + sig = inspect.signature(step.method) + for param in sig.parameters.values(): + if ( + param.kind != inspect.Parameter.KEYWORD_ONLY + and param.kind != inspect.Parameter.POSITIONAL_OR_KEYWORD + ): + raise WorkflowValidationException( + f'The workflow "{self.id}" method "{step.method.__name__}" has a non keyword parameter "{param.name}".' + ) + + if param.name not in step.arguments: + raise WorkflowValidationException( + f'The workflow "{self.id}" method "{step.method.__name__}" has an argument "{param.name}" that is not available in the step definition "arguments", expected one of {list(step.arguments.keys())}.' + ) + + param_names.append(param.name) + + for name, value in step.arguments.items(): + if isinstance(value, StepOutput): + if value.step_id not in all_step_returns: + raise WorkflowValidationException( + f'The workflow "{self.id}" has step "{step.id}" that requires a non-existing step return value "{value.step_id}.{value.return_name}" for argument "{name}". Previous step with that id does not exist.' + ) + + if value.return_name not in all_step_returns[value.step_id]: + raise WorkflowValidationException( + f'The workflow "{self.id}" has step "{step.id}" that requires a non-existing step return value "{value.step_id}.{value.return_name}" for argument "{name}". Previous step with that id found, but returns no value with such name.' + ) + + if name not in param_names: + raise WorkflowValidationException( + f'The workflow "{self.id}" method "{step.method.__name__}" receives a parameter "{name}" that is not available in the method definition, expected one of {param_names}.' + ) + + all_step_returns[step.id] = all_step_returns.get(step.id, step.return_names) class Step: def __init__( self, + id: str, name: str, method: Callable, arguments: Dict[str, Any] = {}, - arg_names: List[str] = [], return_names: List[str] = [], - output_names: List[str] = [], - public_returns: List[str] = [], + outputs: List[str] = [], ): + self.id = id self.name = name self.method = method self.arguments = arguments - self.arg_names = arg_names # names of method return values self.return_names = return_names - # names of method return values that will be part of the outputs - self.output_names = output_names - # names of method return values that will be available in arg_names for the next steps - self.public_returns = public_returns + # names of method return values that will be part of the outputs. They are assumed to be safe to be shown to the user. 
+        self.outputs = outputs
         self.stage = 0
-        self.outputs = {}
+
+
+class StepOutput:
+    def __init__(self, step_id: str, return_name: str):
+        self.step_id = step_id
+        self.return_name = return_name
+
+
+class WorkDirPath:
+    def __init__(self, *parts: str, mkdir: bool = False) -> None:
+        self.parts = parts
+        self.mkdir = mkdir
+
+    def eval(self, root: Path) -> Path:
+        path = root.joinpath(*self.parts)
+
+        if self.mkdir:
+            path.mkdir(parents=True, exist_ok=True)
+
+        return path
 
 
 class BaseException(Exception):
@@ -198,17 +291,20 @@ def is_localhost(hostname: str, port: int = None) -> bool:
     """returns True if the hostname points to the localhost, otherwise False."""
     if port is None:
         port = 22  # no port specified, lets just use the ssh port
-    hostname = socket.getfqdn(hostname)
-    if hostname in ("localhost", "0.0.0.0"):
-        return True
-    localhost = socket.gethostname()
-    localaddrs = socket.getaddrinfo(localhost, port)
-    targetaddrs = socket.getaddrinfo(hostname, port)
-    for (_family, _socktype, _proto, _canonname, sockaddr) in localaddrs:
-        for (_rfamily, _rsocktype, _rproto, _rcanonname, rsockaddr) in targetaddrs:
-            if rsockaddr[0] == sockaddr[0]:
-                return True
-    return False
+    try:
+        hostname = socket.getfqdn(hostname)
+        if hostname in ("localhost", "0.0.0.0"):
+            return True
+        localhost = socket.gethostname()
+        localaddrs = socket.getaddrinfo(localhost, port)
+        targetaddrs = socket.getaddrinfo(hostname, port)
+        for (_family, _socktype, _proto, _canonname, sockaddr) in localaddrs:
+            for (_rfamily, _rsocktype, _rproto, _rcanonname, rsockaddr) in targetaddrs:
+                if rsockaddr[0] == sockaddr[0]:
+                    return True
+        return False
+    except Exception:
+        return False
 
 
 def has_ping(hostname: str) -> bool:
@@ -275,8 +371,17 @@ def on_project_read(doc):
     return details
 
 
-def run_task(
-    steps: List[Step],
+def json_default(obj):
+    obj_str = type(obj).__qualname__
+    try:
+        obj_str += f" {str(obj)}"
+    except Exception:
+        obj_str += " <unprintable>"
+    return f"<{obj_str}>"
+
+
+def run_workflow(
+    workflow: Workflow,
     feedback_filename: Optional[Union[IO, Path]],
 ) -> Dict:
     """Executes the steps required to run a task and return structured feedback from the execution
@@ -288,55 +393,181 @@ def run_task(
     Some return values can used as arguments for next steps, as defined in `public_returns`.
 
     Args:
-        steps (List[Step]): ordered steps to be executed
+        workflow (Workflow): workflow to be executed
         feedback_filename (Optional[Union[IO, Path]]): write feedback to an IO device, to Path filename, or don't write it
     """
-    feedback = {}
+    feedback: Dict[str, Any] = {
+        "feedback_version": "2.0",
+        "workflow_version": workflow.version,
+        "workflow_id": workflow.id,
+        "workflow_name": workflow.name,
+    }  # it may be modified after the successful completion of each step.
- returned_arguments = {} + step_returns = {} try: - for step in steps: + root_workdir = Path(tempfile.mkdtemp()) + for step in workflow.steps: with logger_context(step): arguments = { - **returned_arguments, **step.arguments, } - args = [arguments[arg_name] for arg_name in step.arg_names] - return_values = step.method(*args) + for name, value in arguments.items(): + if isinstance(value, StepOutput): + arguments[name] = step_returns[value.step_id][value.return_name] + elif isinstance(value, WorkDirPath): + arguments[name] = value.eval(root_workdir) + + return_values = step.method(**arguments) return_values = ( return_values if len(step.return_names) > 1 else (return_values,) ) - return_map = {} + step_returns[step.id] = {} for name, value in zip(step.return_names, return_values): - return_map[name] = value - - for output_name in step.output_names: - step.outputs[output_name] = return_map[output_name] - - for return_name in step.public_returns: - returned_arguments[return_name] = return_map[return_name] + step_returns[step.id][name] = value except Exception as err: feedback["error"] = str(err) (_type, _value, tb) = sys.exc_info() feedback["error_stack"] = traceback.format_tb(tb) finally: - feedback["steps"] = [ - { + feedback["steps"] = [] + feedback["outputs"] = {} + + for step in workflow.steps: + step_feedback = { + "id": step.id, "name": step.name, "stage": step.stage, - "outputs": step.outputs, + "returns": {}, } - for step in steps - ] + + if step.stage == 2: + step_feedback["returns"] = step_returns[step.id] + feedback["outputs"][step.id] = {} + for output_name in step.outputs: + feedback["outputs"][step.id][output_name] = step_returns[step.id][ + output_name + ] + + feedback["steps"].append(step_feedback) if feedback_filename in [sys.stderr, sys.stdout]: print("Feedback:") - print(json.dump(feedback, feedback_filename, indent=2, sort_keys=True)) + print( + json.dump( + feedback, + feedback_filename, + indent=2, + sort_keys=True, + default=json_default, + ) + ) elif isinstance(feedback_filename, Path): with open(feedback_filename, "w") as f: - json.dump(feedback, f, indent=2, sort_keys=True) + json.dump(feedback, f, indent=2, sort_keys=True, default=json_default) return feedback + + +def get_layers_data(project: QgsProject) -> Dict[str, Dict]: + layers_by_id = {} + + for layer in project.mapLayers().values(): + error = layer.error() + layer_id = layer.id() + layer_source = LayerSource(layer) + layers_by_id[layer_id] = { + "id": layer_id, + "name": layer.name(), + "crs": layer.crs().authid() if layer.crs() else None, + "is_valid": layer.isValid(), + "datasource": layer.dataProvider().uri().uri() + if layer.dataProvider() + else None, + "type": layer.type(), + "type_name": layer.type().name, + "error_code": "no_error", + "error_summary": error.summary() if error.messageList() else "", + "error_message": layer.error().message(), + "filename": layer_source.filename, + "provider_error_summary": None, + "provider_error_message": None, + } + + if layers_by_id[layer_id]["is_valid"]: + continue + + data_provider = layer.dataProvider() + + if data_provider: + data_provider_error = data_provider.error() + + if data_provider.isValid(): + # there might be another reason why the layer is not valid, other than the data provider + layers_by_id[layer_id]["error_code"] = "invalid_layer" + else: + layers_by_id[layer_id]["error_code"] = "invalid_dataprovider" + + layers_by_id[layer_id]["provider_error_summary"] = ( + data_provider_error.summary() + if data_provider_error.messageList() + else "" + ) + 
layers_by_id[layer_id][ + "provider_error_message" + ] = data_provider_error.message() + + if not layers_by_id[layer_id]["provider_error_summary"]: + service = data_provider.uri().service() + if service: + layers_by_id[layer_id][ + "provider_error_summary" + ] = f'Unable to connect to service "{service}"' + + host = data_provider.uri().host() + port = ( + int(data_provider.uri().port()) + if data_provider.uri().port() + else None + ) + if host and (is_localhost(host, port) or has_ping(host)): + layers_by_id[layer_id][ + "provider_error_summary" + ] = f'Unable to connect to host "{host}"' + + else: + layers_by_id[layer_id]["error_code"] = "missing_dataprovider" + layers_by_id[layer_id][ + "provider_error_summary" + ] = "No data provider available" + + return layers_by_id + + +def layers_data_to_string(layers_by_id): + # Print layer check results + table = [ + [ + d["name"], + f'...{d["id"][-6:]}', + d["is_valid"], + d["error_code"], + d["error_summary"], + d["provider_error_summary"], + ] + for d in layers_by_id.values() + ] + + return tabulate( + table, + headers=[ + "Layer Name", + "Layer Id", + "Is Valid", + "Status", + "Error Summary", + "Provider Summary", + ], + ) diff --git a/docker-redis/redis.conf b/docker-redis/redis.conf index 7a5b9e44c..6669be89e 100644 --- a/docker-redis/redis.conf +++ b/docker-redis/redis.conf @@ -307,6 +307,7 @@ always-show-logo yes save 900 1 save 300 10 save 60 10000 +save "" # By default Redis will stop accepting writes if RDB snapshots are enabled # (at least one save point) and the latest background save failed. @@ -321,7 +322,7 @@ save 60 10000 # and persistence, you may want to disable this feature so that Redis will # continue to work as usual even if there are problems with disk, # permissions, and so forth. -stop-writes-on-bgsave-error yes +stop-writes-on-bgsave-error no # Compress string objects using LZF when dump .rdb databases? # For default that's set to 'yes' as it's almost always a win. 
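Reviewer note on the job runner changes in docker-qgis/utils.py above: the old run_task() plumbing (arg_names, output_names, public_returns) is replaced by the Workflow, Step, StepOutput and WorkDirPath classes plus run_workflow(). The following is a minimal sketch of how those pieces compose, assembled only from the signatures introduced in this diff; the two step functions and every name inside them are hypothetical and exist purely for illustration, they are not part of the patch.

# Hypothetical two-step workflow wired with the new API (sketch, not from this patch).
import sys
from pathlib import Path

from qfieldcloud.qgis.utils import Step, StepOutput, WorkDirPath, Workflow, run_workflow


def write_greeting(name: str, target_dir: Path) -> str:
    # hypothetical step: returns a value and writes a file into the per-run work dir
    greeting = f"Hello, {name}!"
    (target_dir / "greeting.txt").write_text(greeting)
    return greeting


def count_chars(greeting: str) -> int:
    # hypothetical step: consumes the return value of the previous step
    return len(greeting)


workflow = Workflow(
    id="demo_workflow",
    version="2.0",
    name="Demo Workflow",
    steps=[
        Step(
            id="greet",
            name="Write Greeting",
            arguments={
                "name": "QFieldCloud",
                # WorkDirPath is resolved by run_workflow to <temporary workdir>/files
                "target_dir": WorkDirPath("files", mkdir=True),
            },
            method=write_greeting,
            return_names=["greeting"],
        ),
        Step(
            id="count",
            name="Count Characters",
            arguments={
                # StepOutput reuses the "greeting" return value of the "greet" step
                "greeting": StepOutput("greet", "greeting"),
            },
            method=count_chars,
            return_names=["char_count"],
            outputs=["char_count"],  # ends up in feedback["outputs"]["count"]
        ),
    ],
)

# Workflow.validate() has already checked every argument against the step method signatures;
# passing sys.stderr makes run_workflow print the structured feedback instead of writing a file.
run_workflow(workflow, sys.stderr)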
diff --git a/.github/check_envvars/check_envvars.py b/scripts/check_envvars.py similarity index 89% rename from .github/check_envvars/check_envvars.py rename to scripts/check_envvars.py index 01a78b4d4..a5fcf9fe9 100755 --- a/.github/check_envvars/check_envvars.py +++ b/scripts/check_envvars.py @@ -1,4 +1,4 @@ -#!/bin/env python +#!/bin/env python3 import argparse import re from pathlib import Path @@ -18,7 +18,13 @@ def get_env_varnames_from_envfile(filename: str) -> Set[str]: if len(line.strip()) == 0: continue - result.add(line.strip().split("=")[0]) + variable_name = line.strip().split("=")[0] + + # not settings + if variable_name in ["COMPOSE_FILE", "COMPOSE_PATH_SEPARATOR"]: + continue + + result.add(variable_name) return result @@ -113,10 +119,17 @@ def get_env_varnames_from_k8s_environments(search_path: str) -> Dict[str, List[s type=str, help="Directory containing k8s configuration and secret files.", ) + parser.add_argument( + "--ignored-varnames", + type=str, + nargs="*", + help="Ignored varnames.", + ) args = parser.parse_args() problems = [] envfile_vars = get_env_varnames_from_envfile(args.envfile) + ignored_varnames = args.ignored_varnames or [] if args.docker_compose_dir: dockercompose_vars = get_env_varnames_from_docker_compose_files( @@ -124,6 +137,9 @@ def get_env_varnames_from_k8s_environments(search_path: str) -> Dict[str, List[s ) for varname in envfile_vars.difference(set(dockercompose_vars.keys())): + if varname in ignored_varnames: + continue + if varname in envfile_vars: problems.append( f'Envvar "{varname}" is defined in the .env file, but not found in any docker-compose file.' @@ -141,6 +157,9 @@ def get_env_varnames_from_k8s_environments(search_path: str) -> Dict[str, List[s ] for varname in envfile_vars.difference(set(k8s_vars.keys())): + if varname in ignored_varnames: + continue + if varname in envfile_vars: problems.append( f'Envvar "{varname}" is defined in the .env file, but not found in the any k8s configuration(s) and secret(s).' @@ -152,8 +171,12 @@ def get_env_varnames_from_k8s_environments(search_path: str) -> Dict[str, List[s ) for varname, occurrences in k8s_vars.items(): + if varname in ignored_varnames: + continue + for environment in occurrences: if environment not in k8s_environments: + problems.append( f'Envvar "{varname}" should be in all k8s environments, but missing not found neither in configuration or secrets of "{environment}".' ) diff --git a/scripts/check_envvars.sh b/scripts/check_envvars.sh new file mode 100755 index 000000000..92858c0e4 --- /dev/null +++ b/scripts/check_envvars.sh @@ -0,0 +1,4 @@ +#!/bin/bash -e + +pipenv run pip install pyyaml +pipenv run python scripts/check_envvars.py .env.example --docker-compose-dir . diff --git a/scripts/init_letsencrypt.sh b/scripts/init_letsencrypt.sh new file mode 100755 index 000000000..693f96172 --- /dev/null +++ b/scripts/init_letsencrypt.sh @@ -0,0 +1,43 @@ +#!/bin/bash + +set -e + +if ! [ -x "$(command -v docker-compose)" ]; then + echo 'Error: docker-compose is not installed.' >&2 + exit 1 +fi + +eval $(egrep "^[^#;]" .env | xargs -d'\n' -n1 | sed -E 's/(\w+)=(.*)/export \1='"'"'\2'"'"'/g') + +CONFIG_PATH="./conf/nginx" + +if [ ! -e "$CONFIG_PATH/options-ssl-nginx.conf" ] || [ ! -e "$CONFIG_PATH/ssl-dhparams.pem" ]; then + echo "### Downloading recommended TLS parameters ..." 
+ curl -s https://raw.githubusercontent.com/certbot/certbot/master/certbot-nginx/certbot_nginx/_internal/tls_configs/options-ssl-nginx.conf > "$CONFIG_PATH/options-ssl-nginx.conf" + curl -s https://raw.githubusercontent.com/certbot/certbot/master/certbot/certbot/ssl-dhparams.pem > "$CONFIG_PATH/ssl-dhparams.pem" + echo +fi + +echo "### Requesting Let's Encrypt certificate for $QFIELDCLOUD_HOST ..." +domain_args="-d ${QFIELDCLOUD_HOST}" + +# Enable staging mode if needed +if [ $LETSENCRYPT_STAGING != "0" ]; then staging_arg="--staging"; fi + +docker-compose run --rm --entrypoint "\ + certbot certonly --webroot -w /var/www/certbot \ + $staging_arg \ + $domain_args \ + --email $LETSENCRYPT_EMAIL \ + --rsa-key-size $LETSENCRYPT_RSA_KEY_SIZE \ + --agree-tos \ + --force-renewal" certbot +echo + +echo "### Copy the certificate and key to their final destination ..." +cp conf/certbot/conf/live/${QFIELDCLOUD_HOST}/fullchain.pem conf/nginx/certs/${QFIELDCLOUD_HOST}.pem +cp conf/certbot/conf/live/${QFIELDCLOUD_HOST}/privkey.pem conf/nginx/certs/${QFIELDCLOUD_HOST}-key.pem +echo + +echo "### Reloading nginx ..." +docker-compose exec nginx nginx -s reload
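Closing note for reviewers: after this patch every entrypoint command records a version 2.0 feedback file. The snippet below approximates what /io/feedback.json contains for the package_project workflow; the key names are taken from run_workflow() in docker-qgis/utils.py above, while the values are invented examples and the reading of stage == 2 as "step completed" is inferred from how step_returns is filled, so treat it as a sketch rather than captured output.

# Approximate feedback.json shape (illustrative values only, not from a real run).
feedback = {
    "feedback_version": "2.0",
    "workflow_version": "2.0",
    "workflow_id": "package_project",
    "workflow_name": "Package Project",
    # "error" and "error_stack" are added only when one of the steps raised an exception
    "steps": [
        {
            "id": "download_project_directory",
            "name": "Download Project Directory",
            "stage": 2,  # inferred: 2 means the step ran to completion
            "returns": {"tmp_project_dir": "/tmp/tmp1a2b3c/files"},
        },
        {
            "id": "qgis_layers_data",
            "name": "QGIS Layers Data",
            "stage": 2,
            "returns": {"layers_by_id": {"points_ab12cd": {"name": "points", "is_valid": True}}},
        },
        # ... one entry per step, in workflow order; "returns" is filled only for completed steps ...
    ],
    # only the return values listed in Step(outputs=[...]) are repeated here
    "outputs": {
        "qgis_layers_data": {
            "layers_by_id": {"points_ab12cd": {"name": "points", "is_valid": True}},
        },
    },
}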