From 7b0bb1f31f8c6508b6716c99b34507df2039e50b Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Fri, 5 Nov 2021 02:29:34 +0200
Subject: [PATCH 001/185] Add check_envvars script as a pre-commit hook

---
 .github/workflows/test.yml                          |  6 +-----
 .pre-commit-config.yaml                             | 11 +++++++++++
 {.github/check_envvars => scripts}/check_envvars.py |  2 +-
 scripts/check_envvars.sh                            |  3 +++
 4 files changed, 16 insertions(+), 6 deletions(-)
 rename {.github/check_envvars => scripts}/check_envvars.py (99%)
 create mode 100755 scripts/check_envvars.sh

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 0d3513206..1a60a9a11 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -20,12 +20,8 @@ jobs:
           ln -s docker-compose.override.local.yml docker-compose.override.yml
       - name: Check code formatting
         run: |
-          pipenv install pre_commit
+          pipenv install pre_commit pyyaml
           pipenv run python -m pre_commit run --all-files
-      - name: Check environment variables
-        run: |
-          pipenv install pyyaml
-          pipenv run python .github/check_envvars/check_envvars.py .env.example --docker-compose-dir .
      - name: Export the env variables file
        run: |
          cp .env.example .env
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 867574959..0494ecc37 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -39,3 +39,14 @@ repos:
     rev: "3.9.0"
     hooks:
       - id: flake8
+
+  # Make sure the envvars from the .env.example are available in the docker-compose configurations
+  - repo: local
+    hooks:
+      - id: check_envvars
+        name: Check envvars
+        description: Make sure the envvars from the .env.example are available in the docker-compose configurations
+        entry: scripts/check_envvars.sh
+        language: script
+        pass_filenames: false
+        types: [text]
diff --git a/.github/check_envvars/check_envvars.py b/scripts/check_envvars.py
similarity index 99%
rename from .github/check_envvars/check_envvars.py
rename to scripts/check_envvars.py
index 01a78b4d4..1623644f4 100755
--- a/.github/check_envvars/check_envvars.py
+++ b/scripts/check_envvars.py
@@ -1,4 +1,4 @@
-#!/bin/env python
+#!/bin/env python3
 import argparse
 import re
 from pathlib import Path
diff --git a/scripts/check_envvars.sh b/scripts/check_envvars.sh
new file mode 100755
index 000000000..37598dc75
--- /dev/null
+++ b/scripts/check_envvars.sh
@@ -0,0 +1,3 @@
+#!/bin/bash -e
+
+python3 scripts/check_envvars.py .env.example --docker-compose-dir .
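The hook registered above can also be exercised locally before pushing; a minimal sketch, assuming pre-commit and pyyaml are installed in the project's pipenv environment (the hook id `check_envvars` is the one declared in .pre-commit-config.yaml above):

    # register the git hooks once per clone
    pipenv run pre-commit install
    # run only the new hook against the whole working tree
    pipenv run pre-commit run check_envvars --all-files
    # or call the wrapper script directly, exactly as the hook entry does
    ./scripts/check_envvars.sh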
From 6bc17ab81bed1b91cfd89191889f7bb85c87af68 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Tue, 16 Nov 2021 11:34:14 +0200
Subject: [PATCH 002/185] Allow packaging projects that have no extent set

---
 docker-qgis/entrypoint.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py
index 794a5cad4..41805aa8e 100755
--- a/docker-qgis/entrypoint.py
+++ b/docker-qgis/entrypoint.py
@@ -221,6 +221,7 @@ def _call_qfieldsync_exporter(project_filepath: Path, export_dir: Path) -> Dict:
         vl_extent = qfieldcloud.qgis.utils.extract_project_details(project)[
             "extent"
         ]
+        vl_extent = QgsRectangle.fromWkt(vl_extent)
     except Exception as err:
         logger.error(
             "Failed to get the project extent from the current map canvas.",

From 1c2b76ebcf0d59201a69cd3c64720174ce17a916 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Fri, 19 Nov 2021 15:42:30 +0200
Subject: [PATCH 003/185] Only suggest teams that are part of the
 project-owning organization

---
 docker-app/qfieldcloud/core/querysets_utils.py     |  9 +++++++++
 docker-app/qfieldcloud/core/tests/test_queryset.py | 12 +++++++++---
 2 files changed, 18 insertions(+), 3 deletions(-)

diff --git a/docker-app/qfieldcloud/core/querysets_utils.py b/docker-app/qfieldcloud/core/querysets_utils.py
index 59d0cd11e..a86e32941 100644
--- a/docker-app/qfieldcloud/core/querysets_utils.py
+++ b/docker-app/qfieldcloud/core/querysets_utils.py
@@ -89,6 +89,15 @@ def get_users(

     if exclude_teams:
         users = users.exclude(user_type=User.TYPE_TEAM)
+    else:
+        if project:
+            users = users.filter(
+                ~Q(user_type=User.TYPE_TEAM)
+                | (
+                    Q(user_type=User.TYPE_TEAM)
+                    & Q(pk__in=Team.objects.filter(team_organization=project.owner))
+                )
+            )

     # one day conditions can be more than just pk check, please keep it for now
     conditions = []
diff --git a/docker-app/qfieldcloud/core/tests/test_queryset.py b/docker-app/qfieldcloud/core/tests/test_queryset.py
index a19cbd291..7ba8e1db2 100644
--- a/docker-app/qfieldcloud/core/tests/test_queryset.py
+++ b/docker-app/qfieldcloud/core/tests/test_queryset.py
@@ -156,20 +156,26 @@ def test_get_users(self):

         # should get all the users, that are not members or owner of a project
         queryset = querysets_utils.get_users("", project=self.project1)
-        self.assertEqual(len(queryset), 4)
+        self.assertEqual(len(queryset), 3)
         self.assertTrue(self.user2 in queryset)
         self.assertTrue(self.user3 in queryset)
         self.assertTrue(self.organization1.user_ptr in queryset)
+
+        # should get all the users that are not members or owners of the project, including teams of the project-owning organization
+        queryset = querysets_utils.get_users("", project=self.project5)
+        self.assertEqual(len(queryset), 4)
+        self.assertTrue(self.user1 in queryset)
+        self.assertTrue(self.user2 in queryset)
+        self.assertTrue(self.user3 in queryset)
+        self.assertTrue(self.team1.user_ptr in queryset)

         # should get all the users, that are not members or owner of a project and are not an organization
         queryset = querysets_utils.get_users(
             "", project=self.project1, exclude_organizations=True
         )
-        self.assertEqual(len(queryset), 3)
+        self.assertEqual(len(queryset), 2)
         self.assertTrue(self.user2 in queryset)
         self.assertTrue(self.user3 in queryset)
-        self.assertTrue(self.team1.user_ptr in queryset)

     def test_projects_roles_and_role_origins(self):
         """

From f6cb1ac79c24f95d6439036f9fdd0ed6ab8dd095 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Thu, 18 Nov 2021 13:47:42 +0200
Subject: [PATCH 004/185] Add last_apply_attempt_at and last_apply_attempt_by
 to deltas

---
 .../migrations/0049_auto_20211117_1843.py | 45 +++++++++++++++++++
 docker-app/qfieldcloud/core/models.py     |  6 +++
 docker-app/worker_wrapper/wrapper.py      |  2 +
 3 files changed, 53 insertions(+)
 create mode 100644 docker-app/qfieldcloud/core/migrations/0049_auto_20211117_1843.py

diff --git a/docker-app/qfieldcloud/core/migrations/0049_auto_20211117_1843.py b/docker-app/qfieldcloud/core/migrations/0049_auto_20211117_1843.py
new file mode 100644
index 000000000..c842fd287
--- /dev/null
+++ b/docker-app/qfieldcloud/core/migrations/0049_auto_20211117_1843.py
@@ -0,0 +1,45 @@
+# Generated by Django 3.2.9 on 2021-11-17 17:43
+
+import django.db.models.deletion
+from django.conf import settings
+from django.db import migrations, models
+
+
+def fill_in_last_apply_attempt_at(apps, schema_editor):
+    # Backfill the new last_apply_attempt_at/by fields from each delta's most recent apply job
+    Delta = apps.get_model("core", "Delta")
+    ApplyJobDelta = apps.get_model("core", "ApplyJobDelta")
+
+    for delta in Delta.objects.all():
+        jobs_qs = ApplyJobDelta.objects.filter(delta=delta)
+
+        if jobs_qs.count():
+            job_delta = jobs_qs.latest("apply_job__started_at")
+            delta.last_apply_attempt_at = job_delta.apply_job.started_at
+            delta.last_apply_attempt_by = job_delta.apply_job.created_by
+            delta.save()
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("core", "0048_useraccount_notifs_frequency"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="delta",
+            name="last_apply_attempt_at",
+            field=models.DateTimeField(null=True),
+        ),
+        migrations.AddField(
+            model_name="delta",
+            name="last_apply_attempt_by",
+            field=models.ForeignKey(
+                null=True,
+                on_delete=django.db.models.deletion.CASCADE,
+                to=settings.AUTH_USER_MODEL,
+            ),
+        ),
+        migrations.RunPython(fill_in_last_apply_attempt_at, migrations.RunPython.noop),
+    ]
diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py
index 289356e31..cd23297e6 100644
--- a/docker-app/qfieldcloud/core/models.py
+++ b/docker-app/qfieldcloud/core/models.py
@@ -1011,6 +1011,12 @@ class Status(models.TextChoices):
     )
     last_feedback = JSONField(null=True)
     last_modified_pk = models.TextField(null=True)
+    last_apply_attempt_at = models.DateTimeField(null=True)
+    last_apply_attempt_by = models.ForeignKey(
+        User,
+        on_delete=models.CASCADE,
+        null=True,
+    )
     created_at = models.DateTimeField(auto_now_add=True)
     updated_at = models.DateTimeField(auto_now=True)
     created_by = models.ForeignKey(
diff --git a/docker-app/worker_wrapper/wrapper.py b/docker-app/worker_wrapper/wrapper.py
index fece62850..45d958e16 100644
--- a/docker-app/worker_wrapper/wrapper.py
+++ b/docker-app/worker_wrapper/wrapper.py
@@ -331,6 +331,8 @@ def after_docker_run(self) -> None:
                 last_status=status,
                 last_feedback=feedback,
                 last_modified_pk=modified_pk,
+                last_apply_attempt_at=self.job.started_at,
+                last_apply_attempt_by=self.job.created_by,
             )

             ApplyJobDelta.objects.filter(

From 7b0927a76aaa0d53a9a434af67a0f511523e7f17 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 27 Oct 2021 12:18:06 +0300
Subject: [PATCH 005/185] Added step ids

---
 docker-qgis/entrypoint.py | 12 ++++++++++++
 docker-qgis/utils.py      |  2 ++
 2 files changed, 14 insertions(+)

diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py
index 41805aa8e..3c0f3f0ca 100755
--- a/docker-qgis/entrypoint.py
+++ b/docker-qgis/entrypoint.py
@@ -262,6 +262,7 @@ def cmd_export_project(args):
     steps: List[Step] = [
         Step(
+            id="download_project_directory",
             name="Download Project Directory",
             arguments={
                 "tmpdir": tmpdir,
                 "project_file": args.project_file,
             },
             method=_download_project_directory,
             return_names=["tmp_project_dir"],
             public_returns=["tmp_project_dir"],
         ),
         Step(
+            id="export_project",
             name="Export Project",
             arguments={
                 "project_filename": tmpdir.joinpath("files", args.project_file),
@@ -284,6 +286,7 @@ def cmd_export_project(args):
             method=_call_qfieldsync_exporter,
         ),
         Step(
+            id="upload_exported_project",
             name="Upload Exported Project",
             arguments={
                 "project_id": args.projectid,
@@ -306,6 +309,7 @@ def _apply_delta(args):
     files_dir = tmpdir.joinpath("files")
     steps: List[Step] = [
         Step(
+            id="download_project_directory",
             name="Download Project Directory",
             arguments={
                 "project_id": args.projectid,
@@ -317,6 +321,7 @@ def _apply_delta(args):
             public_returns=["tmp_project_dir"],
         ),
         Step(
+            id="apply_deltas",
             name="Apply Deltas",
             arguments={
                 "project_filename": tmpdir.joinpath("files", args.project_file),
@@ -335,6 +340,7 @@ def _apply_delta(args):
             output_names=["delta_feedback"],
         ),
         Step(
+            id="upload_exported_project",
             name="Upload Exported Project",
             arguments={
                 "project_id": args.projectid,
@@ -360,6 +366,7 @@ def cmd_process_projectfile(args):
     project_filename = tmpdir.joinpath("files", project_file)
     steps: List[Step] = [
         Step(
+            id="download_project_directory",
             name="Download Project Directory",
             arguments={
                 "project_id": project_id,
@@ -371,6 +378,7 @@ def cmd_process_projectfile(args):
             public_returns=["tmp_project_dir"],
         ),
         Step(
+            id="project_validity_check",
             name="Project Validity Check",
             arguments={
                 "project_filename": project_filename,
@@ -379,6 +387,7 @@ def cmd_process_projectfile(args):
             method=qfieldcloud.qgis.process_projectfile.check_valid_project_file,
         ),
         Step(
+            id="opening_check",
             name="Opening Check",
             arguments={
                 "project_filename": project_filename,
@@ -389,6 +398,7 @@ def cmd_process_projectfile(args):
             public_returns=["project"],
         ),
         Step(
+            id="project_details",
             name="Project Details",
             arg_names=["project"],
             method=qfieldcloud.qgis.process_projectfile.extract_project_details,
@@ -396,6 +406,7 @@ def cmd_process_projectfile(args):
             output_names=["project_details"],
         ),
         Step(
+            id="layer_validity_check",
             name="Layer Validity Check",
             arg_names=["project"],
             method=qfieldcloud.qgis.process_projectfile.check_layer_validity,
@@ -403,6 +414,7 @@ def cmd_process_projectfile(args):
             output_names=["layers_summary"],
         ),
         Step(
+            id="generate_thumbnail_image",
             name="Generate Thumbnail Image",
             arguments={
                 "thumbnail_filename": Path("/io/thumbnail.png"),
diff --git a/docker-qgis/utils.py b/docker-qgis/utils.py
index 1ef2749a6..460735f09 100644
--- a/docker-qgis/utils.py
+++ b/docker-qgis/utils.py
@@ -146,6 +146,7 @@ def stop_app():
 class Step:
     def __init__(
         self,
+        id: str,
         name: str,
         method: Callable,
         arguments: Dict[str, Any] = {},
@@ -154,6 +155,7 @@ def __init__(
         output_names: List[str] = [],
         public_returns: List[str] = [],
     ):
+        self.id = id
         self.name = name
         self.method = method
         self.arguments = arguments

From 3815917dd55e1294e66c884b6959509af62a5dab Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 27 Oct 2021 12:20:27 +0300
Subject: [PATCH 006/185] Merge layer_validity and extract_project_details
 steps

---
 docker-qgis/entrypoint.py          |  8 -------
 docker-qgis/process_projectfile.py | 38 ++++++++++++++++--------------
 2 files changed, 20 insertions(+), 26 deletions(-)

diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py
index 3c0f3f0ca..d2974d520 100755
--- a/docker-qgis/entrypoint.py
+++ b/docker-qgis/entrypoint.py
@@ -405,14 +405,6 @@ def cmd_process_projectfile(args):
             return_names=["project_details"],
             output_names=["project_details"],
         ),
-        Step(
-            id="layer_validity_check",
-            name="Layer Validity Check",
-            arg_names=["project"],
-            method=qfieldcloud.qgis.process_projectfile.check_layer_validity,
-            return_names=["layers_summary"],
-            output_names=["layers_summary"],
-        ),
         Step(
             id="generate_thumbnail_image",
             name="Generate Thumbnail Image",
             arguments={
                 "thumbnail_filename": Path("/io/thumbnail.png"),
diff --git a/docker-qgis/process_projectfile.py b/docker-qgis/process_projectfile.py
index ab242f579..e1e5e6dc4 100644
--- a/docker-qgis/process_projectfile.py
+++ b/docker-qgis/process_projectfile.py
@@ -1,16 +1,11 @@
 import logging
 import sys
 from pathlib import Path
-from typing import Dict, List
+from typing import Dict
 from xml.etree import ElementTree

-from qfieldcloud.qgis.utils import (
-    BaseException,
-    get_layer_filename,
-    has_ping,
-    is_localhost,
-    start_app,
-)
+from libqfieldsync.layer import LayerSource
+from qfieldcloud.qgis.utils import BaseException, has_ping, is_localhost, start_app
 from qgis.core import QgsMapRendererParallelJob, QgsMapSettings, QgsProject
 from qgis.PyQt.QtCore import QEventLoop, QSize
 from qgis.PyQt.QtGui import QColor
@@ -82,9 +77,11 @@ def extract_project_details(project: QgsProject) -> Dict[str, str]:
     """Extract project details"""
     logging.info("Extract project details...")

-    map_settings = QgsMapSettings()
     details = {}

+    logging.info("Reading QGIS project file...")
+    map_settings = QgsMapSettings()
+
     def on_project_read(doc):
         r, _success = project.readNumEntry("Gui", "/CanvasColorRedPart", 255)
         g, _success = project.readNumEntry("Gui", "/CanvasColorGreenPart", 255)
@@ -116,17 +113,15 @@ def on_project_read(doc):
     details["crs"] = project.crs().authid()
     details["project_name"] = project.title()

-    return details
-
-
-def check_layer_validity(project: QgsProject) -> List:
-    logging.info("Check layer and datasource validity...")
+    logging.info("Extracting layer and datasource details...")

     has_invalid_layers = False
+    has_online_layers = False
     layers_summary = []

     for layer in project.mapLayers().values():
         error = layer.error()
+        layer_source = LayerSource(layer)
         layer_data = {
             "id": layer.name(),
             "name": layer.name(),
@@ -136,12 +131,15 @@ def check_layer_validity(project: QgsProject) -> List:
             else None,
             "error_summary": error.summary() if error.messageList() else "",
             "error_message": layer.error().message(),
-            "filename": get_layer_filename(layer),
+            "filename": layer_source.filename,
             "provider_error_summary": None,
             "provider_error_message": None,
         }
         layers_summary.append(layer_data)

+        if not layer_data["filename"]:
+            has_online_layers = True
+
         if layer_data["is_valid"]:
             continue

@@ -176,13 +174,17 @@ def check_layer_validity(project: QgsProject) -> List:
                     "provider_error_summary"
                 ] = f'Unable to connect to host "{host}"'

+            logging.info(
+                f'Layer "{layer.name()}" seems to be invalid: {layer_data["provider_error_summary"]}'
+            )
         else:
             layer_data["provider_error_summary"] = "No data provider available"

-    if has_invalid_layers:
-        raise InvalidLayersException(layers_summary=layers_summary)
+    details["layers"] = layers_summary
+    details["has_online_layers"] = has_online_layers
+    details["has_invalid_layers"] = has_invalid_layers

-    return layers_summary
+    return details


 def generate_thumbnail(project: QgsProject, thumbnail_filename: Path) -> None:

From b359d0b3cc3179bbbeeb204fa3b0e2c77809c35f Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 27 Oct 2021 13:07:46 +0300
Subject: [PATCH 007/185] Added a new packaging endpoint

---
 .github/workflows/test.yml                       |   3 +-
 docker-app/qfieldcloud/core/admin.py             |   6 +-
 docker-app/qfieldcloud/core/exceptions.py        |   8 +-
 .../core/management/commands/dequeue.py          |   4 +-
 .../migrations/0050_auto_20211118_1150.py        |  50 +++
 docker-app/qfieldcloud/core/models.py            |  53 ++-
 docker-app/qfieldcloud/core/permissions_utils.py |   2 +-
 docker-app/qfieldcloud/core/serializers.py       | 131 ++++++-
 .../qfieldcloud/core/tests/test_packages.py     | 339 ++++++++++++++++++
 docker-app/qfieldcloud/core/urls.py              |  36 +-
 docker-app/qfieldcloud/core/utils.py             |  73 ++--
 docker-app/qfieldcloud/core/utils2/jobs.py       |  42 ++-
 docker-app/qfieldcloud/core/views/files_views.py |  32 +-
 .../qfieldcloud/core/views/jobs_views.py        |  73 ++++
 .../qfieldcloud/core/views/package_views.py     | 108 ++++++
 .../core/views/qfield_files_views.py            |  60 ++--
 docker-app/requirements.txt                      |   1 +
 docker-app/worker_wrapper/wrapper.py             |  29 +-
 docker-qgis/entrypoint.py                        |  38 +-
 docker-qgis/process_projectfile.py               |  43 +--
 docker-qgis/tests/test_qgis.py                   |   4 +-
 scripts/check_envvars.sh                         |   2 +-
 22 files changed, 1009 insertions(+), 128 deletions(-)
 create mode 100644 docker-app/qfieldcloud/core/migrations/0050_auto_20211118_1150.py
 create mode 100644 docker-app/qfieldcloud/core/tests/test_packages.py
 create mode 100644 docker-app/qfieldcloud/core/views/jobs_views.py
 create mode 100644 docker-app/qfieldcloud/core/views/package_views.py

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 1a60a9a11..07b255fa6 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -20,7 +20,8 @@ jobs:
           ln -s docker-compose.override.local.yml docker-compose.override.yml
       - name: Check code formatting
         run: |
-          pipenv install pre_commit pyyaml
+          pipenv install pre_commit
+          pipenv install pyyaml
           pipenv run python -m pre_commit run --all-files
       - name: Export the env variables file
         run: |
           cp .env.example .env
diff --git a/docker-app/qfieldcloud/core/admin.py b/docker-app/qfieldcloud/core/admin.py
index 261be8841..dce260500 100644
--- a/docker-app/qfieldcloud/core/admin.py
+++ b/docker-app/qfieldcloud/core/admin.py
@@ -16,10 +16,10 @@
     ApplyJob,
     ApplyJobDelta,
     Delta,
-    ExportJob,
     Geodb,
     Organization,
     OrganizationMember,
+    PackageJob,
     ProcessProjectfileJob,
     Project,
     ProjectCollaborator,
@@ -487,7 +487,7 @@ def response_change(self, request, delta):
         return super().response_change(request, delta)


-class ExportJobAdmin(admin.ModelAdmin):
+class PackageJobAdmin(admin.ModelAdmin):
     list_display = (
         "id",
         "project__owner",
@@ -738,7 +738,7 @@ def save_model(self, request, obj, form, change):
 admin.site.register(Project, ProjectAdmin)
 admin.site.register(Delta, DeltaAdmin)
 admin.site.register(ApplyJob, ApplyJobAdmin)
-admin.site.register(ExportJob, ExportJobAdmin)
+admin.site.register(PackageJob, PackageJobAdmin)
 admin.site.register(ProcessProjectfileJob, ProcessProjectfileJobAdmin)
 admin.site.register(Geodb, GeodbAdmin)

diff --git a/docker-app/qfieldcloud/core/exceptions.py b/docker-app/qfieldcloud/core/exceptions.py
index 4e6b5e1ec..23dd0945f 100644
--- a/docker-app/qfieldcloud/core/exceptions.py
+++ b/docker-app/qfieldcloud/core/exceptions.py
@@ -134,11 +134,11 @@ class InvalidJobError(QFieldCloudException):
     status_code = status.HTTP_400_BAD_REQUEST


-class QGISExportError(QFieldCloudException):
-    """Raised when the QGIS export of a project fails"""
+class QGISPackageError(QFieldCloudException):
+    """Raised when the QGIS package of a project fails"""

-    code = "qgis_export_error"
-    message = "QGIS export failed"
+    code = "qgis_package_error"
+    message = "QGIS package failed"
     status_code = status.HTTP_500_INTERNAL_SERVER_ERROR

     if "Unable to open file with QGIS" in message:
diff --git a/docker-app/qfieldcloud/core/management/commands/dequeue.py b/docker-app/qfieldcloud/core/management/commands/dequeue.py
index ef8204519..21b7fbb2e 100644
--- a/docker-app/qfieldcloud/core/management/commands/dequeue.py
+++ b/docker-app/qfieldcloud/core/management/commands/dequeue.py
@@ -9,7 +9,7 @@
 from qfieldcloud.core.utils2.db import use_test_db_if_exists
 from worker_wrapper.wrapper import (
     DeltaApplyJobRun,
-    ExportJobRun,
+    PackageJobRun,
     ProcessProjectfileJobRun,
 )

@@ -105,7 +105,7 @@ def run(self, job_id, *args, **options):
     def _run(self, job: Job):
         job_run_classes = {
-            Job.Type.EXPORT: ExportJobRun,
+            Job.Type.PACKAGE: PackageJobRun,
             Job.Type.DELTA_APPLY: DeltaApplyJobRun,
             Job.Type.PROCESS_PROJECTFILE: ProcessProjectfileJobRun,
         }
diff --git a/docker-app/qfieldcloud/core/migrations/0050_auto_20211118_1150.py b/docker-app/qfieldcloud/core/migrations/0050_auto_20211118_1150.py
new file mode 100644
index 000000000..6576284d7
--- /dev/null
+++ b/docker-app/qfieldcloud/core/migrations/0050_auto_20211118_1150.py
@@ -0,0 +1,50 @@
+# Generated by Django 3.2.8 on 2021-10-27 09:50
+
+from django.db import migrations, models
+
+
+def rename_export_to_package(apps, schema_editor):
+    Job = apps.get_model("core", "Job")
+    Job.objects.filter(type="export").update(type="package")
+
+
+def rename_package_to_export(apps, schema_editor):
+    Job = apps.get_model("core", "Job")
+    Job.objects.filter(type="package").update(type="export")
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("core", "0049_auto_20211117_1843"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="project",
+            name="data_last_packaged_at",
+            field=models.DateTimeField(blank=True, null=True),
+        ),
+        migrations.AddField(
+            model_name="project",
+            name="data_last_updated_at",
+            field=models.DateTimeField(blank=True, null=True),
+        ),
+        migrations.RenameModel(
+            old_name="ExportJob",
+            new_name="PackageJob",
+        ),
+        migrations.AlterField(
+            model_name="job",
+            name="type",
+            field=models.CharField(
+                choices=[
+                    ("package", "Package"),
+                    ("delta_apply", "Delta Apply"),
+                    ("process_projectfile", "Process QGIS Project File"),
+                ],
+                max_length=32,
+            ),
+        ),
+        migrations.RunPython(rename_export_to_package, rename_package_to_export),
+    ]
diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py
index cd23297e6..9830c4b96 100644
--- a/docker-app/qfieldcloud/core/models.py
+++ b/docker-app/qfieldcloud/core/models.py
@@ -20,6 +20,7 @@
 from django.dispatch import receiver
 from django.urls import reverse_lazy
 from django.utils.translation import gettext as _
+from model_utils.managers import InheritanceManager
 from qfieldcloud.core import geodb_utils, utils, validators
 from qfieldcloud.core.utils import get_s3_object_url
 from timezone_field import TimeZoneField
@@ -866,6 +867,11 @@ class Meta:
     )
     created_at = models.DateTimeField(auto_now_add=True)
     updated_at = models.DateTimeField(auto_now=True)
+
+    # NOTE we can track only the file based layers, WFS, WMS, PostGIS etc are impossible to track
+    data_last_updated_at = models.DateTimeField(blank=True, null=True)
+    data_last_packaged_at = models.DateTimeField(blank=True, null=True)
+
     overwrite_conflicts = models.BooleanField(
         default=True,
         help_text=_(
@@ -915,6 +921,40 @@ def files_count(self):
     def users(self):
         return User.objects.for_project(self)

+    @property
+    def has_online_vector_data(self) -> bool:
+        if not self.project_details:
+            return False
+
+        layers_by_id = self.project_details.get("layers_by_id", {})
+        has_online_vector_layers = False
+
+        for layer_data in layers_by_id.values():
+            if layer_data.get("type_name") == "VectorLayer" and not layer_data.get(
+                "filename", ""
+            ):
+                has_online_vector_layers = True
+                break
+
+        return has_online_vector_layers
+
+    @property
+    def can_repackage(self) -> bool:
+        return True
+
+    @property
+    def needs_repackaging(self) -> bool:
+        if (
+            not self.has_online_vector_data
+            and self.data_last_updated_at
+            and self.data_last_packaged_at
+        ):
+            # if all vector layers are file based and have been packaged after the last update, it is safe to say there are no modifications
+            return self.data_last_packaged_at > self.data_last_updated_at
+        else:
+            # if the project has online vector layers (PostGIS/WFS/etc) we cannot be sure if there are modification or not, so better say there are
+            return True
+

 @receiver(pre_delete, sender=Project)
 def delete_project(sender: Type[Project], instance: Project, **kwargs: Any) -> None:
@@ -1057,8 +1097,11 @@ def method(self):
 class Job(models.Model):
+
+    objects = InheritanceManager()
+
     class Type(models.TextChoices):
-        EXPORT = "export", _("Export")
+        PACKAGE = "package", _("Package")
         DELTA_APPLY = "delta_apply", _("Delta Apply")
         PROCESS_PROJECTFILE = "process_projectfile", _("Process QGIS Project File")

@@ -1093,14 +1136,14 @@ def short_id(self):
         return str(self.id)[0:8]


-class ExportJob(Job):
+class PackageJob(Job):
     def save(self, *args, **kwargs):
-        self.type = self.Type.EXPORT
+        self.type = self.Type.PACKAGE
         return super().save(*args, **kwargs)

     class Meta:
-        verbose_name = "Job: export"
-        verbose_name_plural = "Jobs: export"
+        verbose_name = "Job: package"
+        verbose_name_plural = "Jobs: package"


 class ProcessProjectfileJob(Job):
diff --git a/docker-app/qfieldcloud/core/permissions_utils.py b/docker-app/qfieldcloud/core/permissions_utils.py
index a7640d103..5100de072 100644
--- a/docker-app/qfieldcloud/core/permissions_utils.py
+++ b/docker-app/qfieldcloud/core/permissions_utils.py
@@ -362,7 +362,7 @@ def can_delete_collaborators(user: QfcUser, project: Project) -> bool:
     )


-def can_read_exportations(user: QfcUser, project: Project) -> bool:
+def can_read_packages(user: QfcUser, project: Project) -> bool:
     return user_has_project_roles(
         user,
         project,
diff --git a/docker-app/qfieldcloud/core/serializers.py b/docker-app/qfieldcloud/core/serializers.py
index cddd7c0fe..08c4ddff4 100644
--- a/docker-app/qfieldcloud/core/serializers.py
+++ b/docker-app/qfieldcloud/core/serializers.py
@@ -1,11 +1,14 @@
 from django.contrib.auth import get_user_model
 from qfieldcloud.authentication.models import AuthToken
+from qfieldcloud.core import exceptions
 from qfieldcloud.core.models import (
+    ApplyJob,
     Delta,
-    ExportJob,
     Job,
     Organization,
     OrganizationMember,
+    PackageJob,
+    ProcessProjectfileJob,
     Project,
     ProjectCollaborator,
     Team,
@@ -62,6 +65,10 @@ class Meta:
             "is_public",
             "created_at",
             "updated_at",
+            "data_last_packaged_at",
+            "data_last_updated_at",
+            "can_repackage",
+            "needs_repackaging",
             "user_role",
             "user_role_origin",
         )
@@ -305,5 +312,125 @@ def get_status(self, obj):
             return "STATUS_ERROR"

     class Meta:
-        model = ExportJob
+        model = PackageJob
         fields = ("status", "layers", "output")
+
+
+class JobMixin:
+    project_id = serializers.PrimaryKeyRelatedField(queryset=Project.objects.all())
+
+    def to_internal_value(self, data):
+        internal_data = super().to_internal_value(data)
+        internal_data["created_by"] = self.context["request"].user
+        internal_data["project"] = Project.objects.get(pk=data.get("project_id"))
+
+        return internal_data
+
+    def check_create_new_job(self):
+        ModelClass: Job = self.Meta.model
+        last_active_job = (
+            ModelClass.objects.filter(
+                status__in=[Job.Status.PENDING, Job.Status.QUEUED, Job.Status.STARTED]
+            )
+            .only("id")
+            .order_by("-started_at", "-created_at")
+            .last()
+        )
+
+        # check if there are other jobs already active
+        if last_active_job:
+            raise exceptions.APIError("Job of this type is already running.")
+
+    class Meta:
+        model = PackageJob
+        fields = (
+            "id",
+            "created_at",
+            "created_by",
+            "finished_at",
+            "project_id",
+            "started_at",
+            "status",
+            "type",
+            "updated_at",
+            "feedback",
+            "output",
+        )
+
+        read_only_fields = (
+            "id",
+            "created_at",
+            "created_by",
+            "finished_at",
+            "started_at",
+            "status",
+            "updated_at",
+            "feedback",
+            "output",
+        )
+
+
+class PackageJobSerializer(JobMixin, serializers.ModelSerializer):
+    def check_create_new_job(self):
+        super().check_create_new_job()
+        internal_value = self.to_internal_value(self.initial_data)
+
+        if not internal_value["project"].project_filename:
+            raise exceptions.NoQGISProjectError()
+
+    class Meta(JobMixin.Meta):
+        model = PackageJob
+
+
+class ApplyJobSerializer(JobMixin, serializers.ModelSerializer):
+    class Meta(JobMixin.Meta):
+        model = ApplyJob
+
+
+class ProcessProjectfileJobSerializer(JobMixin, serializers.ModelSerializer):
+    class Meta(JobMixin.Meta):
+        model = ProcessProjectfileJob
+
+
+class JobSerializer(serializers.ModelSerializer):
+    def check_create_new_job(self):
+        return True
+
+    def get_fields(self, *args, **kwargs):
+        fields = super().get_fields(*args, **kwargs)
+        request = self.context.get("request")
+
+        if request and "job_id" not in request.parser_context.get("kwargs", {}):
+            fields.pop("output", None)
+            fields.pop("feedback", None)
+            fields.pop("layers", None)
+
+        return fields
+
+    class Meta:
+        model = Job
+        fields = (
+            "id",
+            "created_at",
+            "created_by",
+            "finished_at",
+            "project_id",
+            "started_at",
+            "status",
+            "type",
+            "updated_at",
+            "feedback",
+            "output",
+        )
+        read_only_fields = (
+            "id",
+            "created_at",
+            "created_by",
+            "finished_at",
+            "started_at",
+            "status",
+            "updated_at",
+            "feedback",
+            "output",
+        )
+        order_by = "-created_at"
diff --git a/docker-app/qfieldcloud/core/tests/test_packages.py b/docker-app/qfieldcloud/core/tests/test_packages.py
new file mode 100644
index 000000000..7dec59df9
--- /dev/null
+++ b/docker-app/qfieldcloud/core/tests/test_packages.py
@@ -0,0 +1,339 @@
+import logging
+import os
+import tempfile
+import time
+from typing import List, Tuple
+
+import psycopg2
+import requests
+from django.http.response import HttpResponseRedirect
+from django.utils import timezone
+from qfieldcloud.authentication.models import AuthToken
+from qfieldcloud.core.geodb_utils import delete_db_and_role
+from qfieldcloud.core.models import Geodb, Job, Project, User
+from rest_framework import status
+from rest_framework.test import APITransactionTestCase
+
+from .utils import testdata_path
+
+logging.disable(logging.CRITICAL)
+
+
+class QfcTestCase(APITransactionTestCase):
+    def setUp(self):
+        # Create a user
+        self.user1 = User.objects.create_user(username="user1", password="abc123")
+
+        self.user2 = User.objects.create_user(username="user2", password="abc123")
+
+        self.token1 = AuthToken.objects.get_or_create(user=self.user1)[0]
+
+        # Create a project
+        self.project1 = Project.objects.create(
+            name="project1", is_public=False, owner=self.user1
+        )
+
+        try:
+            delete_db_and_role("test", self.user1.username)
+        except Exception:
+            pass
+
+        self.geodb = Geodb.objects.create(
+            user=self.user1,
+            dbname="test",
+            hostname="geodb",
+            port=5432,
+        )
+
+        self.conn = psycopg2.connect(
+            dbname="test",
+            user=os.environ.get("GEODB_USER"),
+            password=os.environ.get("GEODB_PASSWORD"),
+            host="geodb",
+            port=5432,
+        )
+
+    def tearDown(self):
+        self.conn.close()
+
+        # Remove all projects avoiding bulk delete in order to use
+        # the overridden delete() function in the model
+        for p in Project.objects.all():
+            p.delete()
+
+        User.objects.all().delete()
+        # Remove credentials
+        self.client.credentials()
+
+    def upload_files(
+        self,
+        token: str,
+        project: Project,
+        files: List[Tuple[str, str]],
+    ):
+        self.client.credentials(HTTP_AUTHORIZATION=f"Token {token}")
+        for local_filename, remote_filename in files:
+            if not local_filename:
+                continue
+
+            file = testdata_path(local_filename)
+            response = self.client.post(
+                f"/api/v1/files/{project.id}/{remote_filename}/",
+                {"file": open(file, "rb")},
+                format="multipart",
+            )
+            self.assertTrue(status.is_success(response.status_code))
+
+    def upload_files_and_check_package(
+        self,
+        token: str,
+        project: Project,
+        files: List[Tuple[str, str]],
+        expected_files: List[str],
+        job_create_error: Tuple[int, str] = None,
+        tempdir: str = None,
+        invalid_layers: List[str] = [],
+    ):
+        self.client.credentials(HTTP_AUTHORIZATION=f"Token {token}")
+
+        self.upload_files(token, project, files)
+
+        before_started_ts = timezone.now()
+
+        response = self.client.post(
+            "/api/v1/jobs/",
+            {
+                "project_id": project.id,
+                "type": Job.Type.PACKAGE,
+            },
+        )
+
+        if job_create_error:
+            self.assertEqual(response.status_code, job_create_error[0])
+            self.assertEqual(response.json()["code"], job_create_error[1])
+            return
+        else:
+            self.assertTrue(status.is_success(response.status_code))
+
+        job_id = response.json().get("id")
+
+        # Wait for the worker to finish
+        for _ in range(20):
+            time.sleep(3)
+            response = self.client.get(f"/api/v1/jobs/{job_id}/")
+            payload = response.json()
+
+            if payload["status"] == Job.Status.FINISHED:
+                response = self.client.get(f"/api/v1/packages/{project.id}/latest/")
+                package_payload = response.json()
+
+                self.assertLess(
+                    package_payload["packaged_at"], timezone.now().isoformat()
+                )
+                self.assertGreater(
+                    package_payload["packaged_at"],
+                    before_started_ts.isoformat(),
+                )
+
+                sorted_downloaded_files = [
+                    f["name"]
+                    for f in sorted(package_payload["files"], key=lambda k: k["name"])
+                ]
+                sorted_expected_files = sorted(expected_files)
+
+                self.assertListEqual(sorted_downloaded_files, sorted_expected_files)
+
+                if tempdir:
+                    for filename in expected_files:
+                        response = self.client.get(
+                            f"/api/v1/qfield-files/{self.project1.id}/project_qfield.qgs/"
+                        )
+                        local_file = os.path.join(tempdir, filename)
+
+                        self.assertIsInstance(response, HttpResponseRedirect)
+
+                        # We cannot use the self.client HTTP client, since it does not support
+                        # requests outside the current Django App
+                        # Using the rest_api_framework.RequestsClient is not much better, so better
+                        # use the `requests` module
+                        with requests.get(response.url, stream=True) as r:
+                            with open(local_file, "wb") as f:
+                                for chunk in r.iter_content():
+                                    f.write(chunk)
+
+                for layer_id in package_payload["layers"]:
+                    layer_data = package_payload["layers"][layer_id]
+
+                    if layer_id in invalid_layers:
+                        self.assertFalse(layer_data["valid"], layer_id)
+                    else:
+                        self.assertTrue(layer_data["valid"], layer_id)
+
+                return
+            elif payload["status"] == Job.Status.FAILED:
+                self.fail("Worker failed with error")
+
+        self.fail("Worker didn't finish")
+
+    def test_list_files_for_qfield(self):
+        cur = self.conn.cursor()
+        cur.execute("CREATE TABLE point (id integer, geometry geometry(point, 2056))")
+        self.conn.commit()
+        cur.execute(
+            "INSERT INTO point(id, geometry) VALUES(1, ST_GeomFromText('POINT(2725505 1121435)', 2056))"
+        )
+        self.conn.commit()
+
+        self.upload_files_and_check_package(
+            token=self.token1.key,
+            project=self.project1,
+            files=[
+                ("delta/project2.qgs", "project.qgs"),
+                ("delta/points.geojson", "points.geojson"),
+            ],
+            expected_files=["data.gpkg", "project_qfield.qgs"],
+        )
+
+    def test_list_files_missing_project_filename(self):
+        self.upload_files_and_check_package(
+            token=self.token1.key,
+            project=self.project1,
+            files=[
+                ("delta/points.geojson", "points.geojson"),
+            ],
+            job_create_error=(400, "no_qgis_project"),
+            expected_files=[],
+        )
+
+    def test_project_never_packaged(self):
+        self.upload_files(
+            token=self.token1.key,
+            project=self.project1,
+            files=[
+                ("delta/project2.qgs", "project.qgs"),
+            ],
+        )
+
+        response = self.client.get(f"/api/v1/packages/{self.project1.id}/latest/")
+
+        self.assertEqual(response.status_code, 400)
+        self.assertEqual(response.json()["code"], "invalid_job")
+
+    def test_download_file_for_qfield(self):
+        tempdir = tempfile.mkdtemp()
+
+        self.upload_files_and_check_package(
+            token=self.token1.key,
+            project=self.project1,
+            files=[
+                ("delta/nonspatial.csv", "nonspatial.csv"),
+                ("delta/testdata.gpkg", "testdata.gpkg"),
+                ("delta/points.geojson", "points.geojson"),
+                ("delta/polygons.geojson", "polygons.geojson"),
+                ("delta/project.qgs", "project.qgs"),
+            ],
+            expected_files=[
+                "data.gpkg",
+                "project_qfield.qgs",
+            ],
+            tempdir=tempdir,
+        )
+
+        local_file = os.path.join(tempdir, "project_qfield.qgs")
+        with open(local_file, "r") as f:
+            self.assertEqual(
+                f.readline().strip(),
+                "<!DOCTYPE qgis PUBLIC 'http://mrcc.com/qgis.dtd' 'SYSTEM'>",
+            )
+
+    def test_list_files_for_qfield_broken_file(self):
+        self.upload_files(
+            token=self.token1.key,
+            project=self.project1,
+            files=[
+                ("delta/broken.qgs", "broken.qgs"),
+            ],
+        )
+
+        response = self.client.post(
+            "/api/v1/jobs/",
+            {
+                "project_id": self.project1.id,
+                "type": Job.Type.PACKAGE,
+            },
+        )
+
+        self.assertTrue(status.is_success(response.status_code))
+        job_id = response.json().get("id")
+
+        # Wait for the worker to finish
+        for _ in range(10):
+            time.sleep(3)
+            response = self.client.get(
+                f"/api/v1/jobs/{job_id}/",
+            )
+            if response.json()["status"] == "failed":
+                return
+
+        self.fail("Worker didn't finish")
+
+    def test_downloaded_file_has_canvas_name(self):
+        tempdir = tempfile.mkdtemp()
+
+        self.upload_files_and_check_package(
+            token=self.token1.key,
+            project=self.project1,
+            files=[
+                ("delta/nonspatial.csv", "nonspatial.csv"),
+                ("delta/testdata.gpkg", "testdata.gpkg"),
+                ("delta/points.geojson", "points.geojson"),
+                ("delta/polygons.geojson", "polygons.geojson"),
+                ("delta/project.qgs", "project.qgs"),
+            ],
+            expected_files=[
+                "data.gpkg",
+                "project_qfield.qgs",
+            ],
+            tempdir=tempdir,
+        )
+
+        local_file = os.path.join(tempdir, "project_qfield.qgs")
+        with open(local_file, "r") as f:
+            for line in f:
+                if 'name="theMapCanvas"' in line:
+                    return
+
+    def test_download_project_with_broken_layer_datasources(self):
+        self.upload_files_and_check_package(
+            token=self.token1.key,
+            project=self.project1,
+            files=[
+                ("delta/points.geojson", "points.geojson"),
+                (
+                    "delta/project_broken_datasource.qgs",
+                    "project_broken_datasource.qgs",
+                ),
+            ],
+            expected_files=[
+                "data.gpkg",
+                "project_broken_datasource_qfield.qgs",
+            ],
+            invalid_layers=["surfacestructure_35131bca_337c_483b_b09e_1cf77b1dfb16"],
+        )
+
+    def test_filename_with_whitespace(self):
+        self.upload_files_and_check_package(
+            token=self.token1.key,
+            project=self.project1,
+            files=[
+                ("delta/nonspatial.csv", "nonspatial.csv"),
+                ("delta/testdata.gpkg", "testdata.gpkg"),
+                ("delta/points.geojson", "points.geojson"),
+                ("delta/polygons.geojson", "polygons.geojson"),
+                ("delta/project.qgs", "project.qgs"),
+            ],
+            expected_files=[
+                "data.gpkg",
+                "project_qfield.qgs",
+            ],
+        )
diff --git a/docker-app/qfieldcloud/core/urls.py b/docker-app/qfieldcloud/core/urls.py
index 9ec369c9e..51f7eb6b0 100644
--- a/docker-app/qfieldcloud/core/urls.py
+++ b/docker-app/qfieldcloud/core/urls.py
@@ -3,7 +3,9 @@
     collaborators_views,
     deltas_views,
     files_views,
+    jobs_views,
     members_views,
+    package_views,
     projects_views,
     qfield_files_views,
     status_views,
@@ -13,6 +15,29 @@

 router = DefaultRouter()
 router.register(r"projects", projects_views.ProjectViewSet, basename="project")
+router.register(r"jobs", jobs_views.JobViewSet, basename="jobs")
+
+"""
+TODO future URL refactor
+projects/
+projects/<project_id>/
+projects/<project_id>/files/
+projects/<project_id>/files/<filename>/
+projects/<project_id>/jobs/
+projects/<project_id>/jobs/<job_id>/
+projects/<project_id>/packages/
+projects/<project_id>/packages/latest/files/
+projects/<project_id>/packages/latest/files/<filename>/
+projects/<project_id>/deltas/
+projects/<project_id>/deltas/<delta_id>/
+projects/<project_id>/collaborators/
+organizations/
+organizations/<organization_id>/
+organizations/<organization_id>/members/
+organizations/<organization_id>/teams/
+organizations/<organization_id>/teams/<team_id>/members/
+"""

 urlpatterns = [
     path("projects/public/", projects_views.PublicProjectsListView.as_view()),
@@ -37,13 +62,22 @@
         files_views.DownloadPushDeleteFileView.as_view(),
         name="project_file_download",
     ),
+    path(
+        "packages/<uuid:project_id>/latest/",
+        package_views.LatestPackageView.as_view(),
+    ),
+    path(
+        "packages/<uuid:project_id>/latest/files/<path:filename>/",
+        package_views.LatestPackageDownloadFilesView.as_view(),
+    ),
     path("qfield-files/<uuid:projectid>/", qfield_files_views.ListFilesView.as_view()),
     path(
         "qfield-files/<uuid:projectid>/<path:filename>/",
         qfield_files_views.DownloadFileView.as_view(),
     ),
     path(
-        "qfield-files/export/<uuid:projectid>/", qfield_files_views.ExportView.as_view()
+        "qfield-files/export/<uuid:projectid>/",
+        qfield_files_views.PackageView.as_view(),
     ),
     path("members/<str:username>/", members_views.ListCreateMembersView.as_view()),
     path(
diff --git a/docker-app/qfieldcloud/core/utils.py b/docker-app/qfieldcloud/core/utils.py
index 6e8ab5b01..f562c1439 100644
--- a/docker-app/qfieldcloud/core/utils.py
+++ b/docker-app/qfieldcloud/core/utils.py
@@ -5,7 +5,7 @@
 import posixpath
 from datetime import datetime
 from pathlib import PurePath
-from typing import IO, Iterable, List, NamedTuple, Optional, TypedDict, Union
+from typing import IO, Iterable, List, NamedTuple, Optional, Union

 import boto3
 import jsonschema
@@ -23,10 +23,11 @@ class S3PrefixPath(NamedTuple):

 class S3Object(NamedTuple):
-    Key: str
-    LastModified: datetime
-    Size: int
-    ETag: str
+    name: str
+    key: str
+    last_modified: datetime
+    size: int
+    etag: str


 class S3ObjectVersion:
@@ -261,22 +262,6 @@ def get_s3_project_size(project_id: str) -> int:
     return round(total_size / (1024 * 1024), 3)


-class ProjectFileVersion(TypedDict):
-    name: str
-    size: int
-    sha256: str
-    last_modified: datetime
-    is_latest: bool
-
-
-class ProjectFile(TypedDict):
-    name: str
-    size: int
-    sha256: str
-    last_modified: datetime
-    versions: List[ProjectFileVersion]
-
-
 def get_project_files_with_versions(project_id: str) -> Iterable[S3ObjectWithVersions]:
     """Returns a list of files and their versions.
@@ -292,6 +277,21 @@ def get_project_files_with_versions(project_id: str) -> Iterable[S3ObjectWithVer
     return list_files_with_versions(bucket, prefix, strip_prefix=True)


+def get_project_package_files(project_id: str) -> Iterable[S3Object]:
+    """Returns a list of package files.
+
+    Args:
+        project_id (str): the project id
+
+    Returns:
+        Iterable[S3Object]: the list of package files
+    """
+    bucket = get_s3_bucket()
+    prefix = f"projects/{project_id}/export/"
+
+    return list_files(bucket, prefix, strip_prefix=True)
+
+
 def get_project_files_count(project_id: str) -> int:
     """Returns the number of files within a project."""
     bucket = get_s3_bucket()
@@ -301,6 +301,15 @@ def get_project_files_count(project_id: str) -> int:
     return len(files)


+def get_project_package_files_count(project_id: str) -> int:
+    """Returns the number of package files within a project."""
+    bucket = get_s3_bucket()
+    prefix = f"projects/{project_id}/export/"
+    files = list(bucket.objects.filter(Prefix=prefix))
+
+    return len(files)
+
+
 def get_s3_object_url(
     key: str, bucket: mypy_boto3_s3.service_resource.Bucket = get_s3_bucket()
 ) -> str:
@@ -316,6 +325,28 @@ def get_s3_object_url(
     return f"{settings.STORAGE_ENDPOINT_URL_EXTERNAL}/{bucket.name}/{key}"


+def list_files(
+    bucket: mypy_boto3_s3.service_resource.Bucket,
+    prefix: str,
+    strip_prefix: bool = True,
+) -> Iterable[S3Object]:
+    """Iterator that lists a bucket's objects under prefix."""
+    for f in bucket.objects.filter(Prefix=prefix):
+        if strip_prefix:
+            start_idx = len(prefix)
+            name = f.key[start_idx:]
+        else:
+            name = f.key
+
+        yield S3Object(
+            name=name,
+            key=f.key,
+            last_modified=f.last_modified,
+            size=f.size,
+            etag=f.e_tag,
+        )
+
+
 def list_versions(
     bucket: mypy_boto3_s3.service_resource.Bucket,
     prefix: str,
diff --git a/docker-app/qfieldcloud/core/utils2/jobs.py b/docker-app/qfieldcloud/core/utils2/jobs.py
index 455ec0674..14303370c 100644
--- a/docker-app/qfieldcloud/core/utils2/jobs.py
+++ b/docker-app/qfieldcloud/core/utils2/jobs.py
@@ -1,7 +1,9 @@
 import logging
 from typing import Optional

-from qfieldcloud.core.models import ApplyJob, Delta, Job
+from django.db.models import Q
+from qfieldcloud.core import exceptions
+from qfieldcloud.core.models import ApplyJob, Delta, Job, PackageJob, Project, User

 logger = logging.getLogger(__name__)

@@ -44,3 +46,41 @@ def apply_deltas(
     )

     return apply_job
+
+
+def repackage(project: Project, user: User) -> PackageJob:
+    """Returns an unfinished or freshly created package job.
+
+    Checks if there is already an unfinished package job and returns it,
+    or creates a new package job and returns it.
+    """
+    if not project.project_filename:
+        raise exceptions.NoQGISProjectError()
+
+    # Check if active package job already exists
+    query = Q(project=project) & (
+        Q(status=PackageJob.Status.PENDING)
+        | Q(status=PackageJob.Status.QUEUED)
+        | Q(status=PackageJob.Status.STARTED)
+    )
+
+    if PackageJob.objects.filter(query).count():
+        return PackageJob.objects.get(query)
+
+    package_job = PackageJob.objects.create(project=project, created_by=user)
+
+    return package_job
+
+
+def repackage_if_needed(project: Project, user: User) -> PackageJob:
+    if not project.project_filename:
+        raise exceptions.NoQGISProjectError()
+
+    if project.needs_repackaging:
+        package_job = repackage(project, user)
+    else:
+        package_job = (
+            PackageJob.objects.filter(project=project).order_by("started_at").last()
+        )
+
+    return package_job
diff --git a/docker-app/qfieldcloud/core/views/files_views.py b/docker-app/qfieldcloud/core/views/files_views.py
index eaa1d62b5..9547b9365 100644
--- a/docker-app/qfieldcloud/core/views/files_views.py
+++ b/docker-app/qfieldcloud/core/views/files_views.py
@@ -1,6 +1,7 @@
 from pathlib import PurePath

 from django.http.response import HttpResponseRedirect
+from django.utils import timezone
 from qfieldcloud.core import exceptions, permissions_utils, utils
 from qfieldcloud.core.models import ProcessProjectfileJob, Project
 from rest_framework import permissions, status, views
@@ -130,20 +131,16 @@ def post(self, request, projectid, filename, format=None):
         if "file" not in request.data:
             raise exceptions.EmptyContentError()

+        is_qgis_project_file = utils.is_qgis_project_file(filename)
         # check only one qgs/qgz file per project
-        if utils.is_qgis_project_file(filename):
-            if project.project_filename is not None and PurePath(filename) != PurePath(
-                project.project_filename
-            ):
-                raise exceptions.MultipleProjectsError(
-                    "Only one QGIS project per project allowed"
-                )
-            else:
-                project.project_filename = filename
-                project.save()
-                ProcessProjectfileJob.objects.create(
-                    project=project, created_by=self.request.user
-                )
+        if (
+            is_qgis_project_file
+            and project.project_filename is not None
+            and PurePath(filename) != PurePath(project.project_filename)
+        ):
+            raise exceptions.MultipleProjectsError(
+                "Only one QGIS project per project allowed"
+            )

         request_file = request.FILES.get("file")

@@ -155,6 +152,15 @@ def post(self, request, projectid, filename, format=None):

         bucket.upload_fileobj(request_file, key, ExtraArgs={"Metadata": metadata})

+        if is_qgis_project_file:
+            project.project_filename = filename
+            ProcessProjectfileJob.objects.create(
+                project=project, created_by=self.request.user
+            )
+
+        project.data_last_updated_at = timezone.now()
+        project.save()
+
         return Response(status=status.HTTP_201_CREATED)

     def delete(self, request, projectid, filename):
diff --git a/docker-app/qfieldcloud/core/views/jobs_views.py b/docker-app/qfieldcloud/core/views/jobs_views.py
new file mode 100644
index 000000000..68a0c53d5
--- /dev/null
+++ b/docker-app/qfieldcloud/core/views/jobs_views.py
@@ -0,0 +1,73 @@
+from django.core.exceptions import ObjectDoesNotExist
+from qfieldcloud.core import permissions_utils, serializers
+from qfieldcloud.core.models import Job, Project
+from rest_framework import generics, permissions, viewsets
+from rest_framework.response import Response
+from rest_framework.status import HTTP_201_CREATED
+
+
+class JobPermissions(permissions.BasePermission):
+    def has_permission(self, request, view):
+        project_id = permissions_utils.get_param_from_request(request, "project_id")
+
+        try:
+            project = Project.objects.get(id=project_id)
+        except ObjectDoesNotExist:
+            return False
+
+        return permissions_utils.can_read_files(request.user, project)
+
+
+class JobViewSet(viewsets.ReadOnlyModelViewSet):
+
+    serializer_class = serializers.JobSerializer
+    lookup_url_kwarg = "job_id"
+    permission_classes = [permissions.IsAuthenticated]
+
+    def get_serializer_by_job_type(self, job_type, *args, **kwargs):
+        if job_type == Job.Type.DELTA_APPLY:
+            return serializers.ApplyJobSerializer(*args, **kwargs)
+        elif job_type == Job.Type.PACKAGE:
+            return serializers.PackageJobSerializer(*args, **kwargs)
+        elif job_type == Job.Type.PROCESS_PROJECTFILE:
+            return serializers.ProcessProjectfileJobSerializer(*args, **kwargs)
+        else:
+            raise NotImplementedError(f'Unknown job type "{job_type}"')
+
+    def get_serializer(self, *args, **kwargs):
+        kwargs.setdefault("context", self.get_serializer_context())
+
+        if self.action in ("create",):
+            job_type = kwargs["data"]["type"]
+
+            return self.get_serializer_by_job_type(job_type, *args, **kwargs)
+
+        if self.action in ("retrieve",):
+            job_type = args[0].type
+
+            return self.get_serializer_by_job_type(job_type, *args, **kwargs)
+
+        return serializers.JobSerializer(*args, **kwargs)
+
+    def create(self, request, *args, **kwargs):
+        serializer = self.get_serializer(data=request.data)
+
+        if bool(int(request.data.get("force", 0))):
+            serializer.is_valid(raise_exception=True)
+            serializer.save()
+        else:
+            serializer.is_valid(raise_exception=True)
+            serializer.check_create_new_job()
+            serializer.save()
+
+        return Response(serializer.data, status=HTTP_201_CREATED)
+
+    def get_queryset(self):
+        qs = Job.objects.select_subclasses()
+
+        if self.action == "list":
+            project_id = self.request.data.get("project_id")
+            project = generics.get_object_or_404(Project, pk=project_id)
+            qs = qs.filter(project=project)
+
+        return qs
diff --git a/docker-app/qfieldcloud/core/views/package_views.py b/docker-app/qfieldcloud/core/views/package_views.py
new file mode 100644
index 000000000..f89729c97
--- /dev/null
+++ b/docker-app/qfieldcloud/core/views/package_views.py
@@ -0,0 +1,108 @@
+from django.core.exceptions import ObjectDoesNotExist
+from django.http.response import HttpResponseRedirect
+from qfieldcloud.core import exceptions, permissions_utils, utils
+from qfieldcloud.core.models import PackageJob, Project
+from qfieldcloud.core.utils import check_s3_key, get_project_package_files
+from rest_framework import permissions, views
+from rest_framework.response import Response
+
+
+class PackageViewPermissions(permissions.BasePermission):
+    def has_permission(self, request, view):
+        try:
+            project_id = request.parser_context["kwargs"].get("project_id")
+            project = Project.objects.get(pk=project_id)
+            return permissions_utils.can_read_project(request.user, project)
+        except ObjectDoesNotExist:
+            return False
+
+
+class LatestPackageView(views.APIView):
+
+    permission_classes = [permissions.IsAuthenticated, PackageViewPermissions]
+
+    def get(self, request, project_id):
+        """Get last project package status and file list."""
+        project = Project.objects.get(id=project_id)
+        last_job = (
+            PackageJob.objects.filter(
+                project=project, status=PackageJob.Status.FINISHED
+            )
+            .order_by("started_at")
+            .last()
+        )
+
+        # Check if the project was packaged at least once
+        if not last_job:
+            raise exceptions.InvalidJobError(
+                "Packaging has never been triggered or successful for this project."
+ ) + + files = [] + for f in get_project_package_files(project_id): + files.append( + { + "name": f.name, + "size": f.size, + "last_modified": f.last_modified, + "sha256": check_s3_key(f.key), + } + ) + + if not files: + raise exceptions.InvalidJobError("Empty project package.") + + steps = last_job.feedback.get("steps", []) + layers = ( + steps[1]["outputs"]["layer_checks"] + if len(steps) > 2 and steps[1].get("stage", 1) == 2 + else None + ) + + return Response( + { + "files": files, + "layers": layers, + "status": last_job.status, + "package_id": last_job.pk, + "packaged_at": last_job.project.data_last_packaged_at, + "data_last_updated_at": last_job.project.data_last_updated_at, + } + ) + + +class LatestPackageDownloadFilesView(views.APIView): + + permission_classes = [permissions.IsAuthenticated, PackageViewPermissions] + + def get(self, request, project_id, filename): + """Download package file. + + Raises: + exceptions.InvalidJobError: [description] + """ + project = Project.objects.get(id=project_id) + last_job = PackageJob.objects.filter( + project=project, status=PackageJob.Status.FINISHED + ).latest("started_at") + + # Check if the project was packaged at least once + if not last_job: + raise exceptions.InvalidJobError( + "Packaging has never been triggered or successful for this project." + ) + + file_key = f"projects/{project_id}/export/{filename}" + url = utils.get_s3_client().generate_presigned_url( + "get_object", + Params={ + "Key": file_key, + "Bucket": utils.get_s3_bucket().name, + "ResponseContentType": "application/force-download", + "ResponseContentDisposition": f'attachment;filename="{filename}"', + }, + ExpiresIn=60, + HttpMethod="GET", + ) + + return HttpResponseRedirect(url) diff --git a/docker-app/qfieldcloud/core/views/qfield_files_views.py b/docker-app/qfieldcloud/core/views/qfield_files_views.py index fba873b38..2e713ac8d 100644 --- a/docker-app/qfieldcloud/core/views/qfield_files_views.py +++ b/docker-app/qfieldcloud/core/views/qfield_files_views.py @@ -6,12 +6,12 @@ from django.utils.decorators import method_decorator from drf_yasg.utils import swagger_auto_schema from qfieldcloud.core import exceptions, permissions_utils, serializers, utils -from qfieldcloud.core.models import ExportJob, Project +from qfieldcloud.core.models import PackageJob, Project from rest_framework import permissions, views from rest_framework.response import Response -class ExportViewPermissions(permissions.BasePermission): +class PackageViewPermissions(permissions.BasePermission): def has_permission(self, request, view): projectid = permissions_utils.get_param_from_request(request, "projectid") try: @@ -25,20 +25,20 @@ def has_permission(self, request, view): @method_decorator( name="post", decorator=swagger_auto_schema( - operation_description="Launch QField export project", - operation_id="Launch qfield export", + operation_description="Launch QField packaging project", + operation_id="Launch qfield packaging", ), ) @method_decorator( name="get", decorator=swagger_auto_schema( - operation_description="Get QField export status", - operation_id="Get qfield export status", + operation_description="Get QField packaging status", + operation_id="Get qfield packaging status", ), ) -class ExportView(views.APIView): +class PackageView(views.APIView): - permission_classes = [permissions.IsAuthenticated, ExportViewPermissions] + permission_classes = [permissions.IsAuthenticated, PackageViewPermissions] def post(self, request, projectid): @@ -47,22 +47,30 @@ def post(self, request, 
projectid): if not project_obj.project_filename: raise exceptions.NoQGISProjectError() - # Check if active export job already exists + # Check if active packaging job already exists # TODO: !!!!!!!!!!!! cache results for some minutes query = Q(project=project_obj) & ( - Q(status=ExportJob.Status.PENDING) - | Q(status=ExportJob.Status.QUEUED) - | Q(status=ExportJob.Status.STARTED) + Q(status=PackageJob.Status.PENDING) + | Q(status=PackageJob.Status.QUEUED) + | Q(status=PackageJob.Status.STARTED) ) # NOTE uncomment to enforce job creation - # ExportJob.objects.filter(query).delete() - - if ExportJob.objects.filter(query).exists(): - serializer = serializers.ExportJobSerializer(ExportJob.objects.get(query)) + # PackageJob.objects.filter(query).delete() + + if not project_obj.needs_repackaging: + export_job = PackageJob.objects.filter( + status=PackageJob.Status.FINISHED + ).latest("started_at") + if export_job: + serializer = serializers.ExportJobSerializer(export_job) + return serializers.ExportJobSerializer(serializer.data) + + if PackageJob.objects.filter(query).exists(): + serializer = serializers.ExportJobSerializer(PackageJob.objects.get(query)) return Response(serializer.data) - export_job = ExportJob.objects.create( + export_job = PackageJob.objects.create( project=project_obj, created_by=self.request.user ) @@ -75,7 +83,7 @@ def get(self, request, projectid): project_obj = Project.objects.get(id=projectid) export_job = ( - ExportJob.objects.filter(project=project_obj).order_by("updated_at").last() + PackageJob.objects.filter(project=project_obj).order_by("updated_at").last() ) serializer = serializers.ExportJobSerializer(export_job) @@ -91,23 +99,23 @@ def get(self, request, projectid): ) class ListFilesView(views.APIView): - permission_classes = [permissions.IsAuthenticated, ExportViewPermissions] + permission_classes = [permissions.IsAuthenticated, PackageViewPermissions] def get(self, request, projectid): project_obj = Project.objects.get(id=projectid) # Check if the project was exported at least once - if not ExportJob.objects.filter( - project=project_obj, status=ExportJob.Status.FINISHED + if not PackageJob.objects.filter( + project=project_obj, status=PackageJob.Status.FINISHED ): raise exceptions.InvalidJobError( "Project files have not been exported for the provided project id" ) export_job = ( - ExportJob.objects.filter( - project=project_obj, status=ExportJob.Status.FINISHED + PackageJob.objects.filter( + project=project_obj, status=PackageJob.Status.FINISHED ) .order_by("updated_at") .last() @@ -167,16 +175,16 @@ def get(self, request, projectid): ) class DownloadFileView(views.APIView): - permission_classes = [permissions.IsAuthenticated, ExportViewPermissions] + permission_classes = [permissions.IsAuthenticated, PackageViewPermissions] def get(self, request, projectid, filename): project_obj = Project.objects.get(id=projectid) # Check if the project was exported at least once - if not ExportJob.objects.filter( + if not PackageJob.objects.filter( project=project_obj, - status=ExportJob.Status.FINISHED, + status=PackageJob.Status.FINISHED, ): raise exceptions.InvalidJobError( "Project files have not been exported for the provided project id" diff --git a/docker-app/requirements.txt b/docker-app/requirements.txt index 31848b49f..630cdcc20 100644 --- a/docker-app/requirements.txt +++ b/docker-app/requirements.txt @@ -26,3 +26,4 @@ django-currentuser==0.5.3 django-axes>=5.26.0,<6.0.0 mkdocs>=1.2.3 django-timezone-field>=4.2.1 +django-model-utils>=4.2.0 diff --git 
index 45d958e16..f41c70207 100644
--- a/docker-app/worker_wrapper/wrapper.py
+++ b/docker-app/worker_wrapper/wrapper.py
@@ -18,8 +18,8 @@
     ApplyJob,
     ApplyJobDelta,
     Delta,
-    ExportJob,
     Job,
+    PackageJob,
     ProcessProjectfileJob,
 )
 
@@ -38,7 +38,7 @@ class QgisException(Exception):
 
 
 class JobRun:
-    container_timeout_secs = 3 * 60
+    container_timeout_secs = 10 * 60
     job_class = Job
     command = []
 
@@ -242,9 +242,20 @@ def _run_docker(
         return response["StatusCode"], logs
 
 
-class ExportJobRun(JobRun):
-    job_class = ExportJob
-    command = ["export", "%(project__id)s", "%(project__project_filename)s"]
+class PackageJobRun(JobRun):
+    job_class = PackageJob
+    command = ["package", "%(project__id)s", "%(project__project_filename)s"]
+    data_last_packaged_at = None
+
+    def before_docker_run(self) -> None:
+        # the data snapshot is considered taken at the moment the container starts
+        self.data_last_packaged_at = timezone.now()
+
+    def after_docker_run(self) -> None:
+        # only successfully finished packaging jobs should update `Project.data_last_packaged_at`
+        if self.job.status == Job.Status.FINISHED:
+            self.job.project.data_last_packaged_at = self.data_last_packaged_at
+            self.job.project.save()
 
 
 class DeltaApplyJobRun(JobRun):
@@ -312,6 +323,7 @@ def before_docker_run(self) -> None:
 
     def after_docker_run(self) -> None:
         delta_feedback = self.job.feedback["steps"][1]["outputs"]["delta_feedback"]
+        is_data_modified = False
 
         for feedback in delta_feedback:
             delta_id = feedback["delta_id"]
@@ -320,12 +332,15 @@
 
             if status == "status_applied":
                 status = Delta.Status.APPLIED
+                is_data_modified = True
             elif status == "status_conflict":
                 status = Delta.Status.CONFLICT
             elif status == "status_apply_failed":
                 status = Delta.Status.NOT_APPLIED
             else:
                 status = Delta.Status.ERROR
                 # we cannot tell what happened, so conservatively assume the data was modified
                 is_data_modified = True
 
             Delta.objects.filter(pk=delta_id).update(
                 last_status=status,
@@ -344,6 +359,10 @@
                 modified_pk=modified_pk,
             )
 
+        if is_data_modified:
+            self.job.project.data_last_updated_at = timezone.now()
+            self.job.project.save()
+
     def after_docker_exception(self) -> None:
         Delta.objects.filter(
             id__in=self.delta_ids,
diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py
index d2974d520..e6eac871f 100755
--- a/docker-qgis/entrypoint.py
+++ b/docker-qgis/entrypoint.py
@@ -104,15 +104,15 @@ def _upload_project_directory(
     """Upload the files in the local_dir to the storage"""
     bucket = _get_s3_bucket()
 
-    # either "files" or "export"
+    # either "files" or "package"
     subdir = local_dir.parts[-1]
     prefix = "/".join(["projects", project_id, subdir])
 
     if should_delete:
-        # Remove existing export directory on the storage
+        # Remove existing package directory on the storage
         bucket.objects.filter(Prefix=prefix).delete()
 
-    # Loop recursively in the local export directory
+    # Loop recursively in the local package directory
     for elem in Path(local_dir).rglob("*.*"):
         # Don't upload .qgs~ and .qgz~ files
         if str(elem).endswith("~"):
             continue
@@ -142,8 +142,8 @@
         bucket.upload_file(str(elem), key, ExtraArgs={"Metadata": metadata})
 
 
-def _call_qfieldsync_exporter(project_filepath: Path, export_dir: Path) -> Dict:
-    """Call the function of QFieldSync to export a project for QField"""
+def _call_qfieldsync_packager(project_filepath: Path, package_dir: Path) -> Dict:
+    """Call the QFieldSync packager to package a project for QField"""
     argvb = list(map(os.fsencode, [""]))
qgis_app = QgsApplication(argvb, True) @@ -237,7 +237,7 @@ def _call_qfieldsync_exporter(project_filepath: Path, export_dir: Path) -> Dict: offline_editing = QgsOfflineEditing() offline_converter = OfflineConverter( project, - str(export_dir), + str(package_dir), vl_extent, vl_extent_crs, offline_editing, @@ -255,10 +255,10 @@ def _call_qfieldsync_exporter(project_filepath: Path, export_dir: Path) -> Dict: return layer_checks -def cmd_export_project(args): +def cmd_package_project(args): tmpdir = Path(tempfile.mkdtemp()) - exportdir = tmpdir.joinpath("export") - exportdir.mkdir() + packagedir = tmpdir.joinpath("export") + packagedir.mkdir() steps: List[Step] = [ Step( @@ -275,22 +275,22 @@ def cmd_export_project(args): ), Step( id="export_project", - name="Export Project", + name="Package Project", arguments={ "project_filename": tmpdir.joinpath("files", args.project_file), - "exportdir": exportdir, + "exportdir": packagedir, }, arg_names=["project_filename", "exportdir"], return_names=["layer_checks"], output_names=["layer_checks"], - method=_call_qfieldsync_exporter, + method=_call_qfieldsync_packager, ), Step( id="upload_exported_project", - name="Upload Exported Project", + name="Upload Packaged Project", arguments={ "project_id": args.projectid, - "exportdir": exportdir, + "exportdir": packagedir, "should_delete": True, }, arg_names=["project_id", "exportdir", "should_delete"], @@ -341,7 +341,7 @@ def _apply_delta(args): ), Step( id="upload_exported_project", - name="Upload Exported Project", + name="Upload Project", arguments={ "project_id": args.projectid, "files_dir": files_dir, @@ -435,10 +435,10 @@ def cmd_process_projectfile(args): subparsers = parser.add_subparsers(dest="cmd") - parser_export = subparsers.add_parser("export", help="Export a project") - parser_export.add_argument("projectid", type=str, help="projectid") - parser_export.add_argument("project_file", type=str, help="QGIS project file path") - parser_export.set_defaults(func=cmd_export_project) + parser_package = subparsers.add_parser("package", help="Package a project") + parser_package.add_argument("projectid", type=str, help="projectid") + parser_package.add_argument("project_file", type=str, help="QGIS project file path") + parser_package.set_defaults(func=cmd_package_project) parser_delta = subparsers.add_parser("delta_apply", help="Apply deltafile") parser_delta.add_argument("projectid", type=str, help="projectid") diff --git a/docker-qgis/process_projectfile.py b/docker-qgis/process_projectfile.py index e1e5e6dc4..64e6546be 100644 --- a/docker-qgis/process_projectfile.py +++ b/docker-qgis/process_projectfile.py @@ -115,51 +115,51 @@ def on_project_read(doc): logging.info("Extracting layer and datasource details...") - has_invalid_layers = False - has_online_layers = False - layers_summary = [] + ordered_layer_ids = [] + layers_by_id = {} for layer in project.mapLayers().values(): error = layer.error() + layer_id = layer.id() layer_source = LayerSource(layer) - layer_data = { - "id": layer.name(), + ordered_layer_ids.append(layer_id) + layers_by_id[layer_id] = { + "id": layer_id, "name": layer.name(), "is_valid": layer.isValid(), "datasource": layer.dataProvider().uri().uri() if layer.dataProvider() else None, + "type": layer.type(), + "type_name": layer.type().name, "error_summary": error.summary() if error.messageList() else "", "error_message": layer.error().message(), "filename": layer_source.filename, "provider_error_summary": None, "provider_error_message": None, } - layers_summary.append(layer_data) - 
if not layer_data["filename"]: - has_online_layers = True - - if layer_data["is_valid"]: + if layers_by_id[layer_id]["is_valid"]: continue - has_invalid_layers = True data_provider = layer.dataProvider() if data_provider: data_provider_error = data_provider.error() - layer_data["provider_error_summary"] = ( + layers_by_id[layer_id]["provider_error_summary"] = ( data_provider_error.summary() if data_provider_error.messageList() else "" ) - layer_data["provider_error_message"] = data_provider_error.message() + layers_by_id[layer_id][ + "provider_error_message" + ] = data_provider_error.message() - if not layer_data["provider_error_summary"]: + if not layers_by_id[layer_id]["provider_error_summary"]: service = data_provider.uri().service() if service: - layer_data[ + layers_by_id[layer_id][ "provider_error_summary" ] = f'Unable to connect to service "{service}"' @@ -170,19 +170,20 @@ def on_project_read(doc): else None ) if host and (is_localhost(host, port) or has_ping(host)): - layer_data[ + layers_by_id[layer_id][ "provider_error_summary" ] = f'Unable to connect to host "{host}"' logging.info( - f'Layer "{layer.name()}" seems to be invalid: {layer_data["provider_error_summary"]}' + f'Layer "{layer.name()}" seems to be invalid: {layers_by_id[layer_id]["provider_error_summary"]}' ) else: - layer_data["provider_error_summary"] = "No data provider available" + layers_by_id[layer_id][ + "provider_error_summary" + ] = "No data provider available" - details["layers"] = layers_summary - details["has_online_layers"] = has_online_layers - details["has_invalid_layers"] = has_invalid_layers + details["layers_by_id"] = layers_by_id + details["ordered_layer_ids"] = ordered_layer_ids return details diff --git a/docker-qgis/tests/test_qgis.py b/docker-qgis/tests/test_qgis.py index 4f19b1f4f..eb9d1e7b3 100644 --- a/docker-qgis/tests/test_qgis.py +++ b/docker-qgis/tests/test_qgis.py @@ -6,7 +6,7 @@ class QfcTestCase(unittest.TestCase): - def test_export(self): + def test_package(self): project_directory = self.data_directory_path("simple_project") output_directory = tempfile.mkdtemp() @@ -21,7 +21,7 @@ def test_export(self): "qgis", "bash", "-c", - "./entrypoint.sh export /io/project/project.qgs /io/output", + "./entrypoint.sh package /io/project/project.qgs /io/output", ] subprocess.check_call( diff --git a/scripts/check_envvars.sh b/scripts/check_envvars.sh index 37598dc75..8afdd334c 100755 --- a/scripts/check_envvars.sh +++ b/scripts/check_envvars.sh @@ -1,3 +1,3 @@ #!/bin/bash -e -python3 scripts/check_envvars.py .env.example --docker-compose-dir . +pipenv run python3 scripts/check_envvars.py .env.example --docker-compose-dir . 
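The packaging rename above also changes the shape of the processed project details: instead of a flat `layers_summary` list, `process_projectfile.py` now emits a `layers_by_id` map plus an `ordered_layer_ids` list. A minimal sketch of how a consumer could put the two back together, assuming the `details` dict produced by `on_project_read()`; the helper functions below are hypothetical and not part of the repository:

    # Hypothetical consumers of the `details` dict built in process_projectfile.py;
    # only the "layers_by_id" and "ordered_layer_ids" keys from the patch above are assumed.
    from typing import Dict, List


    def layers_in_project_order(details: Dict) -> List[Dict]:
        """Return the per-layer summaries in the order they appear in the project."""
        layers_by_id = details.get("layers_by_id", {})
        ordered_layer_ids = details.get("ordered_layer_ids", [])
        return [layers_by_id[layer_id] for layer_id in ordered_layer_ids]


    def invalid_layer_names(details: Dict) -> List[str]:
        """Collect the names of the layers that failed to load."""
        return [
            layer["name"]
            for layer in layers_in_project_order(details)
            if not layer["is_valid"]
        ]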
From e75a83767547778df024064d02444e4b30c247bf Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Sat, 27 Nov 2021 18:20:13 +0200 Subject: [PATCH 008/185] Added the layer CRS when processing project file --- docker-qgis/process_projectfile.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docker-qgis/process_projectfile.py b/docker-qgis/process_projectfile.py index 64e6546be..be804c46a 100644 --- a/docker-qgis/process_projectfile.py +++ b/docker-qgis/process_projectfile.py @@ -126,6 +126,7 @@ def on_project_read(doc): layers_by_id[layer_id] = { "id": layer_id, "name": layer.name(), + "crs": layer.crs().authid() if layer.crs() else None, "is_valid": layer.isValid(), "datasource": layer.dataProvider().uri().uri() if layer.dataProvider() From 7698ba0606edc91fa678c506055da74a179bde90 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Sat, 27 Nov 2021 19:31:57 +0200 Subject: [PATCH 009/185] Switch to PostGIS --- docker-app/Dockerfile | 2 +- docker-app/qfieldcloud/settings.py | 2 +- docker-compose.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docker-app/Dockerfile b/docker-app/Dockerfile index 4f3ca8e12..1096cf50f 100644 --- a/docker-app/Dockerfile +++ b/docker-app/Dockerfile @@ -14,7 +14,7 @@ ENV PYTHONUNBUFFERED 1 # install dependencies RUN apt update \ - && apt install -y netcat-openbsd + && apt install -y netcat python3-gdal # install dependencies COPY ./requirements.txt . diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py index 6f1a25be5..761df7bf1 100644 --- a/docker-app/qfieldcloud/settings.py +++ b/docker-app/qfieldcloud/settings.py @@ -128,7 +128,7 @@ DATABASES = { "default": { - "ENGINE": "django.db.backends.postgresql", + "ENGINE": "django.contrib.gis.db.backends.postgis", "NAME": os.environ.get("SQL_DATABASE"), "USER": os.environ.get("SQL_USER"), "PASSWORD": os.environ.get("SQL_PASSWORD"), diff --git a/docker-compose.yml b/docker-compose.yml index 70e1a85f1..20cd5bee0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -9,7 +9,7 @@ x-logging: services: db: - image: postgres:11 + image: postgis/postgis:13-3.1-alpine restart: unless-stopped environment: POSTGRES_DB: ${POSTGRES_DB} From bbcc0471f215c227fdce35fce93ca771265721aa Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Sat, 27 Nov 2021 20:28:25 +0200 Subject: [PATCH 010/185] Add old_geom, new_geom and srid in delta table --- .../migrations/0051_auto_20211125_0444.py | 94 +++++++++++++++++++ docker-app/qfieldcloud/core/models.py | 5 +- 2 files changed, 98 insertions(+), 1 deletion(-) create mode 100644 docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py diff --git a/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py b/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py new file mode 100644 index 000000000..05f475e74 --- /dev/null +++ b/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py @@ -0,0 +1,94 @@ +# Generated by Django 3.2.9 on 2021-11-25 03:44 + +import django.contrib.gis.db.models.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0050_auto_20211118_1150"), + ] + + operations = [ + migrations.AlterModelOptions( + name="packagejob", + options={ + "verbose_name": "Job: package", + "verbose_name_plural": "Jobs: package", + }, + ), + migrations.AddField( + model_name="delta", + name="new_geom", + field=django.contrib.gis.db.models.fields.GeometryField( + dim=4, null=True, srid=0 + ), + ), + migrations.AddField( + 
model_name="delta", + name="old_geom", + field=django.contrib.gis.db.models.fields.GeometryField( + dim=4, null=True, srid=0 + ), + ), + migrations.AddField( + model_name="delta", + name="srid", + field=models.PositiveIntegerField(null=True), + ), + migrations.RunSQL( + """ + UPDATE core_delta + SET + srid = COALESCE(NULLIF(REGEXP_REPLACE(jsonb_extract_path_text('{"localLayerCrs": ""}', 'localLayerCrs'), '\D*', '', 'g'), ''), '0')::int, + old_geom = ST_GeomFromText( + jsonb_extract_path_text(content, 'old', 'geometry') + ), + new_geom = ST_GeomFromText( + jsonb_extract_path_text(content, 'new', 'geometry') + ) + """, + migrations.RunSQL.noop, + ), + migrations.RunSQL( + """ + CREATE FUNCTION core_delta_geom_trigger_func() + RETURNS trigger + AS + $$ + BEGIN + NEW.srid := COALESCE(NULLIF(REGEXP_REPLACE(jsonb_extract_path_text(NEW.content, 'localLayerCrs'), '\D*', '', 'g'), ''), '0')::int; + NEW.old_geom := ST_GeomFromText( jsonb_extract_path_text(NEW.content, 'old', 'geometry') ); + NEW.new_geom := ST_GeomFromText( jsonb_extract_path_text(NEW.content, 'new', 'geometry') ); + RETURN NEW; + END; + $$ + LANGUAGE PLPGSQL + """, + """ + DROP FUNCTION core_delta_geom_trigger_func(); + """, + ), + migrations.RunSQL( + """ + CREATE TRIGGER core_delta_geom_update_trigger BEFORE UPDATE ON core_delta + FOR EACH ROW + WHEN (OLD.content IS DISTINCT FROM NEW.content) + EXECUTE FUNCTION core_delta_geom_trigger_func() + """, + """ + DROP TRIGGER core_delta_geom_update_trigger ON core_delta; + """, + ), + migrations.RunSQL( + """ + CREATE TRIGGER core_delta_geom_insert_trigger BEFORE INSERT ON core_delta + FOR EACH ROW + EXECUTE FUNCTION core_delta_geom_trigger_func() + """, + """ + DROP TRIGGER core_delta_geom_insert_trigger ON core_delta + """, + ), + ] diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py index 9830c4b96..876a0c81e 100644 --- a/docker-app/qfieldcloud/core/models.py +++ b/docker-app/qfieldcloud/core/models.py @@ -8,9 +8,9 @@ import qfieldcloud.core.utils2.storage from django.contrib.auth.models import AbstractUser, UserManager +from django.contrib.gis.db import models from django.core.exceptions import ValidationError from django.core.validators import RegexValidator -from django.db import models from django.db.models import Case, Exists, OuterRef, Q from django.db.models import Value as V from django.db.models import When @@ -1064,6 +1064,9 @@ class Status(models.TextChoices): on_delete=models.CASCADE, related_name="uploaded_deltas", ) + old_geom = models.GeometryField(null=True, srid=0, dim=4) + new_geom = models.GeometryField(null=True, srid=0, dim=4) + srid = models.PositiveIntegerField(null=True) def __str__(self): return str(self.id) + ", project: " + str(self.project.id) From cf0d44a53fe0aeda32b6b098792c498d130b249a Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Sat, 27 Nov 2021 20:30:31 +0200 Subject: [PATCH 011/185] Remove old outdated script --- dev-start-docker-up-and-sass.sh | 2 -- 1 file changed, 2 deletions(-) delete mode 100755 dev-start-docker-up-and-sass.sh diff --git a/dev-start-docker-up-and-sass.sh b/dev-start-docker-up-and-sass.sh deleted file mode 100755 index 78a85debc..000000000 --- a/dev-start-docker-up-and-sass.sh +++ /dev/null @@ -1,2 +0,0 @@ -# following command starts up local containers and then starts sass processer which compiles scss files to css. 
-docker-compose up -d && sass --watch docker-app/qfieldcloud/core/web/staticfiles/scss/qfieldcloud.scss docker-app/qfieldcloud/core/web/staticfiles/css/qfieldcloud.css From ae95e73e33a557ef15e9d585f9ab2b3fbed1ef23 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 1 Dec 2021 19:24:14 +0200 Subject: [PATCH 012/185] Add remove srid column and add store the srid in the geom column --- .../migrations/0051_auto_20211125_0444.py | 80 +++++++++++++------ docker-app/qfieldcloud/core/models.py | 1 - 2 files changed, 55 insertions(+), 26 deletions(-) diff --git a/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py b/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py index 05f475e74..d9399aebf 100644 --- a/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py +++ b/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py @@ -1,7 +1,7 @@ # Generated by Django 3.2.9 on 2021-11-25 03:44 import django.contrib.gis.db.models.fields -from django.db import migrations, models +from django.db import migrations class Migration(migrations.Migration): @@ -22,72 +22,102 @@ class Migration(migrations.Migration): model_name="delta", name="new_geom", field=django.contrib.gis.db.models.fields.GeometryField( - dim=4, null=True, srid=0 + dim=4, null=True, srid=4326 ), ), migrations.AddField( model_name="delta", name="old_geom", field=django.contrib.gis.db.models.fields.GeometryField( - dim=4, null=True, srid=0 + dim=4, null=True, srid=4326 ), ), - migrations.AddField( - model_name="delta", - name="srid", - field=models.PositiveIntegerField(null=True), - ), migrations.RunSQL( - """ + r""" + WITH subquery AS ( + SELECT + id, + CASE + WHEN jsonb_extract_path_text(content, 'localLayerCrs') ~ '^EPSG:\d{1,10}$' + THEN + REGEXP_REPLACE(jsonb_extract_path_text(content, 'localLayerCrs'), '\D*', '', 'g')::int + ELSE + NULL + END AS srid + FROM core_delta + ) UPDATE core_delta SET - srid = COALESCE(NULLIF(REGEXP_REPLACE(jsonb_extract_path_text('{"localLayerCrs": ""}', 'localLayerCrs'), '\D*', '', 'g'), ''), '0')::int, - old_geom = ST_GeomFromText( - jsonb_extract_path_text(content, 'old', 'geometry') - ), - new_geom = ST_GeomFromText( - jsonb_extract_path_text(content, 'new', 'geometry') - ) + old_geom = + ST_Transform( + ST_SetSRID( + ST_GeomFromText( + jsonb_extract_path_text(core_delta.content, 'old', 'geometry') + ), + subquery.srid + ), + 4326 + ), + new_geom = + ST_Transform( + ST_SetSRID( + ST_GeomFromText( + jsonb_extract_path_text(core_delta.content, 'new', 'geometry') + ), + subquery.srid + ), + 4326 + ) + FROM subquery + WHERE core_delta.id = subquery.id """, migrations.RunSQL.noop, ), migrations.RunSQL( - """ + r""" CREATE FUNCTION core_delta_geom_trigger_func() RETURNS trigger AS $$ + DECLARE + srid int; BEGIN - NEW.srid := COALESCE(NULLIF(REGEXP_REPLACE(jsonb_extract_path_text(NEW.content, 'localLayerCrs'), '\D*', '', 'g'), ''), '0')::int; - NEW.old_geom := ST_GeomFromText( jsonb_extract_path_text(NEW.content, 'old', 'geometry') ); - NEW.new_geom := ST_GeomFromText( jsonb_extract_path_text(NEW.content, 'new', 'geometry') ); + SELECT CASE + WHEN jsonb_extract_path_text(NEW.content, 'localLayerCrs') ~ '^EPSG:\d{1,10}$' + THEN + REGEXP_REPLACE(jsonb_extract_path_text(NEW.content, 'localLayerCrs'), '\D*', '', 'g')::int + ELSE + NULL + END INTO srid; + NEW.old_geom := ST_Transform( ST_SetSRID( ST_GeomFromText( jsonb_extract_path_text(NEW.content, 'old', 'geometry') ), srid ), 4326 ); + NEW.new_geom := ST_Transform( ST_SetSRID( ST_GeomFromText( 
jsonb_extract_path_text(NEW.content, 'new', 'geometry') ), srid ), 4326 );
             RETURN NEW;
             END;
             $$
             LANGUAGE PLPGSQL
             """,
-            """
+            r"""
             DROP FUNCTION core_delta_geom_trigger_func();
             """,
         ),
         migrations.RunSQL(
-            """
+            r"""
             CREATE TRIGGER core_delta_geom_update_trigger BEFORE UPDATE ON core_delta
             FOR EACH ROW
             WHEN (OLD.content IS DISTINCT FROM NEW.content)
             EXECUTE FUNCTION core_delta_geom_trigger_func()
             """,
-            """
+            r"""
             DROP TRIGGER core_delta_geom_update_trigger ON core_delta;
             """,
         ),
         migrations.RunSQL(
-            """
+            r"""
             CREATE TRIGGER core_delta_geom_insert_trigger BEFORE INSERT ON core_delta
             FOR EACH ROW
             EXECUTE FUNCTION core_delta_geom_trigger_func()
             """,
-            """
+            r"""
             DROP TRIGGER core_delta_geom_insert_trigger ON core_delta
             """,
         ),
diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py
index 876a0c81e..cd69eff13 100644
--- a/docker-app/qfieldcloud/core/models.py
+++ b/docker-app/qfieldcloud/core/models.py
@@ -1066,7 +1066,6 @@ class Status(models.TextChoices):
     )
     old_geom = models.GeometryField(null=True, srid=0, dim=4)
     new_geom = models.GeometryField(null=True, srid=0, dim=4)
-    srid = models.PositiveIntegerField(null=True)
 
     def __str__(self):
         return str(self.id) + ", project: " + str(self.project.id)

From e1fd3f53a72fe2c8e3287ecd42b812501721830d Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Thu, 2 Dec 2021 14:23:29 +0200
Subject: [PATCH 013/185] Use nginx instead of caddy for local development

---
 .env.example                      |  1 +
 README.md                         | 32 +++++++++++++++++++
 conf/nginx/conf.d/default.conf    | 52 +++++++++++++++++++++++++++++++
 docker-compose.override.local.yml | 17 +++++++---
 docker-compose.yml                | 42 +++++++++++++++++--------
 5 files changed, 127 insertions(+), 17 deletions(-)
 create mode 100644 conf/nginx/conf.d/default.conf

diff --git a/.env.example b/.env.example
index be8c3e13e..fa33a47fb 100644
--- a/.env.example
+++ b/.env.example
@@ -2,6 +2,7 @@ DEBUG=1
 
 QFIELDCLOUD_HOST=localhost
 DJANGO_SETTINGS_MODULE=qfieldcloud.settings
+DJANGO_ALLOWED_HOSTS=localhost 127.0.0.1 0.0.0.0
 
 SECRET_KEY=change_me
 
diff --git a/README.md b/README.md
index 254ef0aee..6ee1ab532 100644
--- a/README.md
+++ b/README.md
@@ -67,6 +67,38 @@ To run only a test module (e.g. `test_permission.py`)
 
     docker-compose run app python manage.py test qfieldcloud.core.tests.test_permission
 
+## Add root certificate
+
+QFieldCloud will automatically generate a certificate and its root certificate in `./conf/nginx/certs`. However, you need to trust the root certificate first, so other programs (e.g. curl) can create a secure connection to the local QFieldCloud instance.
+
+On Debian/Ubuntu, copy the root certificate to the directory with trusted certificates.
Note the extension has been changed to `.crt`:
+
+    sudo mv ./conf/nginx/certs/rootCA.pem /usr/local/share/ca-certificates/rootCA.crt
+
+Trust the newly added certificate:
+
+    sudo update-ca-certificates
+
+Connecting with `curl` should return no errors:
+
+    curl https://localhost:8002/
+
+### Remove the root certificate
+
+If you want to remove or change the root certificate, you need to remove the root certificate file and refresh the list of certificates:
+
+    sudo rm /usr/local/share/ca-certificates/rootCA.crt
+    sudo update-ca-certificates --fresh
+
+Now connecting with `curl` should fail with a similar error:
+
+    $ curl https://localhost:8002/
+
+    curl: (60) SSL certificate problem: unable to get local issuer certificate
+    More details here: https://curl.haxx.se/docs/sslcerts.html
+
+    curl failed to verify the legitimacy of the server and therefore could not
+    establish a secure connection to it. To learn more about this situation and
+    how to fix it, please visit the web page mentioned above.
 
 ### Code style
 
diff --git a/conf/nginx/conf.d/default.conf b/conf/nginx/conf.d/default.conf
new file mode 100644
index 000000000..127013a4b
--- /dev/null
+++ b/conf/nginx/conf.d/default.conf
@@ -0,0 +1,52 @@
+upstream django {
+    server app:8000 fail_timeout=0;
+}
+
+# server {
+
+#     server_name example.com www.example.com;
+# }
+
+server {
+    listen 80;
+    listen 443 ssl;
+
+    ssl_certificate certs/localhost.pem;
+    ssl_certificate_key certs/localhost-key.pem;
+
+    server_name localhost;
+    client_max_body_size 10G;
+    keepalive_timeout 5;
+
+    # path for static files (only needed for serving local staticfiles)
+    root /var/www/html/;
+
+    # checks for static file, if not found proxy to app
+    location / {
+        try_files $uri @proxy_to_app;
+    }
+
+    location @proxy_to_app {
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_set_header Host $http_host;
+
+        proxy_redirect off;
+        proxy_pass http://django;
+    }
+
+    # location /minio {
+    #     proxy_set_header X-Real-IP $remote_addr;
+    #     proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+    #     proxy_set_header X-Forwarded-Proto $scheme;
+    #     proxy_set_header Host $http_host;
+
+    #     proxy_connect_timeout 300;
+    #     # Default is HTTP/1, keepalive is only enabled in HTTP/1.1
+    #     proxy_http_version 1.1;
+    #     proxy_set_header Connection "";
+    #     chunked_transfer_encoding off;
+
+    #     proxy_pass http://minio:9000;
+    # }
+}
diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml
index 7ca4b9169..e86a749f4 100644
--- a/docker-compose.override.local.yml
+++ b/docker-compose.override.local.yml
@@ -54,12 +54,21 @@ services:
     ports:
       - ${GEODB_PORT}:5432
 
-  web:
+  # web:
+  #   environment:
+  #     CADDY_IMPORT_GLOB: Caddyfile.proxy-minio
+  #   volumes:
+  #     # Include config for reverse proxying caddy
+  #     - ./docker-caddy/Caddyfile.proxy-minio:/etc/caddy/Caddyfile.proxy-minio
+
+  mkcert:
+    image: vishnunair/docker-mkcert
     environment:
-      CADDY_IMPORT_GLOB: Caddyfile.proxy-minio
+      domain: ${QFIELDCLOUD_HOST}
     volumes:
-      # Include config for reverse proxying caddy
-      - ./docker-caddy/Caddyfile.proxy-minio:/etc/caddy/Caddyfile.proxy-minio
+      - ./conf/nginx/certs/:/root/.local/share/mkcert/
+    command: /bin/sh -c 'mkcert -install && for i in $$(echo $$domain | sed "s/,/ /g"); do [ !
-f /root/.local/share/mkcert/$$i.pem ] && mkcert $$i; done && tail -f -n0 /etc/hosts' + s3: image: minio/minio:RELEASE.2021-07-27T02-40-15Z diff --git a/docker-compose.yml b/docker-compose.yml index 20cd5bee0..d85ef3514 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -30,7 +30,7 @@ services: - static_volume:/usr/src/app/staticfiles - media_volume:/usr/src/app/mediafiles/ environment: - DJANGO_ALLOWED_HOSTS: ${QFIELDCLOUD_HOST} + DJANGO_ALLOWED_HOSTS: ${DJANGO_ALLOWED_HOSTS} DJANGO_SETTINGS_MODULE: ${DJANGO_SETTINGS_MODULE} SECRET_KEY: ${SECRET_KEY} DEBUG: ${DEBUG} @@ -81,23 +81,39 @@ services: ofelia.job-exec.runcrons.schedule: 0 * * * * * ofelia.job-exec.runcrons.command: python manage.py runcrons - web: - build: - context: ./docker-caddy - restart: unless-stopped - environment: - QFIELDCLOUD_HOST: ${QFIELDCLOUD_HOST} - CADDY_ACME_CA: ${CADDY_ACME_CA} - CADDY_IMPORT_GLOB: ${CADDY_IMPORT_GLOB} - ACME_AGREE: "true" + # web: + # build: + # context: ./docker-caddy + # restart: unless-stopped + # environment: + # QFIELDCLOUD_HOST: ${QFIELDCLOUD_HOST} + # CADDY_ACME_CA: ${CADDY_ACME_CA} + # CADDY_IMPORT_GLOB: ${CADDY_IMPORT_GLOB} + # ACME_AGREE: "true" + # volumes: + # - caddy_data:/data + # ports: + # - ${WEB_HTTP_PORT}:80 + # - ${WEB_HTTPS_PORT}:443 + # logging: *default-logging + + nginx: + image: nginx:stable volumes: - - static_volume:/home/app/web/staticfiles/ - - media_volume:/home/app/web/mediafiles/ - - caddy_data:/data + - static_volume:/var/www/html/staticfiles/ + - media_volume:/var/www/html/mediafiles/ + - ./conf/nginx/conf.d/:/etc/nginx/conf.d/ + - ./conf/nginx/certs/:/etc/nginx/certs/:ro ports: - ${WEB_HTTP_PORT}:80 - ${WEB_HTTPS_PORT}:443 + command: [nginx-debug, '-g', 'daemon off;'] logging: *default-logging + depends_on: + - app + + # certbot: + # image: certbot/certbot qgis: build: From a5b1bfa5daf67a16af57d8a7080336d91ed1ddd4 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Sun, 5 Dec 2021 14:36:00 +0200 Subject: [PATCH 014/185] Make sure pyyaml is installed dependency --- scripts/check_envvars.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/check_envvars.sh b/scripts/check_envvars.sh index 8afdd334c..92858c0e4 100755 --- a/scripts/check_envvars.sh +++ b/scripts/check_envvars.sh @@ -1,3 +1,4 @@ #!/bin/bash -e -pipenv run python3 scripts/check_envvars.py .env.example --docker-compose-dir . +pipenv run pip install pyyaml +pipenv run python scripts/check_envvars.py .env.example --docker-compose-dir . 
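The local minio service above only becomes healthy once its endpoint answers, but the application additionally expects the bucket named by `STORAGE_BUCKET_NAME` to exist. A rough boto3 equivalent of that bootstrap step, assuming the `STORAGE_*` values from `.env.example`; this is an illustrative sketch, not code from the repository:

    # Illustrative bucket bootstrap for the local minio service; assumes the
    # STORAGE_* environment variables from .env.example are set.
    import os

    import boto3
    from botocore.exceptions import ClientError


    def ensure_bucket_exists() -> None:
        s3 = boto3.resource(
            "s3",
            endpoint_url=os.environ["STORAGE_ENDPOINT_URL"],
            aws_access_key_id=os.environ["STORAGE_ACCESS_KEY_ID"],
            aws_secret_access_key=os.environ["STORAGE_SECRET_ACCESS_KEY"],
        )
        bucket_name = os.environ["STORAGE_BUCKET_NAME"]
        try:
            # head_bucket raises ClientError when the bucket is missing or inaccessible
            s3.meta.client.head_bucket(Bucket=bucket_name)
        except ClientError:
            s3.create_bucket(Bucket=bucket_name)


    if __name__ == "__main__":
        ensure_bucket_exists()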
From 21661052737af5bf0e2f139ca3a649b57cea09a9 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Sat, 4 Dec 2021 01:43:32 +0200
Subject: [PATCH 015/185] Add certbot

---
 .gitignore                        |  1 +
 README.md                         |  5 ++++
 conf/nginx/conf.d/default.conf    |  5 ++++
 conf/nginx/options-ssl-nginx.conf | 14 +++++++++
 conf/nginx/ssl-dhparams.pem       |  8 +++++
 docker-compose.override.local.yml | 29 +++++------------
 docker-compose.yml                | 37 ++++++++++++-----------
 scripts/init_letsencrypt.sh       | 50 +++++++++++++++++++++++++++++++
 8 files changed, 109 insertions(+), 40 deletions(-)
 create mode 100644 conf/nginx/options-ssl-nginx.conf
 create mode 100644 conf/nginx/ssl-dhparams.pem
 create mode 100644 scripts/init_letsencrypt.sh

diff --git a/.gitignore b/.gitignore
index 2dad1b4c2..f2d032161 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,3 +7,4 @@ __pycache__/
 .env
 docker-compose.override.yml
 client/projects
+conf/nginx/certs/*
diff --git a/README.md b/README.md
index 6ee1ab532..f7db501d3 100644
--- a/README.md
+++ b/README.md
@@ -151,6 +151,11 @@ Run the django database migrations
 
     docker-compose -f docker-compose.yml -f docker-compose.override.dev.yml exec app python manage.py migrate
 
+## Create a certificate using Let's Encrypt
+
+
+    ./scripts/init_letsencrypt.sh
+
 ### Infrastructure
 
 Based on this example
diff --git a/conf/nginx/conf.d/default.conf b/conf/nginx/conf.d/default.conf
index 127013a4b..0b7458671 100644
--- a/conf/nginx/conf.d/default.conf
+++ b/conf/nginx/conf.d/default.conf
@@ -21,6 +21,11 @@ server {
     # path for static files (only needed for serving local staticfiles)
     root /var/www/html/;
 
+    # certbot
+    location /.well-known/acme-challenge/ {
+        root /var/www/certbot;
+    }
+
     # checks for static file, if not found proxy to app
     location / {
         try_files $uri @proxy_to_app;
diff --git a/conf/nginx/options-ssl-nginx.conf b/conf/nginx/options-ssl-nginx.conf
new file mode 100644
index 000000000..978e6e8ab
--- /dev/null
+++ b/conf/nginx/options-ssl-nginx.conf
@@ -0,0 +1,14 @@
+# This file contains important security parameters. If you modify this file
+# manually, Certbot will be unable to automatically provide future security
+# updates. Instead, Certbot will print and log an error message with a path to
+# the up-to-date file that you will need to refer to when manually updating
+# this file.
+ +ssl_session_cache shared:le_nginx_SSL:10m; +ssl_session_timeout 1440m; +ssl_session_tickets off; + +ssl_protocols TLSv1.2 TLSv1.3; +ssl_prefer_server_ciphers off; + +ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384"; diff --git a/conf/nginx/ssl-dhparams.pem b/conf/nginx/ssl-dhparams.pem new file mode 100644 index 000000000..9b182b720 --- /dev/null +++ b/conf/nginx/ssl-dhparams.pem @@ -0,0 +1,8 @@ +-----BEGIN DH PARAMETERS----- +MIIBCAKCAQEA//////////+t+FRYortKmq/cViAnPTzx2LnFg84tNpWp4TZBFGQz ++8yTnc4kmz75fS/jY2MMddj2gbICrsRhetPfHtXV/WVhJDP1H18GbtCFY2VVPe0a +87VXE15/V8k1mE8McODmi3fipona8+/och3xWKE2rec1MKzKT0g6eXq8CrGCsyT7 +YdEIqUuyyOP7uWrat2DX9GgdT0Kj3jlN9K5W7edjcrsZCwenyO4KbXCeAvzhzffi +7MA0BM0oNC9hkXL+nOmFg/+OTxIy7vKBg8P+OxtMb61zO7X8vC7CIAXFjvGDfRaD +ssbzSibBsu/6iGtCOGEoXJf//////////wIBAg== +-----END DH PARAMETERS----- diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml index e86a749f4..975dbff76 100644 --- a/docker-compose.override.local.yml +++ b/docker-compose.override.local.yml @@ -54,22 +54,6 @@ services: ports: - ${GEODB_PORT}:5432 - # web: - # environment: - # CADDY_IMPORT_GLOB: Caddyfile.proxy-minio - # volumes: - # # Include config for reverse proxying caddy - # - ./docker-caddy/Caddyfile.proxy-minio:/etc/caddy/Caddyfile.proxy-minio - - mkcert: - image: vishnunair/docker-mkcert - environment: - domain: ${QFIELDCLOUD_HOST} - volumes: - - ./conf/nginx/certs/:/root/.local/share/mkcert/ - command: /bin/sh -c 'mkcert -install && for i in $$(echo $$domain | sed "s/,/ /g"); do [ ! -f /root/.local/share/mkcert/$$i.pem ] && mkcert $$i; done && tail -f -n0 /etc/hosts' - - s3: image: minio/minio:RELEASE.2021-07-27T02-40-15Z restart: unless-stopped @@ -85,12 +69,13 @@ services: command: server /data{1...4} --console-address :9001 healthcheck: test: [ - "CMD", - "curl", - "-A", - "Mozilla/5.0 (X11; Linux x86_64; rv:30.0) Gecko/20100101 Firefox/30.0", - "-f", - "${STORAGE_ENDPOINT_URL}/minio/index.html"] + "CMD", + "curl", + "-A", + "Mozilla/5.0 (X11; Linux x86_64; rv:30.0) Gecko/20100101 Firefox/30.0", + "-f", + "${STORAGE_ENDPOINT_URL}/minio/index.html" + ] interval: 5s timeout: 20s retries: 5 diff --git a/docker-compose.yml b/docker-compose.yml index d85ef3514..5c28ca3a2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -81,22 +81,6 @@ services: ofelia.job-exec.runcrons.schedule: 0 * * * * * ofelia.job-exec.runcrons.command: python manage.py runcrons - # web: - # build: - # context: ./docker-caddy - # restart: unless-stopped - # environment: - # QFIELDCLOUD_HOST: ${QFIELDCLOUD_HOST} - # CADDY_ACME_CA: ${CADDY_ACME_CA} - # CADDY_IMPORT_GLOB: ${CADDY_IMPORT_GLOB} - # ACME_AGREE: "true" - # volumes: - # - caddy_data:/data - # ports: - # - ${WEB_HTTP_PORT}:80 - # - ${WEB_HTTPS_PORT}:443 - # logging: *default-logging - nginx: image: nginx:stable volumes: @@ -104,6 +88,9 @@ services: - media_volume:/var/www/html/mediafiles/ - ./conf/nginx/conf.d/:/etc/nginx/conf.d/ - ./conf/nginx/certs/:/etc/nginx/certs/:ro + - ./conf/nginx/options-ssl-nginx.conf:/etc/nginx/options-ssl-nginx.conf + - ./conf/nginx/ssl-dhparams.pem:/etc/nginx/ssl-dhparams.pem + - certbot_www:/var/www/certbot ports: - ${WEB_HTTP_PORT}:80 - ${WEB_HTTPS_PORT}:443 @@ -112,8 +99,21 @@ services: depends_on: - app - # certbot: - # image: certbot/certbot + mkcert: + image: vishnunair/docker-mkcert + 
environment: + domain: ${QFIELDCLOUD_HOST} + volumes: + - ./conf/nginx/certs/:/root/.local/share/mkcert/ + command: /bin/sh -c 'mkcert -install && for i in $$(echo $$domain | sed "s/,/ /g"); do [ ! -f /root/.local/share/mkcert/$$i.pem ] && mkcert $$i; done && tail -f -n0 /etc/hosts' + + certbot: + image: certbot/certbot + restart: unless-stopped + volumes: + - ./conf/certbot/conf:/etc/letsencrypt + - certbot_www:/var/www/certbot + entrypoint: "/bin/sh -c 'trap exit TERM; while :; do certbot renew; sleep 12h & wait $${!}; done;'" qgis: build: @@ -172,3 +172,4 @@ volumes: media_volume: caddy_data: transformation_grids: + certbot_www: diff --git a/scripts/init_letsencrypt.sh b/scripts/init_letsencrypt.sh new file mode 100644 index 000000000..458da6717 --- /dev/null +++ b/scripts/init_letsencrypt.sh @@ -0,0 +1,50 @@ +#!/bin/bash + +set -e + +if ! [ -x "$(command -v docker-compose)" ]; then + echo 'Error: docker-compose is not installed.' >&2 + exit 1 +fi + +domains=(dev.qfield.cloud) +rsa_key_size=4096 +config_path="./conf/nginx" +email="info@opengis.ch" # Adding a valid address is strongly recommended +staging=1 # Set to 1 if you're testing your setup to avoid hitting request limits + +if [ ! -e "$config_path/options-ssl-nginx.conf" ] || [ ! -e "$config_path/ssl-dhparams.pem" ]; then + echo "### Downloading recommended TLS parameters ..." + curl -s https://raw.githubusercontent.com/certbot/certbot/master/certbot-nginx/certbot_nginx/_internal/tls_configs/options-ssl-nginx.conf > "$config_path/options-ssl-nginx.conf" + curl -s https://raw.githubusercontent.com/certbot/certbot/master/certbot/certbot/ssl-dhparams.pem > "$config_path/ssl-dhparams.pem" + echo +fi + +echo "### Starting nginx ..." +docker-compose up --force-recreate -d nginx +echo + +exit + +echo "### Requesting Let's Encrypt certificate for $domains ..." +#Join $domains to -d args +domain_args="" +for domain in "${domains[@]}"; do + domain_args="$domain_args -d $domain" +done + +# Enable staging mode if needed +if [ $staging != "0" ]; then staging_arg="--staging"; fi + +docker-compose run --rm --entrypoint "\ + certbot certonly --webroot -w /var/www/certbot \ + $staging_arg \ + $domain_args \ + --email $email \ + --rsa-key-size $rsa_key_size \ + --agree-tos \ + --force-renewal" certbot +echo + +echo "### Reloading nginx ..." 
+docker-compose exec nginx nginx -s reload

From 07cd97076f3dc26e79cd6832c3a6a20c350312ba Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Mon, 6 Dec 2021 03:34:29 +0200
Subject: [PATCH 016/185] nginx template

---
 .env.example                | 10 +++--
 .github/workflows/test.yml  |  2 +-
 .../default.conf.template}  | 26 +++++++------
 docker-compose.yml          |  9 ++++-
 scripts/init_letsencrypt.sh | 39 ++++++++-----------
 5 files changed, 45 insertions(+), 41 deletions(-)
 rename conf/nginx/{conf.d/default.conf => templates/default.conf.template} (76%)

diff --git a/.env.example b/.env.example
index fa33a47fb..317445fd6 100644
--- a/.env.example
+++ b/.env.example
@@ -6,6 +6,11 @@ DJANGO_ALLOWED_HOSTS=localhost 127.0.0.1 0.0.0.0
 
 SECRET_KEY=change_me
 
+LETSENCRYPT_EMAIL=info@opengis.ch
+LETSENCRYPT_RSA_KEY_SIZE=4096
+# Set to 1 if you're testing your setup to avoid hitting request limits
+LETSENCRYPT_STAGING=1
+
 STORAGE_ACCESS_KEY_ID=minioadmin
 STORAGE_SECRET_ACCESS_KEY=minioadmin
 STORAGE_BUCKET_NAME=qfieldcloud-local
@@ -30,12 +35,9 @@ HOST_POSTGRES_PORT=5433
 GEODB_HOST=geodb
 GEODB_PORT=5432
 GEODB_USER=postgres
-GEODB_PASSWORD=KUAa7h!G&wQEmkS3
+GEODB_PASSWORD='KUAa7h!G&wQEmkS3'
 GEODB_DB=postgres
 
-CADDY_ACME_CA=https://acme-staging-v02.api.letsencrypt.org/directory
-CADDY_IMPORT_GLOB=(*(N))
-
 SENTRY_DSN=
 
 REDIS_PASSWORD=change_me_with_a_very_loooooooooooong_password
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 07b255fa6..830873100 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -26,7 +26,7 @@ jobs:
       - name: Export the env variables file
         run: |
           cp .env.example .env
-          export $(egrep -v '^#' .env | xargs)
+          set -a; source .env; set +a
       - name: Pull docker containers
        run: docker-compose pull
      - name: Build and run docker containers
diff --git a/conf/nginx/conf.d/default.conf b/conf/nginx/templates/default.conf.template
similarity index 76%
rename from conf/nginx/conf.d/default.conf
rename to conf/nginx/templates/default.conf.template
index 0b7458671..fa754b6e5 100644
--- a/conf/nginx/conf.d/default.conf
+++ b/conf/nginx/templates/default.conf.template
@@ -2,30 +2,32 @@ upstream django {
     server app:8000 fail_timeout=0;
 }
 
-# server {
+server {
+    listen 80;
+    server_name ${QFIELDCLOUD_HOST};
+
+    # certbot
+    location /.well-known/acme-challenge/ {
+        root /var/www/certbot;
+    }
 
-#     server_name example.com www.example.com;
-# }
+    # use $WEB_HTTPS_PORT in the configuration, since it might be different from 443 (e.g.
localhost) + return 302 https://$host:${WEB_HTTPS_PORT}$request_uri; +} server { - listen 80; listen 443 ssl; - ssl_certificate certs/localhost.pem; - ssl_certificate_key certs/localhost-key.pem; + ssl_certificate certs/${QFIELDCLOUD_HOST}.pem; + ssl_certificate_key certs/${QFIELDCLOUD_HOST}-key.pem; - server_name localhost; + server_name ${QFIELDCLOUD_HOST}; client_max_body_size 10G; keepalive_timeout 5; # path for static files (only needed for serving local staticfiles) root /var/www/html/; - # cerbot - location /.well-known/acme-challenge/ { - root /var/www/certbot; - } - # checks for static file, if not found proxy to app location / { try_files $uri @proxy_to_app; diff --git a/docker-compose.yml b/docker-compose.yml index 5c28ca3a2..440727238 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -86,7 +86,7 @@ services: volumes: - static_volume:/var/www/html/staticfiles/ - media_volume:/var/www/html/mediafiles/ - - ./conf/nginx/conf.d/:/etc/nginx/conf.d/ + - ./conf/nginx/templates/:/etc/nginx/templates/ - ./conf/nginx/certs/:/etc/nginx/certs/:ro - ./conf/nginx/options-ssl-nginx.conf:/etc/nginx/options-ssl-nginx.conf - ./conf/nginx/ssl-dhparams.pem:/etc/nginx/ssl-dhparams.pem @@ -94,6 +94,13 @@ services: ports: - ${WEB_HTTP_PORT}:80 - ${WEB_HTTPS_PORT}:443 + environment: + QFIELDCLOUD_HOST: ${QFIELDCLOUD_HOST} + WEB_HTTP_PORT: ${WEB_HTTP_PORT} + WEB_HTTPS_PORT: ${WEB_HTTPS_PORT} + LETSENCRYPT_EMAIL: ${LETSENCRYPT_EMAIL} + LETSENCRYPT_STAGING: ${LETSENCRYPT_STAGING} + LETSENCRYPT_RSA_KEY_SIZE: ${LETSENCRYPT_RSA_KEY_SIZE} command: [nginx-debug, '-g', 'daemon off;'] logging: *default-logging depends_on: diff --git a/scripts/init_letsencrypt.sh b/scripts/init_letsencrypt.sh index 458da6717..ebf6f1904 100644 --- a/scripts/init_letsencrypt.sh +++ b/scripts/init_letsencrypt.sh @@ -7,44 +7,37 @@ if ! [ -x "$(command -v docker-compose)" ]; then exit 1 fi -domains=(dev.qfield.cloud) -rsa_key_size=4096 -config_path="./conf/nginx" -email="info@opengis.ch" # Adding a valid address is strongly recommended -staging=1 # Set to 1 if you're testing your setup to avoid hitting request limits +set -a; source .env; set +a -if [ ! -e "$config_path/options-ssl-nginx.conf" ] || [ ! -e "$config_path/ssl-dhparams.pem" ]; then +CONFIG_PATH="./conf/nginx" + +if [ ! -e "$CONFIG_PATH/options-ssl-nginx.conf" ] || [ ! -e "$CONFIG_PATH/ssl-dhparams.pem" ]; then echo "### Downloading recommended TLS parameters ..." - curl -s https://raw.githubusercontent.com/certbot/certbot/master/certbot-nginx/certbot_nginx/_internal/tls_configs/options-ssl-nginx.conf > "$config_path/options-ssl-nginx.conf" - curl -s https://raw.githubusercontent.com/certbot/certbot/master/certbot/certbot/ssl-dhparams.pem > "$config_path/ssl-dhparams.pem" + curl -s https://raw.githubusercontent.com/certbot/certbot/master/certbot-nginx/certbot_nginx/_internal/tls_configs/options-ssl-nginx.conf > "$CONFIG_PATH/options-ssl-nginx.conf" + curl -s https://raw.githubusercontent.com/certbot/certbot/master/certbot/certbot/ssl-dhparams.pem > "$CONFIG_PATH/ssl-dhparams.pem" echo fi -echo "### Starting nginx ..." -docker-compose up --force-recreate -d nginx -echo - -exit - -echo "### Requesting Let's Encrypt certificate for $domains ..." -#Join $domains to -d args -domain_args="" -for domain in "${domains[@]}"; do - domain_args="$domain_args -d $domain" -done +echo "### Requesting Let's Encrypt certificate for $QFIELDCLOUD_HOST ..." 
+domain_args="-d ${QFIELDCLOUD_HOST}" # Enable staging mode if needed -if [ $staging != "0" ]; then staging_arg="--staging"; fi +if [ $LETSENCRYPT_STAGING != "0" ]; then staging_arg="--staging"; fi docker-compose run --rm --entrypoint "\ certbot certonly --webroot -w /var/www/certbot \ $staging_arg \ $domain_args \ - --email $email \ - --rsa-key-size $rsa_key_size \ + --email $LETSENCRYPT_EMAIL \ + --rsa-key-size $LETSENCRYPT_RSA_KEY_SIZE \ --agree-tos \ --force-renewal" certbot echo +echo "### Copy the certificate and key to their final destination ..." +cp conf/certbot/conf/live/${QFIELDCLOUD_HOST}/fullchain.pem conf/nginx/certs/${QFIELDCLOUD_HOST}.pem +cp conf/certbot/conf/live/${QFIELDCLOUD_HOST}/privkey.pem conf/nginx/certs/${QFIELDCLOUD_HOST}-key.pem +echo + echo "### Reloading nginx ..." docker-compose exec nginx nginx -s reload From 42df346de18d57d09a7dd08b3b7aadd619d34518 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 6 Dec 2021 04:50:12 +0200 Subject: [PATCH 017/185] Remove caddy --- .env.example | 2 +- .github/workflows/build.yml | 19 ------------------- docker-caddy/Caddyfile | 27 --------------------------- docker-caddy/Caddyfile.proxy-minio | 12 ------------ docker-caddy/Dockerfile | 4 ---- docker-compose.dockerhub.yml | 17 ----------------- docker-compose.override.local.yml | 2 +- docker-compose.yml | 1 - 8 files changed, 2 insertions(+), 82 deletions(-) delete mode 100644 docker-caddy/Caddyfile delete mode 100644 docker-caddy/Caddyfile.proxy-minio delete mode 100644 docker-caddy/Dockerfile diff --git a/.env.example b/.env.example index 317445fd6..47a24a85b 100644 --- a/.env.example +++ b/.env.example @@ -2,7 +2,7 @@ DEBUG=1 QFIELDCLOUD_HOST=localhost DJANGO_SETTINGS_MODULE=qfieldcloud.settings -DJANGO_ALLOWED_HOSTS=localhost 127.0.0.1 0.0.0.0 +DJANGO_ALLOWED_HOSTS='localhost 127.0.0.1 0.0.0.0' SECRET_KEY=change_me diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index ea7777b77..67bf8ff98 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -44,25 +44,6 @@ jobs: username: ${{ secrets.DOCKER_HUB_USERNAME }} password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} - # Caddy - - name: Docker Test Caddy - id: docker_test_caddy - uses: docker/build-push-action@v2 - with: - builder: ${{ steps.buildx.outputs.name }} - context: ./docker-caddy - file: ./docker-caddy/Dockerfile - - - name: Docker Build and Push Caddy - id: docker_build_and_push_caddy - uses: docker/build-push-action@v2 - with: - builder: ${{ steps.buildx.outputs.name }} - context: ./docker-caddy - file: ./docker-caddy/Dockerfile - push: ${{ github.event_name != 'pull_request' }} - tags: opengisch/qfieldcloud-caddy:${{ steps.prepare.outputs.tag }} - # Application - name: Docker Test Application id: docker_test_application diff --git a/docker-caddy/Caddyfile b/docker-caddy/Caddyfile deleted file mode 100644 index 7fe2c19a4..000000000 --- a/docker-caddy/Caddyfile +++ /dev/null @@ -1,27 +0,0 @@ -{ - acme_ca {$CADDY_ACME_CA} - email info@opengis.ch -} - -http://{$QFIELDCLOUD_HOST} https://{$QFIELDCLOUD_HOST} { - log { - level debug - format json - } - - root * /home/app/web/ - file_server - - # Caddy imports. Currently imports the minio configuration from `Caddyfile.proxy-minio`. - # PROD Since we are using a remote S3, `CADDY_IMPORT_GLOB` should be an empty glob ("(*(N))"). - # DEV Use local minio. The file is mounted in docker-compose and CADDY_IMPORT_GLOB is set to the filename. 
- import {$CADDY_IMPORT_GLOB} - - @notStatic { - not { - path /staticfiles/* - path /mediafiles/* - } - } - reverse_proxy @notStatic app:8000 -} diff --git a/docker-caddy/Caddyfile.proxy-minio b/docker-caddy/Caddyfile.proxy-minio deleted file mode 100644 index c109e66cd..000000000 --- a/docker-caddy/Caddyfile.proxy-minio +++ /dev/null @@ -1,12 +0,0 @@ -# This is to be included in the Caddyfile when using local minio S3 (development/testing) -# On production, an empty file should be included instead - - - handle_path /minio/* { - rewrite * {path} - reverse_proxy { - to s3:9000 - header_up Host {upstream_hostport} - header_up X-Forwarded-Host {host} - } - } diff --git a/docker-caddy/Dockerfile b/docker-caddy/Dockerfile deleted file mode 100644 index 2c94bf454..000000000 --- a/docker-caddy/Dockerfile +++ /dev/null @@ -1,4 +0,0 @@ -FROM caddy:2.1.0-alpine - -ADD ./Caddyfile /etc/caddy/Caddyfile -RUN touch /etc/caddy/Caddyfile.proxy-minio diff --git a/docker-compose.dockerhub.yml b/docker-compose.dockerhub.yml index 0a894fd9c..1acf9fe9b 100644 --- a/docker-compose.dockerhub.yml +++ b/docker-compose.dockerhub.yml @@ -56,22 +56,6 @@ services: depends_on: - db - web: - image: opengischprivate/qfieldcloud-caddy:latest - restart: unless-stopped - environment: - QFIELDCLOUD_HOST: ${QFIELDCLOUD_HOST} - CADDY_ACME_CA: ${CADDY_ACME_CA} - CADDY_IMPORT_GLOB: ${CADDY_IMPORT_GLOB} - ACME_AGREE: "true" - volumes: - - static_volume:/home/app/web/staticfiles/ - - media_volume:/home/app/web/mediafiles/ - - caddy_data:/data - ports: - - ${WEB_HTTP_PORT}:80 - - ${WEB_HTTPS_PORT}:443 - qgis: image: opengischprivate/qfieldcloud-qgis:latest tty: true @@ -90,4 +74,3 @@ volumes: postgres_data: static_volume: media_volume: - caddy_data: diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml index 975dbff76..efe1d8f86 100644 --- a/docker-compose.override.local.yml +++ b/docker-compose.override.local.yml @@ -4,7 +4,7 @@ services: app: ports: - # allow direct access without caddy + # allow direct access without nginx - "8000:8000" volumes: # mount the source for live reload diff --git a/docker-compose.yml b/docker-compose.yml index 440727238..961a46235 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -177,6 +177,5 @@ volumes: postgres_data: static_volume: media_volume: - caddy_data: transformation_grids: certbot_www: From 69972b3663620e0679d6942cf93f179adf7650b6 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 6 Dec 2021 16:12:57 +0200 Subject: [PATCH 018/185] Final --- README.md | 3 +++ conf/nginx/templates/default.conf.template | 4 +++- scripts/init_letsencrypt.sh | 0 3 files changed, 6 insertions(+), 1 deletion(-) mode change 100644 => 100755 scripts/init_letsencrypt.sh diff --git a/README.md b/README.md index f7db501d3..78916e1c8 100644 --- a/README.md +++ b/README.md @@ -153,9 +153,12 @@ Run the django database migrations ## Create a certificate using Let's Encrypt +If you are running the server on a server with a public domain, you can install Let's Encrypt certificate by running the following command: ./scripts/init_letsencrypt.sh +Note you may want to change the `LETSENCRYPT_EMAIL`, `LETSENCRYPT_RSA_KEY_SIZE` and `LETSENCRYPT_STAGING` variables. 
+ ### Infrastructure Based on this example diff --git a/conf/nginx/templates/default.conf.template b/conf/nginx/templates/default.conf.template index fa754b6e5..35f0349f5 100644 --- a/conf/nginx/templates/default.conf.template +++ b/conf/nginx/templates/default.conf.template @@ -12,7 +12,9 @@ server { } # use $WEB_HTTPS_PORT in the configuration, since it might be different from 443 (e.g. localhost) - return 302 https://$host:${WEB_HTTPS_PORT}$request_uri; + location / { + return 302 https://$host:${WEB_HTTPS_PORT}$request_uri; + } } server { diff --git a/scripts/init_letsencrypt.sh b/scripts/init_letsencrypt.sh old mode 100644 new mode 100755 From 729c5eac0e7a98b586c3281325d493503a1c14d5 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 7 Dec 2021 01:38:14 +0200 Subject: [PATCH 019/185] Fix minio in the new nginx setup However, downloading is still not working, but the access to the console is there --- .env.example | 2 +- conf/nginx/templates/default.conf.template | 29 +++++++++++----------- docker-compose.override.local.yml | 22 ++++++++-------- 3 files changed, 27 insertions(+), 26 deletions(-) diff --git a/.env.example b/.env.example index 47a24a85b..838c1c48b 100644 --- a/.env.example +++ b/.env.example @@ -16,7 +16,7 @@ STORAGE_SECRET_ACCESS_KEY=minioadmin STORAGE_BUCKET_NAME=qfieldcloud-local STORAGE_REGION_NAME= # Internal URL to the storage endpoint (from python code) -STORAGE_ENDPOINT_URL=http://s3:9000 +STORAGE_ENDPOINT_URL=http://minio:9000 # Public URL to the storage endpoint (external storage should be equivalent to STORAGE_ENDPOINT_URL, local development only, no trailing slash) STORAGE_ENDPOINT_URL_EXTERNAL=http://localhost:80/minio # Public port to the storage endpoint browser (local development only) diff --git a/conf/nginx/templates/default.conf.template b/conf/nginx/templates/default.conf.template index 35f0349f5..a2c6ccccf 100644 --- a/conf/nginx/templates/default.conf.template +++ b/conf/nginx/templates/default.conf.template @@ -44,18 +44,19 @@ server { proxy_pass http://django; } - # location /minio { - # proxy_set_header X-Real-IP $remote_addr; - # proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - # proxy_set_header X-Forwarded-Proto $scheme; - # proxy_set_header Host $http_host; - - # proxy_connect_timeout 300; - # # Default is HTTP/1, keepalive is only enabled in HTTP/1.1 - # proxy_http_version 1.1; - # proxy_set_header Connection ""; - # chunked_transfer_encoding off; - - # proxy_pass http://minio:9000; - # } + location /minio/ { + proxy_buffering off; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header Host $http_host; + + proxy_connect_timeout 300; + # Default is HTTP/1, keepalive is only enabled in HTTP/1.1 + proxy_http_version 1.1; + proxy_set_header Connection ""; + chunked_transfer_encoding off; + + proxy_pass http://minio:9000/; + } } diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml index efe1d8f86..a5ff42857 100644 --- a/docker-compose.override.local.yml +++ b/docker-compose.override.local.yml @@ -17,7 +17,7 @@ services: command: python3 manage.py runserver 0.0.0.0:8000 depends_on: - geodb - - s3 + - minio - smtp4dev worker_wrapper: @@ -54,14 +54,14 @@ services: ports: - ${GEODB_PORT}:5432 - s3: + minio: image: minio/minio:RELEASE.2021-07-27T02-40-15Z restart: unless-stopped volumes: - - s3_data1:/data1 - - s3_data2:/data2 - - s3_data3:/data3 - - s3_data4:/data4 + - minio_data1:/data1 + 
- minio_data2:/data2
+      - minio_data3:/data3
+      - minio_data4:/data4
     environment:
       MINIO_ROOT_USER: ${STORAGE_ACCESS_KEY_ID}
       MINIO_ROOT_PASSWORD: ${STORAGE_SECRET_ACCESS_KEY}
@@ -85,7 +85,7 @@
   createbuckets:
     image: minio/mc
     depends_on:
-      s3:
+      minio:
         condition: service_healthy
     entrypoint: >
       /bin/sh -c "
@@ -99,7 +99,7 @@
 
 volumes:
   geodb_data:
   smtp4dev_data:
-  s3_data1:
-  s3_data2:
-  s3_data3:
-  s3_data4:
+  minio_data1:
+  minio_data2:
+  minio_data3:
+  minio_data4:

From 5b7d0624842003a583ee64f9f146526fbebea900 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 8 Dec 2021 23:11:51 +0200
Subject: [PATCH 020/185] Update the JSON schemas to reflect the recent QField
 delta additions

---
 docker-app/qfieldcloud/core/deltafile_01.json | 18 ++++++++++++++++++
 docker-qgis/schemas/deltafile_01.json         | 18 ++++++++++++++++++
 2 files changed, 36 insertions(+)

diff --git a/docker-app/qfieldcloud/core/deltafile_01.json b/docker-app/qfieldcloud/core/deltafile_01.json
index 7b93ff973..8c92cd305 100644
--- a/docker-app/qfieldcloud/core/deltafile_01.json
+++ b/docker-app/qfieldcloud/core/deltafile_01.json
@@ -165,6 +165,24 @@
           "e933b5aa-6ccb-416f-83e7-5a2ece85cf1a_LayerName"
         ]
       },
+      "localLayerCrs": {
+        "type": "string",
+        "title": "Local Layer CRS",
+        "description": "The layer CRS as used on the QField device.",
+        "examples": [
+          "EPSG:32635",
+          "PROJCS[\"WGS 84 / UTM zone 35N\",GEOGCS[\"WGS 84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS 84\",6378137,298.257223563,AUTHORITY[\"EPSG\",\"7030\"]],AUTHORITY[\"EPSG\",\"6326\"]],PRIMEM[\"Greenwich\",0,AUTHORITY[\"EPSG\",\"8901\"]],UNIT[\"degree\",0.0174532925199433,AUTHORITY[\"EPSG\",\"9122\"]],AUTHORITY[\"EPSG\",\"4326\"]],PROJECTION[\"Transverse_Mercator\"],PARAMETER[\"latitude_of_origin\",0],PARAMETER[\"central_meridian\",27],PARAMETER[\"scale_factor\",0.9996],PARAMETER[\"false_easting\",500000],PARAMETER[\"false_northing\",0],UNIT[\"metre\",1,AUTHORITY[\"EPSG\",\"9001\"]],AXIS[\"Easting\",EAST],AXIS[\"Northing\",NORTH],AUTHORITY[\"EPSG\",\"32635\"]]"
+        ]
+      },
+      "localLayerName": {
+        "type": "string",
+        "title": "Local Layer Name",
+        "description": "The layer name as shown to the QField user.",
+        "examples": [
+          "trees",
+          "Дървета"
+        ]
+      },
       "sourceLayerId": {
         "type": "string",
         "title": "Source Layer ID",
diff --git a/docker-qgis/schemas/deltafile_01.json b/docker-qgis/schemas/deltafile_01.json
index 5ab88637b..039b06d75 100644
--- a/docker-qgis/schemas/deltafile_01.json
+++ b/docker-qgis/schemas/deltafile_01.json
@@ -165,6 +165,24 @@
           "e933b5aa-6ccb-416f-83e7-5a2ece85cf1a_LayerName"
         ]
       },
+      "localLayerCrs": {
+        "type": "string",
+        "title": "Local Layer CRS",
+        "description": "The layer CRS as used on the QField device.",
+        "examples": [
+          "EPSG:32635",
+          "PROJCS[\"WGS 84 / UTM zone 35N\",GEOGCS[\"WGS 84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS 84\",6378137,298.257223563,AUTHORITY[\"EPSG\",\"7030\"]],AUTHORITY[\"EPSG\",\"6326\"]],PRIMEM[\"Greenwich\",0,AUTHORITY[\"EPSG\",\"8901\"]],UNIT[\"degree\",0.0174532925199433,AUTHORITY[\"EPSG\",\"9122\"]],AUTHORITY[\"EPSG\",\"4326\"]],PROJECTION[\"Transverse_Mercator\"],PARAMETER[\"latitude_of_origin\",0],PARAMETER[\"central_meridian\",27],PARAMETER[\"scale_factor\",0.9996],PARAMETER[\"false_easting\",500000],PARAMETER[\"false_northing\",0],UNIT[\"metre\",1,AUTHORITY[\"EPSG\",\"9001\"]],AXIS[\"Easting\",EAST],AXIS[\"Northing\",NORTH],AUTHORITY[\"EPSG\",\"32635\"]]"
+        ]
+      },
+      "localLayerName": {
+        "type": "string",
+        "title": "Local Layer Name",
+        "description": "The layer name as shown to the QField user.",
+        "examples": [
+          "trees",
"Дървета" + ] + }, "sourceLayerId": { "type": "string", "title": "Source Layer ID", From 78c5001f5ddb20bfd12f4607175c8ddbc6c73ee2 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 10 Dec 2021 16:45:42 +0200 Subject: [PATCH 021/185] Increase logs stored for app --- docker-compose.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 961a46235..3beeb1e7e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -75,7 +75,11 @@ services: depends_on: - db - redis - logging: *default-logging + logging: + driver: "json-file" + options: + max-size: "1000m" + max-file: "5" labels: ofelia.enabled: "true" ofelia.job-exec.runcrons.schedule: 0 * * * * * From fcd742e554144235fb88f6a45a0ce740ba8d41af Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 10 Dec 2021 19:48:50 +0200 Subject: [PATCH 022/185] Limit logs at 1000 chars, not matter if JSON or not --- .../core/middleware/request_response_log.py | 27 +++++++++---------- 1 file changed, 12 insertions(+), 15 deletions(-) diff --git a/docker-app/qfieldcloud/core/middleware/request_response_log.py b/docker-app/qfieldcloud/core/middleware/request_response_log.py index e2d320e68..bc064c533 100644 --- a/docker-app/qfieldcloud/core/middleware/request_response_log.py +++ b/docker-app/qfieldcloud/core/middleware/request_response_log.py @@ -18,7 +18,7 @@ logger = logging.getLogger("qfieldcloud.request_response_log") -MAX_RESPONSE_BODY_LENGTH = 100 +MAX_RESPONSE_BODY_LENGTH = 1000 CENSOR_DATA_KEYS = [ "password", "token", @@ -68,27 +68,24 @@ def extract_log_info(self, request, response=None, exception=None): if response: if response.get("content-type") == "application/json": + response_string = "" if hasattr(response, "data"): - log_data["response_body"] = response.data - else: - # TODO in theory there should be a way without reparsing the response content, should use the content before being returned try: - log_data["response_body"] = json.loads(response.content) + response_string = json.dumps( + response.data, sort_keys=True, indent=1 + ) except Exception as err: response_string = str(response.content, "utf-8") - log_data["response_body"] = response_string[ - :MAX_RESPONSE_BODY_LENGTH - ] - log_data["json_parse_error"] = str(err) - - if len(response_string) > MAX_RESPONSE_BODY_LENGTH: - log_data["response_trimmed"] = MAX_RESPONSE_BODY_LENGTH + log_data["json_serialize_error"] = str(err) + else: + response_string = str(response.content, "utf-8") else: response_string = str(response.content, "utf-8") - log_data["response_body"] = response_string[:MAX_RESPONSE_BODY_LENGTH] - if len(response_string) > MAX_RESPONSE_BODY_LENGTH: - log_data["response_trimmed"] = MAX_RESPONSE_BODY_LENGTH + log_data["response_body"] = response_string[:MAX_RESPONSE_BODY_LENGTH] + + if len(response_string) > MAX_RESPONSE_BODY_LENGTH: + log_data["response_trimmed"] = MAX_RESPONSE_BODY_LENGTH log_data["response_headers"] = {**response.headers} log_data["status_code"] = response.status_code From da21df07766520216e49d0e41db4fed119fbc083 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 10 Dec 2021 23:54:30 +0200 Subject: [PATCH 023/185] Use external database by default --- docker-app/wait_for_services.py | 3 ++- docker-compose.override.local.yml | 18 ++++++++++++++++++ docker-compose.yml | 15 --------------- 3 files changed, 20 insertions(+), 16 deletions(-) diff --git a/docker-app/wait_for_services.py b/docker-app/wait_for_services.py index b855087cf..e457a485d 100644 --- a/docker-app/wait_for_services.py +++ 
b/docker-app/wait_for_services.py @@ -19,7 +19,8 @@ def wait_for_postgres(): "dbname": os.environ.get("SQL_DATABASE"), "user": os.environ.get("SQL_USER"), "password": os.environ.get("SQL_PASSWORD"), - "host": "db", + "host": os.environ.get("SQL_HOST"), + "port": os.environ.get("SQL_PORT"), } start_time = time() while time() - start_time < TIMEOUT: diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml index a5ff42857..70a342a1b 100644 --- a/docker-compose.override.local.yml +++ b/docker-compose.override.local.yml @@ -16,6 +16,7 @@ services: STORAGE_BROWSER_PORT: ${STORAGE_BROWSER_PORT} command: python3 manage.py runserver 0.0.0.0:8000 depends_on: + - db - geodb - minio - smtp4dev @@ -25,6 +26,10 @@ services: # mount the source for live reload - ./docker-app/qfieldcloud:/usr/src/app/qfieldcloud - ./docker-app/worker_wrapper:/usr/src/app/worker_wrapper + depends_on: + - db + - redis + - app smtp4dev: image: rnwood/smtp4dev:v3 @@ -42,6 +47,18 @@ services: # Specifies the server hostname. Used in auto-generated TLS certificate if enabled. - ServerOptions__HostName=smtp4dev + db: + image: postgis/postgis:13-3.1-alpine + restart: unless-stopped + environment: + POSTGRES_DB: ${POSTGRES_DB} + POSTGRES_USER: ${POSTGRES_USER} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} + volumes: + - postgres_data:/var/lib/postgresql/data/ + ports: + - ${HOST_POSTGRES_PORT}:5432 + geodb: image: postgis/postgis:12-3.0 restart: unless-stopped @@ -97,6 +114,7 @@ services: " volumes: + postgres_data: geodb_data: smtp4dev_data: minio_data1: diff --git a/docker-compose.yml b/docker-compose.yml index 3beeb1e7e..78f827fe2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -8,18 +8,6 @@ x-logging: max-file: "10" services: - db: - image: postgis/postgis:13-3.1-alpine - restart: unless-stopped - environment: - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_USER: ${POSTGRES_USER} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - volumes: - - postgres_data:/var/lib/postgresql/data/ - ports: - - ${HOST_POSTGRES_PORT}:5432 - logging: *default-logging app: &default-django build: @@ -73,7 +61,6 @@ services: WEB_HTTPS_PORT: ${WEB_HTTPS_PORT} TRANSFORMATION_GRIDS_VOLUME_NAME: ${COMPOSE_PROJECT_NAME}_transformation_grids depends_on: - - db - redis logging: driver: "json-file" @@ -157,7 +144,6 @@ services: - ${TMP_DIRECTORY}:/tmp logging: *default-logging depends_on: - - db - redis - app @@ -178,7 +164,6 @@ services: - transformation_grids:/transformation_grids volumes: - postgres_data: static_volume: media_volume: transformation_grids: From c3994e0cf06deaf78be7d4a77ecfb6222d8d52a1 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Sat, 11 Dec 2021 01:06:08 +0200 Subject: [PATCH 024/185] Add SSL_MODE setting --- .env.example | 1 + docker-app/qfieldcloud/settings.py | 1 + docker-app/wait_for_services.py | 1 + docker-compose.yml | 1 + 4 files changed, 4 insertions(+) diff --git a/.env.example b/.env.example index 838c1c48b..2a285270e 100644 --- a/.env.example +++ b/.env.example @@ -30,6 +30,7 @@ POSTGRES_PASSWORD=3shJDd2r7Twwkehb POSTGRES_DB=qfieldcloud_db POSTGRES_HOST=db POSTGRES_PORT=5432 +POSTGRES_SSLMODE=prefer # "prefer" OR "require" most of the times HOST_POSTGRES_PORT=5433 GEODB_HOST=geodb diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py index 761df7bf1..b0bfdf264 100644 --- a/docker-app/qfieldcloud/settings.py +++ b/docker-app/qfieldcloud/settings.py @@ -134,6 +134,7 @@ "PASSWORD": os.environ.get("SQL_PASSWORD"), "HOST": os.environ.get("SQL_HOST"), "PORT": 
os.environ.get("SQL_PORT"), + "OPTIONS": {"sslmode": os.environ.get("SQL_SSLMODE")}, } } diff --git a/docker-app/wait_for_services.py b/docker-app/wait_for_services.py index e457a485d..6324f70e4 100644 --- a/docker-app/wait_for_services.py +++ b/docker-app/wait_for_services.py @@ -21,6 +21,7 @@ def wait_for_postgres(): "password": os.environ.get("SQL_PASSWORD"), "host": os.environ.get("SQL_HOST"), "port": os.environ.get("SQL_PORT"), + "sslmode": os.environ.get("SQL_SSLMODE"), } start_time = time() while time() - start_time < TIMEOUT: diff --git a/docker-compose.yml b/docker-compose.yml index 78f827fe2..8b521d229 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -27,6 +27,7 @@ services: SQL_PASSWORD: ${POSTGRES_PASSWORD} SQL_HOST: ${POSTGRES_HOST} SQL_PORT: ${POSTGRES_PORT} + SQL_SSLMODE: ${POSTGRES_SSLMODE} STORAGE_ACCESS_KEY_ID: ${STORAGE_ACCESS_KEY_ID} STORAGE_SECRET_ACCESS_KEY: ${STORAGE_SECRET_ACCESS_KEY} STORAGE_BUCKET_NAME: ${STORAGE_BUCKET_NAME} From 14b15e0ceef1a04978eb47c7b04d328b2da1ae7a Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Sat, 11 Dec 2021 01:22:33 +0200 Subject: [PATCH 025/185] Fix missing minio at startup in non-local environment --- conf/nginx/templates/default.conf.template | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/conf/nginx/templates/default.conf.template b/conf/nginx/templates/default.conf.template index a2c6ccccf..2ba618ad9 100644 --- a/conf/nginx/templates/default.conf.template +++ b/conf/nginx/templates/default.conf.template @@ -57,6 +57,7 @@ server { proxy_set_header Connection ""; chunked_transfer_encoding off; - proxy_pass http://minio:9000/; + set $target http://minio1:9000/; + proxy_pass $target; } } From fa515e024f00df39a311dd55235e5ce482ea78af Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Sat, 11 Dec 2021 19:11:58 +0200 Subject: [PATCH 026/185] Add a way to ignore certain envvars, since k8s and docker-compose have different requirements --- scripts/check_envvars.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/scripts/check_envvars.py b/scripts/check_envvars.py index 1623644f4..6293b881c 100755 --- a/scripts/check_envvars.py +++ b/scripts/check_envvars.py @@ -113,10 +113,17 @@ def get_env_varnames_from_k8s_environments(search_path: str) -> Dict[str, List[s type=str, help="Directory containing k8s configuration and secret files.", ) + parser.add_argument( + "--ignored-varnames", + type=str, + nargs="*", + help="Ignored varnames.", + ) args = parser.parse_args() problems = [] envfile_vars = get_env_varnames_from_envfile(args.envfile) + ignored_varnames = args.ignored_varnames or [] if args.docker_compose_dir: dockercompose_vars = get_env_varnames_from_docker_compose_files( @@ -124,6 +131,9 @@ def get_env_varnames_from_k8s_environments(search_path: str) -> Dict[str, List[s ) for varname in envfile_vars.difference(set(dockercompose_vars.keys())): + if varname in ignored_varnames: + continue + if varname in envfile_vars: problems.append( f'Envvar "{varname}" is defined in the .env file, but not found in any docker-compose file.' @@ -141,6 +151,9 @@ def get_env_varnames_from_k8s_environments(search_path: str) -> Dict[str, List[s ] for varname in envfile_vars.difference(set(k8s_vars.keys())): + if varname in ignored_varnames: + continue + if varname in envfile_vars: problems.append( f'Envvar "{varname}" is defined in the .env file, but not found in the any k8s configuration(s) and secret(s).' 
@@ -152,8 +165,12 @@ def get_env_varnames_from_k8s_environments(search_path: str) -> Dict[str, List[s ) for varname, occurrences in k8s_vars.items(): + if varname in ignored_varnames: + continue + for environment in occurrences: if environment not in k8s_environments: + problems.append( f'Envvar "{varname}" should be in all k8s environments, but missing not found neither in configuration or secrets of "{environment}".' ) From 1bd12e2daf3458a648786c878881e7f1ef58a1aa Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 13 Dec 2021 12:40:48 +0200 Subject: [PATCH 027/185] Dangerous race condition when invalidating rest of the tokens, it also invalidated the current token --- docker-app/qfieldcloud/authentication/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-app/qfieldcloud/authentication/models.py b/docker-app/qfieldcloud/authentication/models.py index a1121099e..71ebd31d7 100644 --- a/docker-app/qfieldcloud/authentication/models.py +++ b/docker-app/qfieldcloud/authentication/models.py @@ -96,5 +96,5 @@ def save(self, *args, **kwargs) -> None: user=self.user, client_type=self.client_type, expires_at__gt=now, - ).update(expires_at=now) + ).exclude(pk=self.pk).update(expires_at=now) return super().save(*args, **kwargs) From 3f9675838be0478863f44f3c086db3d4fa2a5d9b Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 14 Dec 2021 14:02:15 +0200 Subject: [PATCH 028/185] Fix admin saving deltas --- docker-app/qfieldcloud/core/models.py | 4 ++-- docker-app/qfieldcloud/settings.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py index cd69eff13..12ecf116a 100644 --- a/docker-app/qfieldcloud/core/models.py +++ b/docker-app/qfieldcloud/core/models.py @@ -1064,8 +1064,8 @@ class Status(models.TextChoices): on_delete=models.CASCADE, related_name="uploaded_deltas", ) - old_geom = models.GeometryField(null=True, srid=0, dim=4) - new_geom = models.GeometryField(null=True, srid=0, dim=4) + old_geom = models.GeometryField(null=True, srid=4326, dim=4) + new_geom = models.GeometryField(null=True, srid=4326, dim=4) def __str__(self): return str(self.id) + ", project: " + str(self.project.id) diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py index b0bfdf264..ddad69235 100644 --- a/docker-app/qfieldcloud/settings.py +++ b/docker-app/qfieldcloud/settings.py @@ -48,6 +48,7 @@ "django.contrib.admin", "django.contrib.auth", "django.contrib.contenttypes", + "django.contrib.gis", "django.contrib.sessions", "django.contrib.messages", "django.contrib.staticfiles", From bab1465f646ef37d32887b63f1367af00db0aeff Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 14 Dec 2021 15:08:15 +0200 Subject: [PATCH 029/185] Bump QGIS from 3.20.2 to 3.22.1 --- docker-qgis/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-qgis/Dockerfile b/docker-qgis/Dockerfile index 55f122c5e..b6dd29bb9 100644 --- a/docker-qgis/Dockerfile +++ b/docker-qgis/Dockerfile @@ -1,4 +1,4 @@ -FROM qgis/qgis:final-3_20_2 +FROM qgis/qgis:final-3_22_1 RUN apt-get update && \ DEBIAN_FRONTEND=noninteractive apt-get install -y \ From eb7c6002d9b261e353126ddea2f083e89801af9a Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 14 Dec 2021 21:49:04 +0200 Subject: [PATCH 030/185] Get the modified_pk from the feature even in transaction mode --- docker-qgis/apply_deltas.py | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git 
a/docker-qgis/apply_deltas.py b/docker-qgis/apply_deltas.py index 06cfde155..4742542f9 100755 --- a/docker-qgis/apply_deltas.py +++ b/docker-qgis/apply_deltas.py @@ -33,6 +33,7 @@ QgsProject, QgsProviderRegistry, QgsVectorLayer, + QgsVectorLayerEditPassthrough, QgsVectorLayerUtils, ) from qgis.PyQt.QtCore import QCoreApplication @@ -439,11 +440,20 @@ def apply_deltas_without_transaction( provider_errors=layer.dataProvider().errors(), ) + has_edit_buffer = layer.editBuffer() and not isinstance( + layer.editBuffer(), QgsVectorLayerEditPassthrough + ) delta = inverse_delta(delta) if inverse else delta if delta["method"] == str(DeltaMethod.CREATE): # don't use the returned feature as the PK might contain the "Autogenerated" string value, instead the real one - create_feature(layer, delta, overwrite_conflicts=overwrite_conflicts) + created_feature = create_feature( + layer, delta, overwrite_conflicts=overwrite_conflicts + ) + + # apparently the only way to obtain the feature if there is no edit buffer is use the returned created_feature + if not has_edit_buffer: + feature = created_feature elif delta["method"] == str(DeltaMethod.PATCH): feature = patch_feature( layer, @@ -461,7 +471,7 @@ def apply_deltas_without_transaction( else: raise DeltaException("Unknown delta method") - def commited_features_added_cb(layer_id, features): + def committed_features_added_cb(layer_id, features): if len(features) != 0 and len(features) != 1: raise DeltaException( f"Expected only one feature, but actually {len(features)} were added." @@ -475,8 +485,9 @@ def commited_features_added_cb(layer_id, features): nonlocal feature feature = features[0] - # in QGIS the only way to get the real features that have been added after commit is to use this signal. - layer.committedFeaturesAdded.connect(commited_features_added_cb) + if has_edit_buffer: + # in QGIS the only way to get the real features that have been added after commit, if edit buffer is present, is to use this signal. 
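+            # A minimal sketch of the same capture pattern in isolation
+            # (assumes a hypothetical editable `layer`; mirrors the code below):
+            #
+            #     captured = []
+            #     layer.committedFeaturesAdded.connect(
+            #         lambda _layer_id, features: captured.extend(features)
+            #     )
+            #     layer.commitChanges()
+            #     QCoreApplication.processEvents()  # deliver the queued signal
+            #     feature = captured[0] if captured else None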
+ layer.committedFeaturesAdded.connect(committed_features_added_cb) if not layer.commitChanges(): raise DeltaException( @@ -484,7 +495,9 @@ def commited_features_added_cb(layer_id, features): provider_errors=layer.dataProvider().errors(), ) - QCoreApplication.processEvents() + if has_edit_buffer: + QCoreApplication.processEvents() + layer.committedFeaturesAdded.disconnect(committed_features_added_cb) logger.info(f'Successfully applied delta on layer "{layer_id}"') From af270f3bf2eaa99cb57158f3db48f9d1a56ee8fb Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 14 Dec 2021 21:49:25 +0200 Subject: [PATCH 031/185] Minor comment typo fix --- docker-app/qfieldcloud/core/admin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-app/qfieldcloud/core/admin.py b/docker-app/qfieldcloud/core/admin.py index dce260500..16eccd742 100644 --- a/docker-app/qfieldcloud/core/admin.py +++ b/docker-app/qfieldcloud/core/admin.py @@ -479,7 +479,7 @@ def response_change(self, request, delta): self.message_user(request, "Delta application started") - # we need to sleep 1 second, just to make surethe apply delta started + # we need to sleep 1 second, just to make sure the apply delta started time.sleep(1) return HttpResponseRedirect(".") From 75a66e96f6dfabffe619f7a3cfcb44abf2435368 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 14 Dec 2021 21:49:34 +0200 Subject: [PATCH 032/185] Fix applying deltas from admin --- docker-app/qfieldcloud/core/admin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-app/qfieldcloud/core/admin.py b/docker-app/qfieldcloud/core/admin.py index 16eccd742..8354f1ffd 100644 --- a/docker-app/qfieldcloud/core/admin.py +++ b/docker-app/qfieldcloud/core/admin.py @@ -463,7 +463,7 @@ def set_status_unpermitted(self, request, queryset): def response_change(self, request, delta): if "_apply_delta_btn" in request.POST: - if delta.project.project_filename: + if not delta.project.project_filename: self.message_user(request, "Missing project file") raise exceptions.NoQGISProjectError() From 3f1f097021e66d08a35a42f185dbf330d2622a89 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 14 Dec 2021 21:49:53 +0200 Subject: [PATCH 033/185] Show last_modified_pk field in the admin --- docker-app/qfieldcloud/core/admin.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker-app/qfieldcloud/core/admin.py b/docker-app/qfieldcloud/core/admin.py index 8354f1ffd..0fa81f999 100644 --- a/docker-app/qfieldcloud/core/admin.py +++ b/docker-app/qfieldcloud/core/admin.py @@ -404,6 +404,7 @@ class DeltaAdmin(admin.ModelAdmin): "project", "deltafile_id", "last_feedback__pre", + "last_modified_pk", "created_by", "created_at", "updated_at", @@ -417,6 +418,7 @@ class DeltaAdmin(admin.ModelAdmin): "updated_at", "content", "last_feedback__pre", + "last_modified_pk", ) search_fields = ( "project__name__iexact", From c657f6b7ab0a646bf7918934ec62e5ce50221989 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 14 Dec 2021 20:51:02 +0200 Subject: [PATCH 034/185] Fix process_projectfile job on freshly uploaded project file --- docker-app/worker_wrapper/wrapper.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docker-app/worker_wrapper/wrapper.py b/docker-app/worker_wrapper/wrapper.py index f41c70207..61e5729b6 100644 --- a/docker-app/worker_wrapper/wrapper.py +++ b/docker-app/worker_wrapper/wrapper.py @@ -22,6 +22,7 @@ PackageJob, ProcessProjectfileJob, ) +from qfieldcloud.core.utils import get_qgis_project_file logger = logging.getLogger(__name__) @@ 
-384,6 +385,16 @@ class ProcessProjectfileJobRun(JobRun): "%(project__project_filename)s", ] + def get_context(self, *args) -> Dict[str, Any]: + context = super().get_context(*args) + + if not context.get("project__project_filename"): + context["project__project_filename"] = get_qgis_project_file( + context["project__id"] + ) + + return context + def after_docker_run(self) -> None: project = self.job.project From 659ddb42578a965f1b8ab8b674705d4f0e77cf40 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 16 Dec 2021 23:47:33 +0200 Subject: [PATCH 035/185] Bump backport version --- .github/workflows/backport.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 52fcec698..ae6427173 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -11,6 +11,6 @@ jobs: name: Backport steps: - name: Backport - uses: m-kuhn/backport@v1.2.3 + uses: m-kuhn/backport@v1.2.4 with: github_token: ${{ secrets.GITHUB_TOKEN }} From 0b75410782a1c5717f5a9df4329d7abdc7ea1a70 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 17 Dec 2021 14:21:11 +0200 Subject: [PATCH 036/185] Use fairy for backport --- .github/workflows/backport.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index ae6427173..2a487ad90 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -13,4 +13,4 @@ jobs: - name: Backport uses: m-kuhn/backport@v1.2.4 with: - github_token: ${{ secrets.GITHUB_TOKEN }} + github_token: ${{ secrets.FAIRY_TOKEN }} From cea8aacf72107a868d0e34a69ae5149925acc3f7 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Sat, 18 Dec 2021 03:54:31 +0200 Subject: [PATCH 037/185] Add project.status field that shows whether the project is ready to use --- docker-app/qfieldcloud/core/models.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py index 12ecf116a..3329b2a0e 100644 --- a/docker-app/qfieldcloud/core/models.py +++ b/docker-app/qfieldcloud/core/models.py @@ -820,6 +820,12 @@ class Project(models.Model): The owner of a project is an Organization. 
""" + # NOTE the status is NOT stored in the db, because it might be refactored + class Status(models.TextChoices): + OK = "ok", _("Ok") + BUSY = "busy", _("Busy") + FAILED = "failed", _("Failed") + objects = ProjectQueryset.as_manager() _cache_files_count = None @@ -955,6 +961,21 @@ def needs_repackaging(self) -> bool: # if the project has online vector layers (PostGIS/WFS/etc) we cannot be sure if there are modification or not, so better say there are return True + @property + def status(self) -> Status: + # NOTE the status is NOT stored in the db, because it might be refactored + if ( + Job.objects.filter( + project=self, status__in=[Job.Status.QUEUED, Job.Status.STARTED] + ).count() + > 0 + ): + return Project.Status.BUSY + elif not self.project_filename: + return Project.Status.FAILED + else: + return Project.Status.OK + @receiver(pre_delete, sender=Project) def delete_project(sender: Type[Project], instance: Project, **kwargs: Any) -> None: From e462e88331a9cda1797f835e851f6f9476ece7e3 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 16 Dec 2021 15:47:40 +0200 Subject: [PATCH 038/185] Simplify delta related permissions --- .../qfieldcloud/core/permissions_utils.py | 43 +++++++++++++++++++ docker-app/requirements.txt | 1 + 2 files changed, 44 insertions(+) diff --git a/docker-app/qfieldcloud/core/permissions_utils.py b/docker-app/qfieldcloud/core/permissions_utils.py index 5100de072..27100efda 100644 --- a/docker-app/qfieldcloud/core/permissions_utils.py +++ b/docker-app/qfieldcloud/core/permissions_utils.py @@ -1,5 +1,6 @@ from typing import List, Union +from deprecated import deprecated from qfieldcloud.core.models import ( Delta, Organization, @@ -202,6 +203,18 @@ def can_read_deltas(user: QfcUser, project: Project) -> bool: ) +def can_apply_pending_deltas_for_project(user: QfcUser, project: Project) -> bool: + return user_has_project_roles( + user, + project, + [ + ProjectCollaborator.Roles.ADMIN, + ProjectCollaborator.Roles.MANAGER, + ], + ) + + +@deprecated("Use `can_set_delta_status_for_project` instead") def can_apply_deltas(user: QfcUser, project: Project) -> bool: return user_has_project_roles( user, @@ -215,6 +228,7 @@ def can_apply_deltas(user: QfcUser, project: Project) -> bool: ) +@deprecated("Use `can_set_delta_status_for_project` instead") def can_overwrite_deltas(user: QfcUser, project: Project) -> bool: return user_has_project_roles( user, @@ -227,6 +241,32 @@ def can_overwrite_deltas(user: QfcUser, project: Project) -> bool: ) +def can_set_delta_status_for_project(user: QfcUser, project: Project) -> bool: + return user_has_project_roles( + user, + project, + [ + ProjectCollaborator.Roles.ADMIN, + ProjectCollaborator.Roles.MANAGER, + ], + ) + + +def can_set_delta_status(user: QfcUser, delta: Delta) -> bool: + if not can_set_delta_status_for_project(user, delta.project): + return False + + if delta.last_status not in ( + Delta.Status.PENDING, + Delta.Status.CONFLICT, + Delta.Status.NOT_APPLIED, + Delta.Status.ERROR, + ): + return False + + return True + + def can_create_delta(user: QfcUser, delta: Delta) -> bool: """Whether the user can store given delta.""" project: Project = delta.project @@ -249,6 +289,7 @@ def can_create_delta(user: QfcUser, delta: Delta) -> bool: return False +@deprecated("Use `can_set_delta_status` instead") def can_retry_delta(user: QfcUser, delta: Delta) -> bool: if not can_apply_deltas(user, delta.project): return False @@ -263,6 +304,7 @@ def can_retry_delta(user: QfcUser, delta: Delta) -> bool: return True +@deprecated("Use 
`can_set_delta_status` instead") def can_overwrite_delta(user: QfcUser, delta: Delta) -> bool: if not can_overwrite_deltas(user, delta.project): return False @@ -273,6 +315,7 @@ def can_overwrite_delta(user: QfcUser, delta: Delta) -> bool: return True +@deprecated("Use `can_set_delta_status` instead") def can_ignore_delta(user: QfcUser, delta: Delta) -> bool: if not can_apply_deltas(user, delta.project): return False diff --git a/docker-app/requirements.txt b/docker-app/requirements.txt index 630cdcc20..1a1e082c4 100644 --- a/docker-app/requirements.txt +++ b/docker-app/requirements.txt @@ -27,3 +27,4 @@ django-axes>=5.26.0,<6.0.0 mkdocs>=1.2.3 django-timezone-field>=4.2.1 django-model-utils>=4.2.0 +deprecated>=1.2.13 From 58b851f2c7e9ef107c9f5ab748b953d86bac0852 Mon Sep 17 00:00:00 2001 From: Matthias Kuhn Date: Mon, 20 Dec 2021 08:34:42 +0100 Subject: [PATCH 039/185] [chore] Use nyuki-token for backport --- .github/workflows/backport.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 2a487ad90..58e42d4b2 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -13,4 +13,4 @@ jobs: - name: Backport uses: m-kuhn/backport@v1.2.4 with: - github_token: ${{ secrets.FAIRY_TOKEN }} + github_token: ${{ secrets.NYUKI_TOKEN }} From 20512bf53a91c27a0d08b70bc8ff9ad52c531dc0 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 20 Dec 2021 14:53:41 +0200 Subject: [PATCH 040/185] Rename vl_extent to vl_extent_wkt --- docker-qgis/entrypoint.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py index e6eac871f..00b1ba2aa 100755 --- a/docker-qgis/entrypoint.py +++ b/docker-qgis/entrypoint.py @@ -185,13 +185,14 @@ def _call_qfieldsync_packager(project_filepath: Path, package_dir: Path) -> Dict } project_config = ProjectConfiguration(project) - vl_extent = QgsRectangle() + vl_extent_wkt = QgsRectangle() vl_extent_crs = project.crs().authid() if project_config.area_of_interest and project_config.area_of_interest_crs: - vl_extent = project_config.area_of_interest + vl_extent_wkt = project_config.area_of_interest vl_extent_crs = project_config.area_of_interest_crs else: + vl_extent = QgsRectangle() for layer in layers.values(): if type(layer) != QgsVectorLayer: continue @@ -231,14 +232,14 @@ def _call_qfieldsync_packager(project_filepath: Path, package_dir: Path) -> Dict if vl_extent.isNull() or not vl_extent.isFinite(): raise Exception("Failed to obtain the project extent.") - vl_extent = vl_extent.asWktPolygon() + vl_extent_wkt = vl_extent.asWktPolygon() vl_extent_crs = project.crs().authid() offline_editing = QgsOfflineEditing() offline_converter = OfflineConverter( project, str(package_dir), - vl_extent, + vl_extent_wkt, vl_extent_crs, offline_editing, export_type=ExportType.Cloud, From fd01d25eeb5b1951119fc11a4438dae4f7ab611a Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 20 Dec 2021 14:54:17 +0200 Subject: [PATCH 041/185] Buffer the layer generated extent with 1 map unit sometimes the result is a polygon whose all points are on the same line this is an invalid polygon and cannot libqfieldsync does not like it --- docker-qgis/entrypoint.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py index 00b1ba2aa..f1d3e1b27 100755 --- a/docker-qgis/entrypoint.py +++ b/docker-qgis/entrypoint.py @@ -232,6 +232,9 @@ def _call_qfieldsync_packager(project_filepath: 
Path, package_dir: Path) -> Dict
     if vl_extent.isNull() or not vl_extent.isFinite():
         raise Exception("Failed to obtain the project extent.")
 
+    # sometimes the result is a polygon whose points all lie on the same line;
+    # this is an invalid polygon and libqfieldsync does not like it
+    vl_extent = vl_extent.buffered(1)
     vl_extent_wkt = vl_extent.asWktPolygon()
     vl_extent_crs = project.crs().authid()

From 4610a7048efb36de3abaa35adea0755875e6c8c4 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Mon, 20 Dec 2021 16:27:21 +0200
Subject: [PATCH 042/185] Test update backport

---
 .github/workflows/backport.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml
index 58e42d4b2..5674967ea 100644
--- a/.github/workflows/backport.yml
+++ b/.github/workflows/backport.yml
@@ -11,6 +11,6 @@ jobs:
     name: Backport
     steps:
       - name: Backport
-        uses: m-kuhn/backport@v1.2.4
+        uses: m-kuhn/backport@v1.2.5
         with:
           github_token: ${{ secrets.NYUKI_TOKEN }}

From fd78e4256c0dc8b2e0f5e82c9d8a36bfb8e58e1c Mon Sep 17 00:00:00 2001
From: Matthias Kuhn
Date: Mon, 20 Dec 2021 15:32:31 +0100
Subject: [PATCH 043/185] Remove empty line

From ca618a814b9795b6e246c5ffb82f5ec13a5ed36f Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Mon, 20 Dec 2021 20:09:52 +0200
Subject: [PATCH 044/185] Added better feedback in the "theMapCanvas" test

---
 docker-app/qfieldcloud/core/tests/test_qfield_file.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/docker-app/qfieldcloud/core/tests/test_qfield_file.py b/docker-app/qfieldcloud/core/tests/test_qfield_file.py
index 54c6565ac..a481ecb18 100644
--- a/docker-app/qfieldcloud/core/tests/test_qfield_file.py
+++ b/docker-app/qfieldcloud/core/tests/test_qfield_file.py
@@ -307,6 +307,9 @@ def test_downloaded_file_has_canvas_name(self):
                     for line in f:
                         if 'name="theMapCanvas"' in line:
                             return
+                self.fail(
+                    'Worker failed, missing .qgs XML attribute: name="theMapCanvas"'
+                )
             elif payload["status"] == "STATUS_ERROR":
                 self.fail("Worker failed with error")

From c80f5cc51bc654bbc1ea0501d4369c5e05c35d7d Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Mon, 20 Dec 2021 20:21:54 +0200
Subject: [PATCH 045/185] Bump libqfieldsync that fixes theMapCanvas

---
 docker-qgis/libqfieldsync | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker-qgis/libqfieldsync b/docker-qgis/libqfieldsync
index 79ee4add2..e45bf4a94 160000
--- a/docker-qgis/libqfieldsync
+++ b/docker-qgis/libqfieldsync
@@ -1 +1 @@
-Subproject commit 79ee4add257cbc3b6f5dad59cca1a87f55414ab0
+Subproject commit e45bf4a94d70fd03cebc5f2d10faaac45d35969b

From b5e91ac6b4935b84714ad054e9d57a9126e383c8 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Tue, 21 Dec 2021 10:54:57 +0200
Subject: [PATCH 046/185] Add stale bot

---
 .github/workflows/stale.yml | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)
 create mode 100644 .github/workflows/stale.yml

diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
new file mode 100644
index 000000000..0eab868ef
--- /dev/null
+++ b/.github/workflows/stale.yml
@@ -0,0 +1,20 @@
+name: 👓 Close stale issues
+on:
+  schedule:
+    - cron: "30 1 * * *"
+
+jobs:
+  stale:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/stale@v4
+        with:
+          repo-token: ${{ secrets.NYUKI_TOKEN }}
+          stale-issue-message: |
+            The QFieldCloud project highly values your report and would love to see it addressed. However, this issue has been left in feedback mode for the last 14 days and is being automatically marked as "stale".
If you would like to continue with this issue, please provide any missing information or answer any open questions. If you could resolve the issue yourself meanwhile, please leave a note for future readers with the same problem and close the issue. + In case you should have any uncertainty, please leave a comment and we will be happy to help you proceed with this issue. + If there is no further activity on this issue, it will be closed in a week. + stale-issue-label: 'stale' + only-labels: 'feedback' + days-before-stale: 14 + days-before-close: 7 From 15f616cd941dd1faede92b6ee954ce4f0c4858b0 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 21 Dec 2021 17:02:13 +0200 Subject: [PATCH 047/185] No expiration for invitations --- docker-app/qfieldcloud/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py index ddad69235..685c05a4b 100644 --- a/docker-app/qfieldcloud/settings.py +++ b/docker-app/qfieldcloud/settings.py @@ -274,7 +274,7 @@ # Django invitations configurations # https://github.com/bee-keeper/django-invitations#additional-configuration -INVITATIONS_INVITATION_EXPIRY = 14 # Days +INVITATIONS_INVITATION_EXPIRY = 0 # integer in days, 0 for no expiration INVITATIONS_INVITATION_ONLY = True INVITATIONS_ACCEPT_INVITE_AFTER_SIGNUP = True INVITATIONS_GONE_ON_ACCEPT_ERROR = False From d12378caeeab7beb725b601e3d57bd964fa518b2 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Sun, 26 Dec 2021 17:13:41 +0200 Subject: [PATCH 048/185] Audit the important user actions --- docker-app/qfieldcloud/core/models.py | 15 ++++++++ docker-app/qfieldcloud/core/utils.py | 22 +++++++++++ docker-app/qfieldcloud/core/utils2/audit.py | 37 +++++++++++++++++++ .../qfieldcloud/core/views/files_views.py | 31 +++++++++++++++- docker-app/qfieldcloud/settings.py | 2 + docker-app/requirements.txt | 1 + 6 files changed, 107 insertions(+), 1 deletion(-) create mode 100644 docker-app/qfieldcloud/core/utils2/audit.py diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py index 3329b2a0e..a8fc3443f 100644 --- a/docker-app/qfieldcloud/core/models.py +++ b/docker-app/qfieldcloud/core/models.py @@ -7,6 +7,7 @@ from typing import Any, Iterable, Type import qfieldcloud.core.utils2.storage +from auditlog.registry import auditlog from django.contrib.auth.models import AbstractUser, UserManager from django.contrib.gis.db import models from django.core.exceptions import ValidationError @@ -1212,3 +1213,17 @@ class ApplyJobDelta(models.Model): def __str__(self): return f"{self.apply_job_id}:{self.delta_id}" + + +auditlog.register(User) +auditlog.register(UserAccount) +auditlog.register(Organization) +auditlog.register(OrganizationMember) +auditlog.register(Team) +auditlog.register(TeamMember) +auditlog.register(Project) +auditlog.register(ProjectCollaborator) +auditlog.register(Delta) +auditlog.register(ProcessProjectfileJob) +auditlog.register(PackageJob) +auditlog.register(ApplyJob) diff --git a/docker-app/qfieldcloud/core/utils.py b/docker-app/qfieldcloud/core/utils.py index f562c1439..6c1d0631a 100644 --- a/docker-app/qfieldcloud/core/utils.py +++ b/docker-app/qfieldcloud/core/utils.py @@ -277,6 +277,28 @@ def get_project_files_with_versions(project_id: str) -> Iterable[S3ObjectWithVer return list_files_with_versions(bucket, prefix, strip_prefix=True) +def get_project_file_with_versions( + project_id: str, filename: str +) -> Optional[S3ObjectWithVersions]: + """Returns a list of files and their 
versions. + + Args: + project_id (str): the project id + + Returns: + Iterable[S3ObjectWithVersions]: the list of files + """ + bucket = get_s3_bucket() + prefix = f"projects/{project_id}/files/{filename}" + files = [ + f + for f in list_files_with_versions(bucket, prefix, strip_prefix=True) + if f.latest.key == prefix + ] + + return files[0] if files else None + + def get_project_package_files(project_id: str) -> Iterable[S3Object]: """Returns a list of package files. diff --git a/docker-app/qfieldcloud/core/utils2/audit.py b/docker-app/qfieldcloud/core/utils2/audit.py new file mode 100644 index 000000000..9922b6f5e --- /dev/null +++ b/docker-app/qfieldcloud/core/utils2/audit.py @@ -0,0 +1,37 @@ +import json +from typing import Any, Dict, List, Union + +from auditlog.models import LogEntry +from django.contrib.auth.models import User +from django_currentuser.middleware import get_current_authenticated_user + + +def audit( + instance, + action: LogEntry.Action, + changes: Union[Dict[str, Any], List[Any], str] = None, + actor: User = None, + remote_addr: str = None, + additional_data: Any = None, +): + changes_json = None + + try: + if changes is not None: + changes_json = json.dumps(changes) + except Exception: + changes_json = json.dumps(str(changes)) + + if actor is None: + actor = get_current_authenticated_user() + + actor_id = actor.pk if actor else None + + return LogEntry.objects.log_create( + instance, + action=action, + changes=changes_json, + actor_id=actor_id, + remote_addr=remote_addr, + additional_data=additional_data, + ) diff --git a/docker-app/qfieldcloud/core/views/files_views.py b/docker-app/qfieldcloud/core/views/files_views.py index 9547b9365..0b2f270ea 100644 --- a/docker-app/qfieldcloud/core/views/files_views.py +++ b/docker-app/qfieldcloud/core/views/files_views.py @@ -4,6 +4,8 @@ from django.utils import timezone from qfieldcloud.core import exceptions, permissions_utils, utils from qfieldcloud.core.models import ProcessProjectfileJob, Project +from qfieldcloud.core.utils import get_project_file_with_versions +from qfieldcloud.core.utils2.audit import LogEntry, audit from rest_framework import permissions, status, views from rest_framework.parsers import MultiPartParser from rest_framework.response import Response @@ -143,7 +145,7 @@ def post(self, request, projectid, filename, format=None): ) request_file = request.FILES.get("file") - + old_object = get_project_file_with_versions(project.id, filename) sha256sum = utils.get_sha256(request_file) bucket = utils.get_s3_bucket() @@ -152,6 +154,10 @@ def post(self, request, projectid, filename, format=None): bucket.upload_fileobj(request_file, key, ExtraArgs={"Metadata": metadata}) + new_object = get_project_file_with_versions(project.id, filename) + + assert new_object + if is_qgis_project_file: project.project_filename = filename ProcessProjectfileJob.objects.create( @@ -161,6 +167,19 @@ def post(self, request, projectid, filename, format=None): project.data_last_updated_at = timezone.now() project.save() + if old_object: + audit( + project, + LogEntry.Action.UPDATE, + changes={filename: [old_object.latest.e_tag, new_object.latest.e_tag]}, + ) + else: + audit( + project, + LogEntry.Action.CREATE, + changes={filename: [None, new_object.latest.e_tag]}, + ) + return Response(status=status.HTTP_201_CREATED) def delete(self, request, projectid, filename): @@ -168,10 +187,20 @@ def delete(self, request, projectid, filename): key = utils.safe_join(f"projects/{projectid}/files/", filename) bucket = utils.get_s3_bucket() + 
old_object = get_project_file_with_versions(project.id, filename) + + assert old_object + bucket.object_versions.filter(Prefix=key).delete() if utils.is_qgis_project_file(filename): project.project_filename = None project.save() + audit( + project, + LogEntry.Action.DELETE, + changes={filename: [old_object.latest.e_tag, None]}, + ) + return Response(status=status.HTTP_200_OK) diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py index 685c05a4b..34055b6cf 100644 --- a/docker-app/qfieldcloud/settings.py +++ b/docker-app/qfieldcloud/settings.py @@ -70,6 +70,7 @@ "invitations", "django_cron", "timezone_field", + "auditlog", # Local "qfieldcloud.core", "qfieldcloud.notifs", @@ -89,6 +90,7 @@ "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", "django_currentuser.middleware.ThreadLocalUserMiddleware", + "auditlog.middleware.AuditlogMiddleware", "qfieldcloud.core.middleware.request_response_log.RequestResponseLogMiddleware", "qfieldcloud.core.middleware.timezone.TimezoneMiddleware", "axes.middleware.AxesMiddleware", diff --git a/docker-app/requirements.txt b/docker-app/requirements.txt index 1a1e082c4..7e26037bd 100644 --- a/docker-app/requirements.txt +++ b/docker-app/requirements.txt @@ -28,3 +28,4 @@ mkdocs>=1.2.3 django-timezone-field>=4.2.1 django-model-utils>=4.2.0 deprecated>=1.2.13 +django-auditlog==1.0a1 From c798dcc238998fae909b6c4c293be1511e3db311 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 29 Dec 2021 16:18:25 +0200 Subject: [PATCH 049/185] Fix export job HTTP 500 on already existing export jobs --- docker-app/qfieldcloud/core/views/qfield_files_views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-app/qfieldcloud/core/views/qfield_files_views.py b/docker-app/qfieldcloud/core/views/qfield_files_views.py index 2e713ac8d..555406bf7 100644 --- a/docker-app/qfieldcloud/core/views/qfield_files_views.py +++ b/docker-app/qfieldcloud/core/views/qfield_files_views.py @@ -64,7 +64,7 @@ def post(self, request, projectid): ).latest("started_at") if export_job: serializer = serializers.ExportJobSerializer(export_job) - return serializers.ExportJobSerializer(serializer.data) + return Response(serializer.data) if PackageJob.objects.filter(query).exists(): serializer = serializers.ExportJobSerializer(PackageJob.objects.get(query)) From 2d8322073f1c199c937b243a53a6a1a5920da4c1 Mon Sep 17 00:00:00 2001 From: Robert Pupel Date: Wed, 29 Dec 2021 17:31:06 +0100 Subject: [PATCH 050/185] Add job to trigger deployment on private repo --- .github/workflows/{build.yml => build_and_push.yml} | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) rename .github/workflows/{build.yml => build_and_push.yml} (90%) diff --git a/.github/workflows/build.yml b/.github/workflows/build_and_push.yml similarity index 90% rename from .github/workflows/build.yml rename to .github/workflows/build_and_push.yml index 67bf8ff98..3fea15e24 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build_and_push.yml @@ -3,7 +3,6 @@ on: push: branches: - master - - build-containers-with-github-actions tags: - "v*.*.*" jobs: @@ -100,3 +99,12 @@ jobs: file: ./docker-qgis/Dockerfile push: ${{ github.event_name != 'pull_request' }} tags: opengisch/qfieldcloud-qgis:${{ steps.prepare.outputs.tag }} + + - name: Trigger deployment on private repository + uses: peter-evans/repository-dispatch@v1 + with: + token: ${{ secrets.GIT_ACCESS_TOKEN }} + repository: opengisch/qfieldcloud-private + 
event-type: public_dispatch
+          client-payload: '{"version": "${{ steps.prepare.outputs.tag }}"}'
+

From e8ac9aa45f0147b3b837cf81fea808105f5f9293 Mon Sep 17 00:00:00 2001
From: Robert Pupel
Date: Wed, 29 Dec 2021 17:31:47 +0100
Subject: [PATCH 051/185] Add current branch name for test triggering deployment

---
 .github/workflows/build_and_push.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/build_and_push.yml b/.github/workflows/build_and_push.yml
index 3fea15e24..dfa4bdd84 100644
--- a/.github/workflows/build_and_push.yml
+++ b/.github/workflows/build_and_push.yml
@@ -3,6 +3,7 @@ on:
   push:
     branches:
       - master
+      - dispatch_deploy_after_build
     tags:
      - "v*.*.*"
 jobs:

From c2986d11d03e6ee3febf9e54dccd8b56c2e69401 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Tue, 28 Dec 2021 14:50:45 +0200
Subject: [PATCH 052/185] Fix false-alarm test failures caused by cached ContentType ids

The worker wrapper caches outdated ContentType ids during tests, since it runs
in a separate container than the tests, which then breaks the tests.
---
 .github/workflows/test.yml | 11 ++++-------
 .pre-commit-config.yaml | 2 +-
 .../qfieldcloud/core/management/commands/dequeue.py | 7 +++++++
 3 files changed, 12 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 830873100..e1ff8d98e 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -3,7 +3,7 @@ on: push
 jobs:
   test:
     name: Code check and tests
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v2
@@ -12,17 +12,14 @@ jobs:
       - name: Setup Python
         uses: actions/setup-python@v1
         with:
-          python-version: 3.8
+          python-version: '3.10'
       - name: Install pipenv
         run: pip install pipenv
+      - name: Check code formatting
+        uses: pre-commit/action@v2.0.3
       - name: Prepare docker-compose override file
         run: |
           ln -s docker-compose.override.local.yml docker-compose.override.yml
-      - name: Check code formatting
-        run: |
-          pipenv install pre_commit
-          pipenv install pyyaml
-          pipenv run python -m pre_commit run --all-files
       - name: Export the env variables file
         run: |
           cp .env.example .env

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0494ecc37..d5a29a4a2 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -23,7 +23,7 @@ repos:
   # Sort imports
   - repo: https://github.com/pycqa/isort
-    rev: "5.7.0"
+    rev: "5.10.1"
     hooks:
       - id: isort
         args: ["--profile", "black"]

diff --git a/docker-app/qfieldcloud/core/management/commands/dequeue.py b/docker-app/qfieldcloud/core/management/commands/dequeue.py
index 21b7fbb2e..99d8f3ce5 100644
--- a/docker-app/qfieldcloud/core/management/commands/dequeue.py
+++ b/docker-app/qfieldcloud/core/management/commands/dequeue.py
@@ -2,6 +2,8 @@
 import signal
 from time import sleep
 
+from django.conf import settings
+from django.contrib.contenttypes.models import ContentType
 from django.core.management.base import BaseCommand
 from django.db import transaction
 from django.db.models import Count, Q
@@ -41,6 +43,11 @@ def handle(self, *args, **options):
         while killer.alive:
             with use_test_db_if_exists():
+                # the worker wrapper caches outdated ContentType ids during tests since it runs in a separate
+                # container than the tests, which then the tests.
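+                # (ContentType.objects keeps a process-wide cache; every test
+                # run recreates the ContentType rows with new primary keys, so
+                # the cache must be cleared to force fresh lookups against the
+                # test database)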
+ if settings.DATABASES["default"]["NAME"].startswith("test_"): + ContentType.objects.clear_cache() + queued_job = None with transaction.atomic(): From 7f58210173bba454cff3f71a6ec55e8125b8f963 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 3 Jan 2022 18:23:59 +0200 Subject: [PATCH 053/185] API should return the latest package of the same project --- .github/workflows/build_and_push.yml | 1 - docker-app/qfieldcloud/core/views/qfield_files_views.py | 8 +++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build_and_push.yml b/.github/workflows/build_and_push.yml index dfa4bdd84..877f70418 100644 --- a/.github/workflows/build_and_push.yml +++ b/.github/workflows/build_and_push.yml @@ -108,4 +108,3 @@ jobs: repository: opengisch/qfieldcloud-private event-type: public_dispatch client-payload: '{"version": "${{ steps.prepare.outputs.tag }}"}' - diff --git a/docker-app/qfieldcloud/core/views/qfield_files_views.py b/docker-app/qfieldcloud/core/views/qfield_files_views.py index 555406bf7..ffcf72511 100644 --- a/docker-app/qfieldcloud/core/views/qfield_files_views.py +++ b/docker-app/qfieldcloud/core/views/qfield_files_views.py @@ -59,9 +59,11 @@ def post(self, request, projectid): # PackageJob.objects.filter(query).delete() if not project_obj.needs_repackaging: - export_job = PackageJob.objects.filter( - status=PackageJob.Status.FINISHED - ).latest("started_at") + export_job = ( + PackageJob.objects.filter(status=PackageJob.Status.FINISHED) + .filter(project=project_obj) + .latest("started_at") + ) if export_job: serializer = serializers.ExportJobSerializer(export_job) return Response(serializer.data) From 5f45d22cf8d9e0a6af9f4457c3791e2b6c359847 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 3 Jan 2022 18:36:26 +0200 Subject: [PATCH 054/185] Improve docs --- docker-app/qfieldcloud/core/management/commands/dequeue.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-app/qfieldcloud/core/management/commands/dequeue.py b/docker-app/qfieldcloud/core/management/commands/dequeue.py index 99d8f3ce5..16c879986 100644 --- a/docker-app/qfieldcloud/core/management/commands/dequeue.py +++ b/docker-app/qfieldcloud/core/management/commands/dequeue.py @@ -43,8 +43,8 @@ def handle(self, *args, **options): while killer.alive: with use_test_db_if_exists(): - # the worker wrapper caches outdated ContentType ids during tests since it runs in a separate - # container than the tests, which then the tests. 
+ # the worker-wrapper caches outdated ContentType ids during tests since + # the worker-wrapper and the tests reside in different containers if settings.DATABASES["default"]["NAME"].startswith("test_"): ContentType.objects.clear_cache() From 0ea62c4f760d535ffd50f90dc6a6bff1bcdf5f62 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 3 Jan 2022 19:00:36 +0200 Subject: [PATCH 055/185] Pip freeze all dependencies --- docker-app/requirements.txt | 114 +++++++++++++++++++++++++++--------- 1 file changed, 85 insertions(+), 29 deletions(-) diff --git a/docker-app/requirements.txt b/docker-app/requirements.txt index 7e26037bd..e6c30226c 100644 --- a/docker-app/requirements.txt +++ b/docker-app/requirements.txt @@ -1,31 +1,87 @@ -Django>=3.2,<3.3 -djangorestframework>=3.12.2,<3.13 -markdown>=3.3.3,<3.4 -django-filter>=21.1,<22 -gunicorn>=20.1,<20.2 -psycopg2-binary>=2.8.6,<2.9 -django-allauth>=0.44.0,<0.45 -pyyaml==5.4 -drf-yasg>=1.20.0,<1.21 +asgiref==3.4.1 +attrs==21.2.0 +beautifulsoup4==4.10.0 +boto3==1.18.65 +boto3-stubs==1.20.26 +botocore==1.21.65 +botocore-stubs==1.23.26 +certifi==2021.10.8 +cffi==1.15.0 +charset-normalizer==2.0.9 +click==8.0.3 +click-plugins==1.1.1 +cligj==0.7.2 +coreapi==2.3.3 +coreschema==0.0.4 coverage==5.3 -boto3>=1.18,<1.19 -boto3-stubs[s3]>=1.18.46 -django-storages>=1.11,<1.12 -sentry-sdk -jsonschema>=3.2.0,<3.3 -django-tables2>=2.4,<2.5 -django-bootstrap4>=3.0,<4.0 -django-cron==0.5 -django-invitations>=1.9.3,<1.10 -redis==3.5.3 -JSON-log-formatter>=0.3.0<0.4.0 -docker>=4.2,<4.3 -fiona>=1.8.20<2.0.0 -django-notifications-hq==1.6.0 -django-currentuser==0.5.3 -django-axes>=5.26.0,<6.0.0 -mkdocs>=1.2.3 -django-timezone-field>=4.2.1 -django-model-utils>=4.2.0 -deprecated>=1.2.13 +cryptography==36.0.1 +defusedxml==0.7.1 +Deprecated==1.2.13 +Django==3.2.10 +django-allauth==0.44.0 django-auditlog==1.0a1 +django-axes==5.28.0 +django-bootstrap4==3.0.1 +django-common-helpers==0.9.2 +django-cron==0.5.0 +django-currentuser==0.5.3 +django-filter==21.1 +django-invitations==1.9.3 +django-ipware==4.0.2 +django-jsonfield==1.4.1 +django-model-utils==4.2.0 +django-notifications-hq==1.6.0 +django-storages==1.11.1 +django-tables2==2.4.1 +django-timezone-field==4.2.1 +djangorestframework==3.12.4 +docker==4.2.2 +drf-yasg==1.20.0 +Fiona==1.8.20 +ghp-import==2.0.2 +gunicorn==20.1.0 +idna==3.3 +importlib-metadata==4.10.0 +inflection==0.5.1 +itypes==1.2.0 +Jinja2==3.0.3 +jmespath==0.10.0 +JSON-log-formatter==0.5.0 +jsonfield==3.1.0 +jsonschema==3.2.0 +Markdown==3.3.6 +MarkupSafe==2.0.1 +mergedeep==1.3.4 +mkdocs==1.2.3 +munch==2.5.0 +mypy-boto3-s3==1.20.17 +oauthlib==3.1.1 +packaging==21.3 +psycopg2-binary==2.8.6 +pycparser==2.21 +PyJWT==2.3.0 +pyparsing==3.0.6 +pyrsistent==0.18.0 +python-dateutil==2.8.2 +python3-openid==3.2.0 +pytz==2021.3 +PyYAML==5.4 +pyyaml-env-tag==0.1 +redis==3.5.3 +requests==2.26.0 +requests-oauthlib==1.3.0 +ruamel.yaml==0.17.17 +ruamel.yaml.clib==0.2.6 +s3transfer==0.5.0 +sentry-sdk==1.5.1 +six==1.16.0 +soupsieve==2.3.1 +sqlparse==0.4.2 +swapper==1.3.0 +typing-extensions==4.0.1 +uritemplate==4.1.1 +urllib3==1.26.7 +watchdog==2.1.6 +websocket-client==1.2.3 +wrapt==1.13.3 +zipp==3.6.0 From 44a4882484ff705f969dc889453e4bb1514113c2 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 4 Jan 2022 23:47:56 +0200 Subject: [PATCH 056/185] Make gunicorn using multiple workers and multiple threads --- .env.example | 5 +++++ docker-compose.yml | 9 ++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.env.example b/.env.example index 2a285270e..031b3702a 100644 
--- a/.env.example +++ b/.env.example @@ -59,3 +59,8 @@ DEFAULT_FROM_EMAIL=webmaster@localhost COMPOSE_PROJECT_NAME=qfieldcloud QFIELDCLOUD_DEFAULT_NETWORK=qfieldcloud_default QFIELDCLOUD_ADMIN_URI=admin/ + +GUNICORN_TIMEOUT_S=300 +GUNICORN_MAX_REQUESTS=300 +GUNICORN_WORKERS=3 +GUNICORN_THREADS=3 diff --git a/docker-compose.yml b/docker-compose.yml index 8b521d229..7aa6141d1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -13,7 +13,14 @@ services: build: context: ./docker-app restart: unless-stopped - command: gunicorn qfieldcloud.wsgi:application --bind 0.0.0.0:8000 -t 300 + command: > + gunicorn + qfieldcloud.wsgi:application + --bind 0.0.0.0:8000 + --timeout ${GUNICORN_TIMEOUT_S} + --max-requests ${GUNICORN_MAX_REQUESTS} + --workers ${GUNICORN_WORKERS} + --threads ${GUNICORN_THREADS} volumes: - static_volume:/usr/src/app/staticfiles - media_volume:/usr/src/app/mediafiles/ From e2170a43b054a1d8f2818853c246584c2adab8a7 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 5 Jan 2022 01:06:54 +0200 Subject: [PATCH 057/185] Improvements on the job execution inside the QGIS container - a more refined workflow definition - versioned workflow output - less settings to define a workflow step - less confusion - a new "output" key in the root feedback object to avoid using arr indices - better feedback when some tests are failing --- docker-app/qfieldcloud/core/serializers.py | 11 +- .../qfieldcloud/core/tests/test_delta.py | 42 ++- .../core/tests/test_qfield_file.py | 53 ++- .../qfieldcloud/core/views/package_views.py | 15 +- .../core/views/qfield_files_views.py | 15 +- docker-app/worker_wrapper/wrapper.py | 8 +- docker-qgis/entrypoint.py | 325 +++++++++--------- docker-qgis/utils.py | 182 ++++++++-- 8 files changed, 424 insertions(+), 227 deletions(-) diff --git a/docker-app/qfieldcloud/core/serializers.py b/docker-app/qfieldcloud/core/serializers.py index 08c4ddff4..26bf89d44 100644 --- a/docker-app/qfieldcloud/core/serializers.py +++ b/docker-app/qfieldcloud/core/serializers.py @@ -288,10 +288,15 @@ def get_layers(self, obj): if not obj.feedback: return None - steps = obj.feedback.get("steps", []) + if obj.status != Job.Status.FINISHED: + return None - if len(steps) > 2 and steps[1].get("stage", 1) == 2: - return steps[1]["outputs"]["layer_checks"] + if obj.feedback.get("feedback_version") == "2.0": + return obj.feedback["outputs"]["package_project"]["layer_checks"] + else: + steps = obj.feedback.get("steps", []) + if len(steps) > 2 and steps[1].get("stage", 1) == 2: + return steps[1]["outputs"]["layer_checks"] return None diff --git a/docker-app/qfieldcloud/core/tests/test_delta.py b/docker-app/qfieldcloud/core/tests/test_delta.py index 9f382f298..6bd96321b 100644 --- a/docker-app/qfieldcloud/core/tests/test_delta.py +++ b/docker-app/qfieldcloud/core/tests/test_delta.py @@ -6,10 +6,10 @@ import fiona import requests import rest_framework -from django.http.response import HttpResponseRedirect +from django.http.response import HttpResponse, HttpResponseRedirect from qfieldcloud.authentication.models import AuthToken from qfieldcloud.core import utils -from qfieldcloud.core.models import Project, ProjectCollaborator, User +from qfieldcloud.core.models import Job, Project, ProjectCollaborator, User from rest_framework import status from rest_framework.test import APITransactionTestCase @@ -72,6 +72,36 @@ def tearDownClass(cls): User.objects.all().delete() + def fail(self, msg: str, job: Job = None): + if job: + msg += f"\n\nOutput:\n================\n{job.output}\n================" + 
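+            # include the worker's error stack and the full feedback JSON, so a
+            # failing assertion also explains why the job itself failed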
+ if job.feedback: + if "error_stack" in job.feedback: + msg += "\n\nError:\n================" + for single_error_stack in job.feedback["error_stack"]: + msg += "\n" + msg += single_error_stack + + msg += f" {job.feedback['error']}\n================" + + feedback = json.dumps(job.feedback, indent=2, sort_keys=True) + msg += f"\n\nFeedback:\n================\n{feedback}\n================" + else: + msg += "\n\nFeedback: None" + + super().fail(msg) + + def assertHttpOk(self, response: HttpResponse): + try: + self.assertTrue( + rest_framework.status.is_success(response.status_code), response.json() + ) + except Exception: + self.assertTrue( + rest_framework.status.is_success(response.status_code), response.content + ) + def upload_project_files(self, project) -> Project: # Verify the original geojson file with open(testdata_path("delta/points.geojson")) as f: @@ -626,11 +656,15 @@ def upload_and_check_deltas( self.assertIn(payload[idx]["status"], status) self.assertEqual(payload[idx]["created_by"], created_by) + job = Job.objects.filter(project=self.project1).latest("updated_at") + for _ in range(10): time.sleep(2) response = self.client.get(uri) + self.assertHttpOk(response) + payload = response.json() payload = sorted(payload, key=lambda k: k["id"]) @@ -641,7 +675,7 @@ def upload_and_check_deltas( break if payload[idx]["status"] in failing_status: - self.fail(f"Got failing status {payload[idx]['status']}") + self.fail(f"Got failing status {payload[idx]['status']}", job=job) return delta_id, status, created_by = final_value @@ -652,4 +686,4 @@ def upload_and_check_deltas( self.assertEqual(payload[idx]["created_by"], created_by) return - self.fail("Worker didn't finish") + self.fail("Worker didn't finish", job=job) diff --git a/docker-app/qfieldcloud/core/tests/test_qfield_file.py b/docker-app/qfieldcloud/core/tests/test_qfield_file.py index a481ecb18..a310e26f4 100644 --- a/docker-app/qfieldcloud/core/tests/test_qfield_file.py +++ b/docker-app/qfieldcloud/core/tests/test_qfield_file.py @@ -1,3 +1,4 @@ +import json import logging import os import tempfile @@ -5,10 +6,10 @@ import psycopg2 import requests -from django.http.response import HttpResponseRedirect +from django.http.response import HttpResponse, HttpResponseRedirect from qfieldcloud.authentication.models import AuthToken from qfieldcloud.core.geodb_utils import delete_db_and_role -from qfieldcloud.core.models import Geodb, Project, User +from qfieldcloud.core.models import Geodb, Job, PackageJob, Project, User from rest_framework import status from rest_framework.test import APITransactionTestCase @@ -63,6 +64,32 @@ def tearDown(self): # Remove credentials self.client.credentials() + def fail(self, msg: str, job: Job = None): + if job: + msg += f"\n\nOutput:\n================\n{job.output}\n================" + + if job.feedback: + if "error_stack" in job.feedback: + msg += "\n\nError:\n================" + for single_error_stack in job.feedback["error_stack"]: + msg += "\n" + msg += single_error_stack + + msg += f" {job.feedback['error']}\n================" + + feedback = json.dumps(job.feedback, indent=2, sort_keys=True) + msg += f"\n\nFeedback:\n================\n{feedback}\n================" + else: + msg += "\n\nFeedback: None" + + super().fail(msg) + + def assertHttpOk(self, response: HttpResponse): + try: + self.assertTrue(status.is_success(response.status_code), response.json()) + except Exception: + self.assertTrue(status.is_success(response.status_code), response.content) + def test_list_files_for_qfield(self): 
self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) @@ -181,7 +208,10 @@ def test_download_file_for_qfield(self): response = self.client.get( "/api/v1/qfield-files/export/{}/".format(self.project1.id), ) + + self.assertHttpOk(response) payload = response.json() + if payload["status"] == "STATUS_EXPORTED": response = self.client.get( f"/api/v1/qfield-files/{self.project1.id}/project_qfield.qgs/" @@ -209,7 +239,10 @@ def test_download_file_for_qfield(self): ) return elif payload["status"] == "STATUS_ERROR": - self.fail("Worker failed with error") + self.fail( + "Worker failed with error", + job=PackageJob.objects.filter(project=self.project1).last(), + ) self.fail("Worker didn't finish") @@ -237,10 +270,15 @@ def test_list_files_for_qfield_broken_file(self): response = self.client.get( "/api/v1/qfield-files/export/{}/".format(self.project1.id), ) + + self.assertHttpOk(response) + if response.json()["status"] == "STATUS_ERROR": return - self.fail("Worker didn't finish") + self.fail( + "Worker didn't finish", job=Job.objects.filter(project=self.project1).last() + ) def test_downloaded_file_has_canvas_name(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) @@ -354,6 +392,8 @@ def test_download_project_with_broken_layer_datasources(self): "/api/v1/qfield-files/{}/".format(self.project1.id), ) + self.assertHttpOk(response) + export_payload = response.json() layer_ok = export_payload["layers"][ "points_c2784cf9_c9c3_45f6_9ce5_98a6047e4d6c" @@ -366,7 +406,10 @@ def test_download_project_with_broken_layer_datasources(self): self.assertFalse(layer_failed["valid"], layer_failed["status"]) return elif payload["status"] == "STATUS_ERROR": - self.fail("Worker failed with error") + self.fail( + "Worker failed with error", + job=Job.objects.filter(project=self.project1).last(), + ) self.fail("Worker didn't finish") diff --git a/docker-app/qfieldcloud/core/views/package_views.py b/docker-app/qfieldcloud/core/views/package_views.py index f89729c97..323b72366 100644 --- a/docker-app/qfieldcloud/core/views/package_views.py +++ b/docker-app/qfieldcloud/core/views/package_views.py @@ -52,12 +52,15 @@ def get(self, request, project_id): if not files: raise exceptions.InvalidJobError("Empty project package.") - steps = last_job.feedback.get("steps", []) - layers = ( - steps[1]["outputs"]["layer_checks"] - if len(steps) > 2 and steps[1].get("stage", 1) == 2 - else None - ) + if last_job.feedback.get("feedback_version") == "2.0": + layers = last_job.feedback["outputs"]["package_project"]["layer_checks"] + else: + steps = last_job.feedback.get("steps", []) + layers = ( + steps[1]["outputs"]["layer_checks"] + if len(steps) > 2 and steps[1].get("stage", 1) == 2 + else None + ) return Response( { diff --git a/docker-app/qfieldcloud/core/views/qfield_files_views.py b/docker-app/qfieldcloud/core/views/qfield_files_views.py index ffcf72511..1157c2f8e 100644 --- a/docker-app/qfieldcloud/core/views/qfield_files_views.py +++ b/docker-app/qfieldcloud/core/views/qfield_files_views.py @@ -151,12 +151,15 @@ def get(self, request, projectid): } ) - steps = export_job.feedback.get("steps", []) - layers = ( - steps[1]["outputs"]["layer_checks"] - if len(steps) > 2 and steps[1].get("stage", 1) == 2 - else None - ) + if export_job.feedback.get("feedback_version") == "2.0": + layers = export_job.feedback["outputs"]["package_project"]["layer_checks"] + else: + steps = export_job.feedback.get("steps", []) + layers = ( + steps[1]["outputs"]["layer_checks"] + if len(steps) > 2 and 
steps[1].get("stage", 1) == 2 + else None + ) return Response( { diff --git a/docker-app/worker_wrapper/wrapper.py b/docker-app/worker_wrapper/wrapper.py index 61e5729b6..daded3c42 100644 --- a/docker-app/worker_wrapper/wrapper.py +++ b/docker-app/worker_wrapper/wrapper.py @@ -323,7 +323,7 @@ def before_docker_run(self) -> None: json.dump(deltafile_contents, f) def after_docker_run(self) -> None: - delta_feedback = self.job.feedback["steps"][1]["outputs"]["delta_feedback"] + delta_feedback = self.job.feedback["outputs"]["apply_deltas"]["delta_feedback"] is_data_modified = True for feedback in delta_feedback: @@ -397,9 +397,9 @@ def get_context(self, *args) -> Dict[str, Any]: def after_docker_run(self) -> None: project = self.job.project - - project_details = self.job.feedback["steps"][3]["outputs"]["project_details"] - project.project_details = project_details + project.project_details = self.job.feedback["outputs"]["project_details"][ + "project_details" + ] thumbnail_filename = self.shared_tempdir.joinpath("thumbnail.png") with open(thumbnail_filename, "rb") as f: diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py index f1d3e1b27..01572619b 100755 --- a/docker-qgis/entrypoint.py +++ b/docker-qgis/entrypoint.py @@ -6,14 +6,14 @@ import os import tempfile from pathlib import Path, PurePath -from typing import Dict, List +from typing import Dict import boto3 import qfieldcloud.qgis.apply_deltas import qfieldcloud.qgis.process_projectfile from libqfieldsync.offline_converter import ExportType, OfflineConverter from libqfieldsync.project import ProjectConfiguration -from qfieldcloud.qgis.utils import Step +from qfieldcloud.qgis.utils import Step, StepOutput, WorkDirPath, Workflow from qgis.core import ( QgsApplication, QgsCoordinateTransform, @@ -67,7 +67,7 @@ def _get_sha256sum(filepath): return hasher.hexdigest() -def _download_project_directory(project_id: str, tmpdir: Path = None) -> Path: +def _download_project_directory(project_id: str, download_dir: Path = None) -> Path: """Download the files in the project "working" directory from the S3 Storage into a temporary directory. 
Returns the directory path""" @@ -76,12 +76,12 @@ def _download_project_directory(project_id: str, tmpdir: Path = None) -> Path: # Prefix of the working directory on the Storages working_prefix = "/".join(["projects", project_id, "files"]) - if not tmpdir: + if not download_dir: # Create a temporary directory - tmpdir = Path(tempfile.mkdtemp()) + download_dir = Path(tempfile.mkdtemp()) # Create a local working directory - working_dir = tmpdir.joinpath("files") + working_dir = download_dir.joinpath("files") working_dir.mkdir(parents=True) # Download the files @@ -90,12 +90,12 @@ def _download_project_directory(project_id: str, tmpdir: Path = None) -> Path: # Get the path of the file relative to the project directory relative_filename = key_filename.relative_to(*key_filename.parts[:2]) - absolute_filename = tmpdir.joinpath(relative_filename) + absolute_filename = download_dir.joinpath(relative_filename) absolute_filename.parent.mkdir(parents=True, exist_ok=True) bucket.download_file(obj.key, str(absolute_filename)) - return tmpdir + return download_dir def _upload_project_directory( @@ -126,7 +126,8 @@ def _upload_project_directory( sha256sum = _get_sha256sum(e) # Create the key - key = "/".join([prefix, str(elem.relative_to(*elem.parts[:4]))]) + filename = str(elem.relative_to(*elem.parts[:4])) + key = "/".join([prefix, filename]) metadata = {"sha256sum": sha256sum} if should_delete: @@ -139,10 +140,13 @@ def _upload_project_directory( # Check if the file is different on the storage if metadata["sha256sum"] != storage_metadata["sha256sum"]: + logging.info( + f'Uploading file "{key}", size: {elem.stat().st_size} bytes, sha256sum: "{sha256sum}" ' + ) bucket.upload_file(str(elem), key, ExtraArgs={"Metadata": metadata}) -def _call_qfieldsync_packager(project_filepath: Path, package_dir: Path) -> Dict: +def _call_qfieldsync_packager(project_filename: Path, package_dir: Path) -> Dict: """Call the function of QFieldSync to package a project for QField""" argvb = list(map(os.fsencode, [""])) @@ -150,11 +154,11 @@ def _call_qfieldsync_packager(project_filepath: Path, package_dir: Path) -> Dict qgis_app.initQgis() project = QgsProject.instance() - if not project_filepath.exists(): - raise FileNotFoundError(project_filepath) + if not project_filename.exists(): + raise FileNotFoundError(project_filename) - if not project.read(str(project_filepath)): - raise Exception(f"Unable to open file with QGIS: {project_filepath}") + if not project.read(str(project_filename)): + raise Exception(f"Unable to open file with QGIS: {project_filename}") layers = project.mapLayers() # Check if the layers are valid (i.e. 
if the datasources are available) @@ -260,168 +264,157 @@ def _call_qfieldsync_packager(project_filepath: Path, package_dir: Path) -> Dict def cmd_package_project(args): - tmpdir = Path(tempfile.mkdtemp()) - packagedir = tmpdir.joinpath("export") - packagedir.mkdir() - - steps: List[Step] = [ - Step( - id="download_project_directory", - name="Download Project Directory", - arguments={ - "tmpdir": tmpdir, - "project_id": args.projectid, - }, - arg_names=["project_id", "tmpdir"], - method=_download_project_directory, - return_names=["tmp_project_dir"], - public_returns=["tmp_project_dir"], - ), - Step( - id="export_project", - name="Package Project", - arguments={ - "project_filename": tmpdir.joinpath("files", args.project_file), - "exportdir": packagedir, - }, - arg_names=["project_filename", "exportdir"], - return_names=["layer_checks"], - output_names=["layer_checks"], - method=_call_qfieldsync_packager, - ), - Step( - id="upload_exported_project", - name="Upload Packaged Project", - arguments={ - "project_id": args.projectid, - "exportdir": packagedir, - "should_delete": True, - }, - arg_names=["project_id", "exportdir", "should_delete"], - method=_upload_project_directory, - ), - ] - - qfieldcloud.qgis.utils.run_task( - steps, + workflow = Workflow( + id="package_project", + name="Package Project", + version="2.0", + description="Packages a QGIS project to be used on QField. Converts layers for offline editing if configured.", + steps=[ + Step( + id="download_project_directory", + name="Download Project Directory", + arguments={ + "project_id": args.projectid, + "download_dir": WorkDirPath(mkdir=True), + }, + method=_download_project_directory, + return_names=["tmp_project_dir"], + ), + Step( + id="package_project", + name="Package Project", + arguments={ + "project_filename": WorkDirPath("files", args.project_file), + "package_dir": WorkDirPath("export", mkdir=True), + }, + method=_call_qfieldsync_packager, + return_names=["layer_checks"], + outputs=["layer_checks"], + ), + Step( + id="upload_packaged_project", + name="Upload Packaged Project", + arguments={ + "project_id": args.projectid, + "local_dir": WorkDirPath("export", mkdir=True), + "should_delete": True, + }, + method=_upload_project_directory, + ), + ], + ) + + qfieldcloud.qgis.utils.run_workflow( + workflow, Path("/io/feedback.json"), ) def _apply_delta(args): - tmpdir = Path(tempfile.mkdtemp()) - files_dir = tmpdir.joinpath("files") - steps: List[Step] = [ - Step( - id="download_project_directory", - name="Download Project Directory", - arguments={ - "project_id": args.projectid, - "tmpdir": tmpdir, - }, - arg_names=["project_id", "tmpdir"], - method=_download_project_directory, - return_names=["tmp_project_dir"], - public_returns=["tmp_project_dir"], - ), - Step( - id="apply_deltas", - name="Apply Deltas", - arguments={ - "project_filename": tmpdir.joinpath("files", args.project_file), - "delta_filename": "/io/deltafile.json", - "inverse": args.inverse, - "overwrite_conflicts": args.overwrite_conflicts, - }, - arg_names=[ - "project_filename", - "delta_filename", - "inverse", - "overwrite_conflicts", - ], - method=qfieldcloud.qgis.apply_deltas.delta_apply, - return_names=["delta_feedback"], - output_names=["delta_feedback"], - ), - Step( - id="upload_exported_project", - name="Upload Project", - arguments={ - "project_id": args.projectid, - "files_dir": files_dir, - "should_delete": False, - }, - arg_names=["project_id", "files_dir", "should_delete"], - method=_upload_project_directory, - ), - ] - - 
qfieldcloud.qgis.utils.run_task( - steps, + workflow = Workflow( + id="apply_changes", + name="Apply Changes", + version="2.0", + steps=[ + Step( + id="download_project_directory", + name="Download Project Directory", + arguments={ + "project_id": args.projectid, + "download_dir": WorkDirPath(mkdir=True), + }, + method=_download_project_directory, + return_names=["tmp_project_dir"], + ), + Step( + id="apply_deltas", + name="Apply Deltas", + arguments={ + "project_filename": WorkDirPath("files", args.project_file), + "delta_filename": "/io/deltafile.json", + "inverse": args.inverse, + "overwrite_conflicts": args.overwrite_conflicts, + }, + method=qfieldcloud.qgis.apply_deltas.delta_apply, + return_names=["delta_feedback"], + outputs=["delta_feedback"], + ), + Step( + id="upload_exported_project", + name="Upload Project", + arguments={ + "project_id": args.projectid, + "local_dir": WorkDirPath("files"), + "should_delete": False, + }, + method=_upload_project_directory, + ), + ], + ) + + qfieldcloud.qgis.utils.run_workflow( + workflow, Path("/io/feedback.json"), ) def cmd_process_projectfile(args): - project_id = args.projectid - project_file = args.project_file - - tmpdir = Path(tempfile.mkdtemp()) - project_filename = tmpdir.joinpath("files", project_file) - steps: List[Step] = [ - Step( - id="download_project_directory", - name="Download Project Directory", - arguments={ - "project_id": project_id, - "tmpdir": tmpdir, - }, - arg_names=["project_id", "tmpdir"], - method=_download_project_directory, - return_names=["tmp_project_dir"], - public_returns=["tmp_project_dir"], - ), - Step( - id="project_validity_check", - name="Project Validity Check", - arguments={ - "project_filename": project_filename, - }, - arg_names=["project_filename"], - method=qfieldcloud.qgis.process_projectfile.check_valid_project_file, - ), - Step( - id="opening_check", - name="Opening Check", - arguments={ - "project_filename": project_filename, - }, - arg_names=["project_filename"], - method=qfieldcloud.qgis.process_projectfile.load_project_file, - return_names=["project"], - public_returns=["project"], - ), - Step( - id="project_details", - name="Project Details", - arg_names=["project"], - method=qfieldcloud.qgis.process_projectfile.extract_project_details, - return_names=["project_details"], - output_names=["project_details"], - ), - Step( - id="generate_thumbnail_image", - name="Generate Thumbnail Image", - arguments={ - "thumbnail_filename": Path("/io/thumbnail.png"), - }, - arg_names=["project", "thumbnail_filename"], - method=qfieldcloud.qgis.process_projectfile.generate_thumbnail, - ), - ] - - qfieldcloud.qgis.utils.run_task( - steps, + workflow = Workflow( + id="process_projectfile", + name="Process Projectfile", + version="2.0", + steps=[ + Step( + id="download_project_directory", + name="Download Project Directory", + arguments={ + "project_id": args.projectid, + "download_dir": WorkDirPath(mkdir=True), + }, + method=_download_project_directory, + return_names=["tmp_project_dir"], + ), + Step( + id="project_validity_check", + name="Project Validity Check", + arguments={ + "project_filename": WorkDirPath("files", args.project_file), + }, + method=qfieldcloud.qgis.process_projectfile.check_valid_project_file, + ), + Step( + id="opening_check", + name="Opening Check", + arguments={ + "project_filename": WorkDirPath("files", args.project_file), + }, + method=qfieldcloud.qgis.process_projectfile.load_project_file, + return_names=["project"], + ), + Step( + id="project_details", + name="Project Details", + 
arguments={ + "project": StepOutput("opening_check", "project"), + }, + method=qfieldcloud.qgis.process_projectfile.extract_project_details, + return_names=["project_details"], + outputs=["project_details"], + ), + Step( + id="generate_thumbnail_image", + name="Generate Thumbnail Image", + arguments={ + "project": StepOutput("opening_check", "project"), + "thumbnail_filename": Path("/io/thumbnail.png"), + }, + method=qfieldcloud.qgis.process_projectfile.generate_thumbnail, + ), + ], + ) + + qfieldcloud.qgis.utils.run_workflow( + workflow, Path("/io/feedback.json"), ) diff --git a/docker-qgis/utils.py b/docker-qgis/utils.py index 460735f09..22ba617ae 100644 --- a/docker-qgis/utils.py +++ b/docker-qgis/utils.py @@ -1,4 +1,5 @@ import atexit +import inspect import json import logging import os @@ -143,6 +144,73 @@ def stop_app(): del QGISAPP +class WorkflowValidationException(Exception): + ... + + +class Workflow: + def __init__( + self, + id: str, + version: str, + name: str, + steps: List["Step"], + description: str = "", + ): + self.id = id + self.version = version + self.name = name + self.description = description + self.steps = steps + + self.validate() + + def validate(self): + if not self.steps: + raise WorkflowValidationException( + f'The workflow "{self.id}" should contain at least one step.' + ) + + all_step_returns = {} + for step in self.steps: + param_names = [] + sig = inspect.signature(step.method) + for param in sig.parameters.values(): + if ( + param.kind != inspect.Parameter.KEYWORD_ONLY + and param.kind != inspect.Parameter.POSITIONAL_OR_KEYWORD + ): + raise WorkflowValidationException( + f'The workflow "{self.id}" method "{step.method.__name__}" has a non keyword parameter "{param.name}".' + ) + + if param.name not in step.arguments: + raise WorkflowValidationException( + f'The workflow "{self.id}" method "{step.method.__name__}" has an argument "{param.name}" that is not available in the step definition "arguments", expected one of {list(step.arguments.keys())}.' + ) + + param_names.append(param.name) + + for name, value in step.arguments.items(): + if isinstance(value, StepOutput): + if value.step_id not in all_step_returns: + raise WorkflowValidationException( + f'The workflow "{self.id}" has step "{step.id}" that requires a non-existing step return value "{value.step_id}.{value.return_name}" for argument "{name}". Previous step with that id does not exist.' + ) + + if value.return_name not in all_step_returns[value.step_id]: + raise WorkflowValidationException( + f'The workflow "{self.id}" has step "{step.id}" that requires a non-existing step return value "{value.step_id}.{value.return_name}" for argument "{name}". Previous step with that id found, but returns no value with such name.' + ) + + if name not in param_names: + raise WorkflowValidationException( + f'The workflow "{self.id}" method "{step.method.__name__}" receives a parameter "{name}" that is not available in the method definition, expected one of {param_names}.' 
+                    )
+
+        all_step_returns[step.id] = all_step_returns.get(step.id, step.return_names)
+
+
 class Step:
     def __init__(
         self,
@@ -150,24 +218,38 @@ def __init__(
         name: str,
         method: Callable,
         arguments: Dict[str, Any] = {},
-        arg_names: List[str] = [],
         return_names: List[str] = [],
-        output_names: List[str] = [],
-        public_returns: List[str] = [],
+        outputs: List[str] = [],
     ):
         self.id = id
         self.name = name
         self.method = method
         self.arguments = arguments
-        self.arg_names = arg_names
         # names of method return values
         self.return_names = return_names
-        # names of method return values that will be part of the outputs
-        self.output_names = output_names
-        # names of method return values that will be available in arg_names for the next steps
-        self.public_returns = public_returns
+        # names of method return values that will be part of the outputs. They are assumed to be safe to be shown to the user.
+        self.outputs = outputs
         self.stage = 0
-        self.outputs = {}
+
+
+class StepOutput:
+    def __init__(self, step_id: str, return_name: str):
+        self.step_id = step_id
+        self.return_name = return_name
+
+
+class WorkDirPath:
+    def __init__(self, *parts: str, mkdir: bool = False) -> None:
+        self.parts = parts
+        self.mkdir = mkdir
+
+    def eval(self, root: Path) -> Path:
+        path = root.joinpath(*self.parts)
+
+        if self.mkdir:
+            path.mkdir(parents=True, exist_ok=True)
+
+        return path
 
 
 class BaseException(Exception):
@@ -277,8 +359,17 @@ def on_project_read(doc):
     return details
 
 
+def json_default(obj):
+    obj_str = type(obj).__qualname__
+    try:
+        obj_str += f" {str(obj)}"
+    except Exception:
+        obj_str += " "
+    return f"<{obj_str}>"
+
+
-def run_task(
-    steps: List[Step],
+def run_workflow(
+    workflow: Workflow,
     feedback_filename: Optional[Union[IO, Path]],
 ) -> Dict:
     """Executes the steps required to run a task and return structured feedback from the execution
@@ -290,55 +381,80 @@
     Some return values can used as arguments for next steps, as defined in `public_returns`.
 
     Args:
-        steps (List[Step]): ordered steps to be executed
+        workflow (Workflow): workflow to be executed
         feedback_filename (Optional[Union[IO, Path]]): write feedback to an IO device, to Path filename, or don't write it
     """
-    feedback = {}
+    feedback: Dict[str, Any] = {
+        "feedback_version": "2.0",
+        "workflow_version": workflow.version,
+        "workflow_id": workflow.id,
+        "workflow_name": workflow.name,
+    }
     # it may be modified after the successful completion of each step.
- returned_arguments = {} + step_returns = {} try: - for step in steps: + root_workdir = Path(tempfile.mkdtemp()) + for step in workflow.steps: with logger_context(step): arguments = { - **returned_arguments, **step.arguments, } - args = [arguments[arg_name] for arg_name in step.arg_names] - return_values = step.method(*args) + for name, value in arguments.items(): + if isinstance(value, StepOutput): + arguments[name] = step_returns[value.step_id][value.return_name] + elif isinstance(value, WorkDirPath): + arguments[name] = value.eval(root_workdir) + + return_values = step.method(**arguments) return_values = ( return_values if len(step.return_names) > 1 else (return_values,) ) - return_map = {} + step_returns[step.id] = {} for name, value in zip(step.return_names, return_values): - return_map[name] = value - - for output_name in step.output_names: - step.outputs[output_name] = return_map[output_name] - - for return_name in step.public_returns: - returned_arguments[return_name] = return_map[return_name] + step_returns[step.id][name] = value except Exception as err: feedback["error"] = str(err) (_type, _value, tb) = sys.exc_info() feedback["error_stack"] = traceback.format_tb(tb) finally: - feedback["steps"] = [ - { + feedback["steps"] = [] + feedback["outputs"] = {} + + for step in workflow.steps: + step_feedback = { + "id": step.id, "name": step.name, "stage": step.stage, - "outputs": step.outputs, + "returns": {}, } - for step in steps - ] + + if step.stage == 2: + step_feedback["returns"] = step_returns[step.id] + feedback["outputs"][step.id] = {} + for output_name in step.outputs: + feedback["outputs"][step.id][output_name] = step_returns[step.id][ + output_name + ] + + feedback["steps"].append(step_feedback) if feedback_filename in [sys.stderr, sys.stdout]: print("Feedback:") - print(json.dump(feedback, feedback_filename, indent=2, sort_keys=True)) + print( + json.dump( + feedback, + feedback_filename, + indent=2, + sort_keys=True, + default=json_default, + ) + ) elif isinstance(feedback_filename, Path): with open(feedback_filename, "w") as f: - json.dump(feedback, f, indent=2, sort_keys=True) + print(feedback) + json.dump(feedback, f, indent=2, sort_keys=True, default=json_default) return feedback From 19a4578e38eff92247b7e853359af1eee8adfbe7 Mon Sep 17 00:00:00 2001 From: stcz Date: Wed, 5 Jan 2022 21:25:33 +0100 Subject: [PATCH 058/185] Update .env.example to fix 'invalid sslmode value' --- .env.example | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.env.example b/.env.example index 2a285270e..08270d682 100644 --- a/.env.example +++ b/.env.example @@ -30,7 +30,8 @@ POSTGRES_PASSWORD=3shJDd2r7Twwkehb POSTGRES_DB=qfieldcloud_db POSTGRES_HOST=db POSTGRES_PORT=5432 -POSTGRES_SSLMODE=prefer # "prefer" OR "require" most of the times +# "prefer" OR "require" most of the times +POSTGRES_SSLMODE=prefer HOST_POSTGRES_PORT=5433 GEODB_HOST=geodb From d6bc8288ce6ecb35c2279649e60aef6969c8f0e5 Mon Sep 17 00:00:00 2001 From: stcz Date: Wed, 5 Jan 2022 21:37:53 +0100 Subject: [PATCH 059/185] Fix invalid syntax in docker-compose.override.local.yml --- docker-compose.override.local.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml index 70a342a1b..54c6b58db 100644 --- a/docker-compose.override.local.yml +++ b/docker-compose.override.local.yml @@ -102,8 +102,7 @@ services: createbuckets: image: minio/mc depends_on: - minio: - condition: service_healthy + - minio entrypoint: > /bin/sh -c " 
/usr/bin/mc config host add myminio ${STORAGE_ENDPOINT_URL} ${STORAGE_ACCESS_KEY_ID} ${STORAGE_SECRET_ACCESS_KEY}; From 8ea00ec8a4f1c43eed838c2a7cb19b85bbb1caa5 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Sun, 16 Jan 2022 23:17:24 +0200 Subject: [PATCH 060/185] Improve docs on certs for development --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 78916e1c8..19ec036f9 100644 --- a/README.md +++ b/README.md @@ -73,7 +73,7 @@ QFieldCloud will automatically generate a certificate and it's root certificate On Debian/Ubuntu, copy the root certificate to the directory with trusted certificates. Note the extension has been changed to `.crt`: - sudo mv ./config/nginx/certs/rootCA.pem /usr/local/share/ca-certificates/rootCA.crt + sudo cp ./conf/nginx/certs/rootCA.pem /usr/local/share/ca-certificates/rootCA.crt Trust the newly added certificate: @@ -86,7 +86,7 @@ Connecting with `curl` should return no errors: If you want to remove or change the root certificate, you need to remove the root certificate file and refresh the list of certificates: - rm /usr/local/share/ca-certificates/rootCA.crt + sudo rm /usr/local/share/ca-certificates/rootCA.crt sudo update-ca-certificates --fresh Now connecting with `curl` should fail with a similar error: From c3d978d2bb06472b35ae31f6b6a5a5abb10181f2 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 10 Dec 2021 00:03:04 +0200 Subject: [PATCH 061/185] Download files using nginx x-accel-redirect, so the s3 redirect remains internal --- conf/nginx/templates/default.conf.template | 42 +++++++++++++++++++ .../qfieldcloud/core/views/files_views.py | 13 +++++- 2 files changed, 53 insertions(+), 2 deletions(-) diff --git a/conf/nginx/templates/default.conf.template b/conf/nginx/templates/default.conf.template index 2ba618ad9..abd648585 100644 --- a/conf/nginx/templates/default.conf.template +++ b/conf/nginx/templates/default.conf.template @@ -1,3 +1,8 @@ +log_format upstreamlog '[$time_local] $remote_addr - $remote_user ' + 'to: $upstream_addr "$request" $status $body_bytes_sent ' + '"$http_referer" "$http_user_agent"' + 'rt=$request_time uct="$upstream_connect_time" uht="$upstream_header_time" urt="$upstream_response_time"'; + upstream django { server app:8000 fail_timeout=0; } @@ -60,4 +65,41 @@ server { set $target http://minio1:9000/; proxy_pass $target; } + + location /storage-download/ { + # Only allow internal redirects + internal; + + access_log /var/log/nginx/access.log upstreamlog; + set $redirect_uri "$upstream_http_redirect_uri"; + + # required DNS + resolver 8.8.8.8; + + # Stops the local disk from being written to (just forwards data through) + proxy_max_temp_file_size 0; + proxy_buffering off; + + # does not work with S3 otherwise + proxy_ssl_server_name on; + + # remove the authorization and the cookie headers + proxy_set_header Authorization ''; + proxy_set_header Cookie ''; + + # hide Object Storage related headers + proxy_hide_header Access-Control-Allow-Credentials; + proxy_hide_header Access-Control-Allow-Headers; + proxy_hide_header Access-Control-Allow-Methods; + proxy_hide_header Access-Control-Allow-Origin; + proxy_hide_header Access-Control-Expose-Headers; + proxy_hide_header X-Amz-Meta-Sha256sum; + proxy_hide_header X-Amz-Req-Time-Micros; + proxy_hide_header X-Amz-Request-Id; + proxy_hide_header X-Amz-Storage-Class; + proxy_hide_header X-Amz-Version-Id; + + proxy_pass $redirect_uri; + } + } diff --git a/docker-app/qfieldcloud/core/views/files_views.py 
b/docker-app/qfieldcloud/core/views/files_views.py index 0b2f270ea..32d390db1 100644 --- a/docker-app/qfieldcloud/core/views/files_views.py +++ b/docker-app/qfieldcloud/core/views/files_views.py @@ -1,6 +1,6 @@ from pathlib import PurePath -from django.http.response import HttpResponseRedirect +from django.http.response import HttpResponse, HttpResponseRedirect from django.utils import timezone from qfieldcloud.core import exceptions, permissions_utils, utils from qfieldcloud.core.models import ProcessProjectfileJob, Project @@ -125,7 +125,16 @@ def get(self, request, projectid, filename): HttpMethod="GET", ) - return HttpResponseRedirect(url) + if request.META["HTTP_HOST"].split(":")[-1] == request.META["WEB_HTTPS_PORT"]: + # Let's NGINX handle the redirect to the storage and streaming the file contents back to the client + response = HttpResponse() + response["X-Accel-Redirect"] = "/storage-download/" + response["redirect_uri"] = url + + return response + else: + # requesting the Django development webserver + return HttpResponseRedirect(url) def post(self, request, projectid, filename, format=None): project = Project.objects.get(id=projectid) From 2e186b667daa326804db4a15e07b6144767b8550 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 17 Jan 2022 03:19:04 +0200 Subject: [PATCH 062/185] Do not run Django development server --- docker-compose.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 7aa6141d1..18b414c20 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -100,7 +100,6 @@ services: LETSENCRYPT_EMAIL: ${LETSENCRYPT_EMAIL} LETSENCRYPT_STAGING: ${LETSENCRYPT_STAGING} LETSENCRYPT_RSA_KEY_SIZE: ${LETSENCRYPT_RSA_KEY_SIZE} - command: [nginx-debug, '-g', 'daemon off;'] logging: *default-logging depends_on: - app From 2f625036e8fe4545e9e220e5cc744c288913f8ee Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 17 Jan 2022 03:20:38 +0200 Subject: [PATCH 063/185] Hide the django http server and prefer the nginx proxy --- docker-compose.override.local.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml index 54c6b58db..c738bcf5d 100644 --- a/docker-compose.override.local.yml +++ b/docker-compose.override.local.yml @@ -5,7 +5,7 @@ services: app: ports: # allow direct access without nginx - - "8000:8000" + - "5001:8000" volumes: # mount the source for live reload - ./docker-app/qfieldcloud:/usr/src/app/qfieldcloud From 2b18c419c5557e74eca37345b6c06af2f8130b55 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 17 Jan 2022 03:20:49 +0200 Subject: [PATCH 064/185] Add docs about ports in use --- README.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/README.md b/README.md index 19ec036f9..16167778d 100644 --- a/README.md +++ b/README.md @@ -164,6 +164,20 @@ Note you may want to change the `LETSENCRYPT_EMAIL`, `LETSENCRYPT_RSA_KEY_SIZE` Based on this example +### Ports + +| service | port | configuration | local | development | production | +|---------------|------|----------------------|--------------------|--------------------|--------------------| +| nginx http | 80 | WEB_HTTP_PORT | :white_check_mark: | :white_check_mark: | :white_check_mark: | +| nginx https | 443 | WEB_HTTPS_PORT | :white_check_mark: | :white_check_mark: | :white_check_mark: | +| django http | 5001 | | :white_check_mark: | :x: | :x: | +| postgres | 5433 | HOST_POSTGRES_PORT | :white_check_mark: | :white_check_mark: | :white_check_mark: | +| redis | 6379 
| REDIS_PORT | :white_check_mark: | :white_check_mark: | :white_check_mark: |
| geodb | 5432 | HOST_POSTGRES_PORT | :white_check_mark: | :white_check_mark: | :x: |
| minio browser | 8010 | STORAGE_BROWSER_PORT | :white_check_mark: | :x: | :x: |
| smtp web | 5000 | | :white_check_mark: | :x: | :x: |
| smtp | 25 | | :white_check_mark: | :x: | :x: |
| imap | 143 | | :white_check_mark: | :x: | :x: |

### Logs

From 1c54e73d896cf82034f59919ee40f2938b403046 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Mon, 17 Jan 2022 14:43:08 +0200
Subject: [PATCH 065/185] Revert "Merge pull request #215 from
 stcz/fix-docker-compose.override.local.yml"

This reverts commit 364557bdff4f3f86b3fc1fa06ef8a57c3b3772fb, reversing
changes made to ea6b90ba974d2d6b013f5c4393a55c313bfab7c5.

---
 docker-compose.override.local.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml
index c738bcf5d..d43bd0102 100644
--- a/docker-compose.override.local.yml
+++ b/docker-compose.override.local.yml
@@ -102,7 +102,8 @@ services:
   createbuckets:
     image: minio/mc
     depends_on:
-      - minio
+      minio:
+        condition: service_healthy
     entrypoint: >
       /bin/sh -c "
      /usr/bin/mc config host add myminio ${STORAGE_ENDPOINT_URL} ${STORAGE_ACCESS_KEY_ID} ${STORAGE_SECRET_ACCESS_KEY};

From 793b0ff6df128df7a498f34a76e4b426224ffb40 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Mon, 17 Jan 2022 15:52:14 +0200
Subject: [PATCH 066/185] Fix tests

---
 docker-app/qfieldcloud/core/tests/test_delta.py  | 8 ++------
 docker-app/qfieldcloud/core/views/files_views.py | 4 +++-
 2 files changed, 5 insertions(+), 7 deletions(-)

diff --git a/docker-app/qfieldcloud/core/tests/test_delta.py b/docker-app/qfieldcloud/core/tests/test_delta.py
index 6bd96321b..780cbe479 100644
--- a/docker-app/qfieldcloud/core/tests/test_delta.py
+++ b/docker-app/qfieldcloud/core/tests/test_delta.py
@@ -19,9 +19,6 @@

 class QfcTestCase(APITransactionTestCase):
-
-    DJANGO_BASE_URL = "http://localhost:8000/api/v1/"
-
     def setUp(self):
         # Create a user
         self.user1 = User.objects.create_user(username="user1", password="abc123")
@@ -593,9 +590,8 @@ def test_change_and_delete_pushed_only_features(self):
     def get_file_contents(self, project, filename):
         response = self.client.get(f"/api/v1/files/{project.id}/{filename}/")

-        self.assertIsInstance(response, HttpResponseRedirect)
-
-        response = requests.get(response.url)
+        if isinstance(response, HttpResponseRedirect):
+            response = requests.get(response.url)

         self.assertTrue(status.is_success(response.status_code))
         self.assertEqual(get_filename(response), filename)

diff --git a/docker-app/qfieldcloud/core/views/files_views.py b/docker-app/qfieldcloud/core/views/files_views.py
index 32d390db1..82f23f609 100644
--- a/docker-app/qfieldcloud/core/views/files_views.py
+++ b/docker-app/qfieldcloud/core/views/files_views.py
@@ -125,7 +125,9 @@ def get(self, request, projectid, filename):
             HttpMethod="GET",
         )

-        if request.META["HTTP_HOST"].split(":")[-1] == request.META["WEB_HTTPS_PORT"]:
+        if request.META.get("HTTP_HOST", "").split(":")[-1] == request.META.get(
+            "WEB_HTTPS_PORT"
+        ):
             # Let's NGINX handle the redirect to the storage and streaming the file contents back to the client
             response = HttpResponse()
             response["X-Accel-Redirect"] = "/storage-download/"

From 7c79c07f1f0251021950cb70d54ce2e6a90f51b5 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Tue, 18 Jan 2022 20:23:53 +0200
Subject: [PATCH 067/185] Safer way to export variables from a .env file

Includes proper handling of
comments, newlines, and spaces
---
 .env.example                | 4 ++--
 .github/workflows/test.yml  | 2 +-
 scripts/init_letsencrypt.sh | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.env.example b/.env.example
index 707ccad70..e59d7aeb5 100644
--- a/.env.example
+++ b/.env.example
@@ -2,7 +2,7 @@ DEBUG=1

 QFIELDCLOUD_HOST=localhost
 DJANGO_SETTINGS_MODULE=qfieldcloud.settings
-DJANGO_ALLOWED_HOSTS='localhost 127.0.0.1 0.0.0.0'
+DJANGO_ALLOWED_HOSTS=localhost 127.0.0.1 0.0.0.0

 SECRET_KEY=change_me
@@ -37,7 +37,7 @@ HOST_POSTGRES_PORT=5433
 GEODB_HOST=geodb
 GEODB_PORT=5432
 GEODB_USER=postgres
-GEODB_PASSWORD='KUAa7h!G&wQEmkS3'
+GEODB_PASSWORD=KUAa7h!G&wQEmkS3
 GEODB_DB=postgres

 SENTRY_DSN=

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index e1ff8d98e..7719cc8e8 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -23,7 +23,7 @@ jobs:
       - name: Export the env variables file
         run: |
           cp .env.example .env
-          set -a; source .env; set +a
+          eval $(egrep "^[^#;]" .env | xargs -d'\n' -n1 | sed -E 's/(\w+)=(.*)/export \1='"'"'\2'"'"'/g')
       - name: Pull docker containers
         run: docker-compose pull
       - name: Build and run docker containers

diff --git a/scripts/init_letsencrypt.sh b/scripts/init_letsencrypt.sh
index ebf6f1904..693f96172 100755
--- a/scripts/init_letsencrypt.sh
+++ b/scripts/init_letsencrypt.sh
@@ -7,7 +7,7 @@ if ! [ -x "$(command -v docker-compose)" ]; then
   exit 1
 fi

-set -a; source .env; set +a
+eval $(egrep "^[^#;]" .env | xargs -d'\n' -n1 | sed -E 's/(\w+)=(.*)/export \1='"'"'\2'"'"'/g')

 CONFIG_PATH="./conf/nginx"

From bd84c1ab48f1feb1fba1884b729200b82a9c9482 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 18 Jan 2022 19:04:09 +0000
Subject: [PATCH 068/185] Bump django from 3.2.10 to 3.2.11 in /docker-app

Bumps [django](https://github.com/django/django) from 3.2.10 to 3.2.11.
- [Release notes](https://github.com/django/django/releases)
- [Commits](https://github.com/django/django/compare/3.2.10...3.2.11)

---
updated-dependencies:
- dependency-name: django
  dependency-type: direct:production
...
Signed-off-by: dependabot[bot] --- docker-app/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-app/requirements.txt b/docker-app/requirements.txt index e6c30226c..90c96be26 100644 --- a/docker-app/requirements.txt +++ b/docker-app/requirements.txt @@ -17,7 +17,7 @@ coverage==5.3 cryptography==36.0.1 defusedxml==0.7.1 Deprecated==1.2.13 -Django==3.2.10 +Django==3.2.11 django-allauth==0.44.0 django-auditlog==1.0a1 django-axes==5.28.0 From 1df1a9080d96b34dc2c3facb550f473c4da11942 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 18 Jan 2022 21:12:37 +0200 Subject: [PATCH 069/185] Run tests also on pull requests --- .github/workflows/test.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 7719cc8e8..23124a08a 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,5 +1,7 @@ name: Test -on: push +on: + - push + - pull_request jobs: test: name: Code check and tests From 5f2a115803a15e98529952499d6b85d3b4fc1ad6 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Sun, 23 Jan 2022 19:24:58 +0200 Subject: [PATCH 070/185] Add status key in the project response --- docker-app/qfieldcloud/core/serializers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docker-app/qfieldcloud/core/serializers.py b/docker-app/qfieldcloud/core/serializers.py index 26bf89d44..a26e568ce 100644 --- a/docker-app/qfieldcloud/core/serializers.py +++ b/docker-app/qfieldcloud/core/serializers.py @@ -69,6 +69,7 @@ class Meta: "data_last_updated_at", "can_repackage", "needs_repackaging", + "status", "user_role", "user_role_origin", ) From c2cbb3374356020355bed4ba370d685d4ac99330 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 24 Jan 2022 02:02:38 +0200 Subject: [PATCH 071/185] Increase timeouts when uploading huge files --- conf/nginx/templates/default.conf.template | 3 +++ 1 file changed, 3 insertions(+) diff --git a/conf/nginx/templates/default.conf.template b/conf/nginx/templates/default.conf.template index abd648585..8fc80c330 100644 --- a/conf/nginx/templates/default.conf.template +++ b/conf/nginx/templates/default.conf.template @@ -45,6 +45,9 @@ server { proxy_set_header X-Forwarded-Proto $scheme; proxy_set_header Host $http_host; + proxy_read_timeout 300; + proxy_send_timeout 300; + proxy_redirect off; proxy_pass http://django; } From dd82c537d1a48607ecf073e115299aa796e644ff Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 25 Jan 2022 00:59:32 +0200 Subject: [PATCH 072/185] Ignore newcoming deltas with the same id --- docker-app/qfieldcloud/core/exceptions.py | 8 ----- .../qfieldcloud/core/tests/test_delta.py | 2 +- .../qfieldcloud/core/views/deltas_views.py | 29 ++++++++----------- 3 files changed, 13 insertions(+), 26 deletions(-) diff --git a/docker-app/qfieldcloud/core/exceptions.py b/docker-app/qfieldcloud/core/exceptions.py index 23dd0945f..3c9c96d19 100644 --- a/docker-app/qfieldcloud/core/exceptions.py +++ b/docker-app/qfieldcloud/core/exceptions.py @@ -101,14 +101,6 @@ class DeltafileValidationError(QFieldCloudException): status_code = status.HTTP_400_BAD_REQUEST -class DeltafileDuplicationError(QFieldCloudException): - """Raised when a deltafile with the same id has already been uploaded""" - - code = "duplicate_deltafile" - message = "Deltafile already uploaded" - status_code = status.HTTP_400_BAD_REQUEST - - class NoDeltasToApplyError(QFieldCloudException): """Raised when a deltafile validation fails""" diff --git 
a/docker-app/qfieldcloud/core/tests/test_delta.py b/docker-app/qfieldcloud/core/tests/test_delta.py
index 780cbe479..851fc2179 100644
--- a/docker-app/qfieldcloud/core/tests/test_delta.py
+++ b/docker-app/qfieldcloud/core/tests/test_delta.py
@@ -251,7 +251,7 @@ def test_push_apply_delta_file_twice(self):
             ],
         )

-        self.assertFalse(
+        self.assertTrue(
             self.upload_deltas(project, "singlelayer_singledelta_diff_content.json")
         )

diff --git a/docker-app/qfieldcloud/core/views/deltas_views.py b/docker-app/qfieldcloud/core/views/deltas_views.py
index 879c9fb98..f3990eba8 100644
--- a/docker-app/qfieldcloud/core/views/deltas_views.py
+++ b/docker-app/qfieldcloud/core/views/deltas_views.py
@@ -4,7 +4,6 @@

 from django.contrib.auth import get_user_model
 from django.db import transaction
-from django.db.utils import IntegrityError
 from django.utils.decorators import method_decorator
 from drf_yasg.utils import swagger_auto_schema
 from qfieldcloud.core import exceptions, permissions_utils, utils
@@ -60,6 +59,7 @@ def post(self, request, projectid):
             raise exceptions.EmptyContentError()

         request_file = request.data["file"]
+        created_deltas = []

         try:
             deltafile_json = json.load(request_file)
@@ -71,18 +71,12 @@
             deltas = deltafile_json.get("deltas", [])
             delta_ids = sorted([str(delta["uuid"]) for delta in deltas])
             existing_delta_ids = [
-                str(delta.id)
-                for delta in Delta.objects.filter(
-                    deltafile_id=deltafile_id,
-                ).order_by("id")
+                str(v)
+                for v in Delta.objects.filter(id__in=delta_ids)
+                .order_by("id")
+                .values_list("id", flat=True)
             ]

-            if len(existing_delta_ids) != 0:
-                if delta_ids == existing_delta_ids:
-                    return Response()
-                else:
-                    raise exceptions.DeltafileDuplicationError()
-
             if project_file is None:
                 raise exceptions.NoQGISProjectError()

@@ -93,6 +87,10 @@

             with transaction.atomic():
                 for delta in deltas:
+                    if delta["uuid"] in existing_delta_ids:
+                        logger.warning(f'Duplicate delta id: {delta["uuid"]}')
+                        continue
+
                     delta_obj = Delta(
                         id=delta["uuid"],
                         deltafile_id=deltafile_id,
@@ -107,6 +105,7 @@
                         delta_obj.last_status = Delta.Status.UNPERMITTED

                     delta_obj.save(force_insert=True)
+                    created_deltas.append(delta_obj)

         except Exception as err:
             if request_file:
@@ -118,18 +117,14 @@

             logger.exception(err)

-            if isinstance(err, IntegrityError):
-                raise exceptions.DeltafileDuplicationError()
-            elif isinstance(err, exceptions.NoQGISProjectError):
-                raise err
-            elif isinstance(err, exceptions.DeltafileDuplicationError):
+            if isinstance(err, exceptions.NoQGISProjectError):
                 raise err
             elif isinstance(err, exceptions.DeltafileValidationError):
                 raise err
             else:
                 raise exceptions.QFieldCloudException() from err

-        if not jobs.apply_deltas(
+        if created_deltas and not jobs.apply_deltas(
             project_obj,
             self.request.user,
             project_file,

From 6762bf61ee1966919d06f66e84c0636fa92e96bb Mon Sep 17 00:00:00 2001
From: Mathieu Pellerin
Date: Tue, 25 Jan 2022 17:52:19 +0700
Subject: [PATCH 073/185] Typo fix in README.md

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 16167778d..d302c6269 100644
--- a/README.md
+++ b/README.md
@@ -144,7 +144,7 @@ Run and build the docker containers

     docker-compose -f docker-compose.yml -f docker-compose.override.dev.yml up -d --build

     # prod server
-    docker-compose -f docker-compose.yml -f
docker-compose.override.prod.yml up -d --build Run the django database migrations From 37a8b3ea0474b2ad68b54a3f16e44d74b5872303 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 27 Jan 2022 06:09:59 +0200 Subject: [PATCH 074/185] Fix migrations on deltas for 3d geometry --- .../core/migrations/0051_auto_20211125_0444.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py b/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py index d9399aebf..4bf860e50 100644 --- a/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py +++ b/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py @@ -51,8 +51,10 @@ class Migration(migrations.Migration): old_geom = ST_Transform( ST_SetSRID( - ST_GeomFromText( - jsonb_extract_path_text(core_delta.content, 'old', 'geometry') + ST_Force2D( + ST_GeomFromText( + jsonb_extract_path_text(core_delta.content, 'old', 'geometry') + ) ), subquery.srid ), @@ -61,8 +63,10 @@ class Migration(migrations.Migration): new_geom = ST_Transform( ST_SetSRID( - ST_GeomFromText( - jsonb_extract_path_text(core_delta.content, 'new', 'geometry') + ST_Force2D( + ST_GeomFromText( + jsonb_extract_path_text(core_delta.content, 'new', 'geometry') + ) ), subquery.srid ), @@ -89,8 +93,8 @@ class Migration(migrations.Migration): ELSE NULL END INTO srid; - NEW.old_geom := ST_Transform( ST_SetSRID( ST_GeomFromText( jsonb_extract_path_text(NEW.content, 'old', 'geometry') ), srid ), 4326 ); - NEW.new_geom := ST_Transform( ST_SetSRID( ST_GeomFromText( jsonb_extract_path_text(NEW.content, 'new', 'geometry') ), srid ), 4326 ); + NEW.old_geom := ST_Transform( ST_SetSRID( ST_Force2D( ST_GeomFromText( jsonb_extract_path_text(NEW.content, 'old', 'geometry') ) ), srid ), 4326 ); + NEW.new_geom := ST_Transform( ST_SetSRID( ST_Force2D( ST_GeomFromText( jsonb_extract_path_text(NEW.content, 'new', 'geometry') ) ), srid ), 4326 ); RETURN NEW; END; $$ From c25a6089d9be0673ad0fa7389bfaa05b775f9c88 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 27 Jan 2022 06:33:10 +0200 Subject: [PATCH 075/185] Add documentation about submodules and fix Nyuki banner Fix #230 @nirvn :) --- README.md | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index d302c6269..2adde1cc6 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ QFieldCloud allows seamless synchronization of your field data with your spatial # Hosted solution If you're interested in quickly getting up and running, we suggest subscribing to the version hosted by OPENGIS.ch at https://qfield.cloud. This is also the instance that is integrated by default into QField. 
-QFieldCloud logo +QFieldCloud logo ## Documentation @@ -20,6 +20,17 @@ Permissions documentation is [here](https://github.com/opengisch/qfieldcloud/blo ## Development +### Clone the repository + +Clone the repository and all its submodules: + + git clone --recurse-submodules git://github.com/opengisch/qfieldcloud.git + +To fetch upstream development, don't forget to update the submodules too: + + git pull --recurse-submodules && git submodule update --recursive + + ### Launch a local instance Copy the `.env.example` into `.env` file and configure it to your From 309f5df179f8a814d4a9b2e3b650ad4fd743bc51 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 27 Jan 2022 10:10:41 +0200 Subject: [PATCH 076/185] Trim whitespace --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2adde1cc6..cd50cbf38 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ Permissions documentation is [here](https://github.com/opengisch/qfieldcloud/blo Clone the repository and all its submodules: git clone --recurse-submodules git://github.com/opengisch/qfieldcloud.git - + To fetch upstream development, don't forget to update the submodules too: git pull --recurse-submodules && git submodule update --recursive From 6ff92e8041899879be700288f98fbbe778fcf498 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 27 Jan 2022 15:54:25 +0200 Subject: [PATCH 077/185] Fix migrations on deltas for nan coordinates in the geometry --- .../core/migrations/0051_auto_20211125_0444.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py b/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py index 4bf860e50..d3ec31356 100644 --- a/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py +++ b/docker-app/qfieldcloud/core/migrations/0051_auto_20211125_0444.py @@ -53,7 +53,7 @@ class Migration(migrations.Migration): ST_SetSRID( ST_Force2D( ST_GeomFromText( - jsonb_extract_path_text(core_delta.content, 'old', 'geometry') + REPLACE( jsonb_extract_path_text(core_delta.content, 'old', 'geometry'), 'nan', '0' ) ) ), subquery.srid @@ -65,7 +65,7 @@ class Migration(migrations.Migration): ST_SetSRID( ST_Force2D( ST_GeomFromText( - jsonb_extract_path_text(core_delta.content, 'new', 'geometry') + REPLACE( jsonb_extract_path_text(core_delta.content, 'new', 'geometry'), 'nan', '0' ) ) ), subquery.srid @@ -93,8 +93,8 @@ class Migration(migrations.Migration): ELSE NULL END INTO srid; - NEW.old_geom := ST_Transform( ST_SetSRID( ST_Force2D( ST_GeomFromText( jsonb_extract_path_text(NEW.content, 'old', 'geometry') ) ), srid ), 4326 ); - NEW.new_geom := ST_Transform( ST_SetSRID( ST_Force2D( ST_GeomFromText( jsonb_extract_path_text(NEW.content, 'new', 'geometry') ) ), srid ), 4326 ); + NEW.old_geom := ST_Transform( ST_SetSRID( ST_Force2D( REPLACE( ST_GeomFromText( jsonb_extract_path_text(NEW.content, 'old', 'geometry') ), 'nan', '0' ) ), srid ), 4326 ); + NEW.new_geom := ST_Transform( ST_SetSRID( ST_Force2D( REPLACE( ST_GeomFromText( jsonb_extract_path_text(NEW.content, 'new', 'geometry') ), 'nan', '0' ) ), srid ), 4326 ); RETURN NEW; END; $$ From c5c185cd7301f06c6842438b02a6bb2e1b6a4d2b Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 1 Feb 2022 13:38:23 +0200 Subject: [PATCH 078/185] Add limit argument how many users should be invited per run --- .../core/management/commands/inviteusers.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git 
a/docker-app/qfieldcloud/core/management/commands/inviteusers.py b/docker-app/qfieldcloud/core/management/commands/inviteusers.py index 0d87c7d28..6b780d7ed 100644 --- a/docker-app/qfieldcloud/core/management/commands/inviteusers.py +++ b/docker-app/qfieldcloud/core/management/commands/inviteusers.py @@ -1,3 +1,5 @@ +from datetime import datetime + from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand from qfieldcloud.core.invitations_utils import invite_user_by_email @@ -15,6 +17,7 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("--inviter", type=str, required=True) + parser.add_argument("--limit", type=int, default=30) parser.add_argument("--emails", type=str, nargs="+", required=True) parser.add_argument("--exit-on-failure", action="store_true") @@ -24,6 +27,7 @@ def handle(self, *args, **options): inviter_username = options.get("inviter") emails = options.get("emails", []) exit_on_failure = options.get("exit-on-failure") + sent_emails_limit = options.get("limit", 0) try: inviter = User.objects.get(username=inviter_username) @@ -31,13 +35,23 @@ def handle(self, *args, **options): print(f'ERROR: Failed to find user "{inviter_username}"!') exit(1) + sent_emails_count = 0 + for email in emails: + if sent_emails_count >= sent_emails_limit: + break + success, message = invite_user_by_email(email, inviter) if success: - print(f"SUCCESS: invitation sent to {email}.") + sent_emails_count += 1 + print( + f"{datetime.now().isoformat()}\tSUCCESS\tinvitation sent to {email}." + ) else: - print(f"WARNING: invitation not sent to {email}. {message}") + print( + f"{datetime.now().isoformat()}\tWARNING\tinvitation not sent to {email}. {message}" + ) if exit_on_failure: exit(1) From 375258495872722e5f745b087621bc817043da82 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 1 Feb 2022 14:37:41 +0200 Subject: [PATCH 079/185] Expiry date to 365 days, if 0 - invites are never active --- docker-app/qfieldcloud/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py index 34055b6cf..61787244a 100644 --- a/docker-app/qfieldcloud/settings.py +++ b/docker-app/qfieldcloud/settings.py @@ -276,7 +276,7 @@ # Django invitations configurations # https://github.com/bee-keeper/django-invitations#additional-configuration -INVITATIONS_INVITATION_EXPIRY = 0 # integer in days, 0 for no expiration +INVITATIONS_INVITATION_EXPIRY = 365 # integer in days, 0 for no expiration INVITATIONS_INVITATION_ONLY = True INVITATIONS_ACCEPT_INVITE_AFTER_SIGNUP = True INVITATIONS_GONE_ON_ACCEPT_ERROR = False From fc7186d1f6f3584ec9ad808cbea9e4d01d790886 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 1 Feb 2022 20:28:30 +0200 Subject: [PATCH 080/185] Fix misleading docs --- docker-app/qfieldcloud/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py index 61787244a..90621c39a 100644 --- a/docker-app/qfieldcloud/settings.py +++ b/docker-app/qfieldcloud/settings.py @@ -276,7 +276,7 @@ # Django invitations configurations # https://github.com/bee-keeper/django-invitations#additional-configuration -INVITATIONS_INVITATION_EXPIRY = 365 # integer in days, 0 for no expiration +INVITATIONS_INVITATION_EXPIRY = 365 # integer in days, 0 disables invitations INVITATIONS_INVITATION_ONLY = True INVITATIONS_ACCEPT_INVITE_AFTER_SIGNUP = True INVITATIONS_GONE_ON_ACCEPT_ERROR = False 
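
Taken together, the `--limit` option and the expiry setting above keep invitation traffic bounded per run. As a rough sketch (not part of any patch; the inviter username and addresses are placeholders, and a standard Django setup is assumed), the same command can be driven from Python through `call_command`:

    from django.core.management import call_command

    # Sends at most 5 invitations in this run; addresses beyond the limit
    # are left untouched and can be retried by a later run.
    call_command(
        "inviteusers",
        inviter="admin",  # placeholder: must be an existing username
        limit=5,
        emails=["anna@example.org", "ben@example.org", "carla@example.org"],
    )

Because only successfully sent invitations count towards the limit, rerunning the command with the same address list eventually works through the whole batch.
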
From c0695717b0b6a1520cae4e4cc4729fec98004be8 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 2 Feb 2022 01:21:30 +0200 Subject: [PATCH 081/185] Fix unable to add the owner of an organization to a team --- docker-app/qfieldcloud/core/models.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py index a8fc3443f..5b95a58ed 100644 --- a/docker-app/qfieldcloud/core/models.py +++ b/docker-app/qfieldcloud/core/models.py @@ -699,7 +699,10 @@ class Meta: ) def clean(self) -> None: - if not self.team.team_organization.members.filter(member=self.member): + if ( + self.team.team_organization.members.filter(member=self.member).count() == 0 + and self.team.team_organization.organization_owner != self.member + ): raise ValidationError( _("Cannot add team member that is not an organization member.") ) From 958f29a8de930a579cf2368b644e5a0339b6cda0 Mon Sep 17 00:00:00 2001 From: olivierdalang Date: Wed, 2 Feb 2022 16:10:06 +0100 Subject: [PATCH 082/185] Add management command to purge old file versions Implements: 1y7uwvn Usage: docker-compose exec app python manage.py purge_old_file_versions --force --keep_count 2 --- .../commands/purge_old_file_versions.py | 86 +++++++++++++++++++ 1 file changed, 86 insertions(+) create mode 100644 docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py diff --git a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py new file mode 100644 index 000000000..2fc9cfc2f --- /dev/null +++ b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py @@ -0,0 +1,86 @@ +from collections import defaultdict + +from django.core.management.base import BaseCommand, CommandError +from qfieldcloud.core import utils +from qfieldcloud.core.models import Project + + +class Command(BaseCommand): + """ + Deletes old versions of files + """ + + PROMPT_TXT = "This will purge old files for all projects. Type 'yes' to continue, or 'no' to cancel: " + + def add_arguments(self, parser): + parser.add_argument( + "--projects", + type=str, + help="Comma separated list of ids of projects to prune. 
If unset, will purge all projects",
+        )
+        parser.add_argument(
+            "--force",
+            action="store_true",
+            help="Prevent confirmation prompt when purging all projects",
+        )
+        parser.add_argument(
+            "--keep_count", type=int, default=10, help="How many versions to keep"
+        )
+
+    def handle(self, *args, **options):
+
+        # Determine project ids to work on
+        projects_ids = options.get("projects")
+        if not projects_ids:
+            if options.get("force") is not True and input(Command.PROMPT_TXT) != "yes":
+                raise CommandError("Purging old file versions cancelled.")
+            projects_ids = Project.objects.values_list("id", flat=True)
+        else:
+            projects_ids = projects_ids.split(",")
+
+        # Get the affected projects
+        projects_qs = Project.objects.all()
+        if projects_ids:
+            projects_qs = projects_qs.filter(pk__in=projects_ids)
+
+        bucket = utils.get_s3_bucket()
+
+        for project_id in projects_ids:
+            print(f"Processing project {project_id}")
+
+            prefix = f"projects/{project_id}/files/"
+            keep_count = options.get("keep_count")
+
+            # All versions under prefix
+            all_versions = bucket.object_versions.filter(Prefix=prefix)
+
+            # Organize the versions by file in a dict
+            old_versions_by_file = defaultdict(list)
+            for version in all_versions:
+                # The latest is not an old version
+                if version.is_latest:
+                    continue
+                old_versions_by_file[version.key].append(version)
+
+            # Process file by file
+            for filename, old_versions in old_versions_by_file.items():
+
+                # Sort by date (newest first)
+                old_versions.sort(key=lambda i: i.last_modified, reverse=True)
+
+                # Skip the newest N
+                old_versions_to_purge = old_versions[keep_count:]
+
+                # Debug print
+                all_count = len(old_versions)
+                topurge_count = len(old_versions_to_purge)
+                print(
+                    f"{filename}: will purge {topurge_count} out of {all_count} old versions"
+                )
+
+                # Remove the N oldest
+                for old_version in old_versions_to_purge:
+                    old_version.delete()
+                    # TODO: audit ? take implementation from files_views.py:211
+
+        print("done !")

From 83f62616d18485b1960de896a426381ab6208c14 Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Wed, 2 Feb 2022 17:07:09 +0100
Subject: [PATCH 083/185] followup (hardcode counts according to account type,
 better output)

---
 .../commands/purge_old_file_versions.py       | 57 +++++++++++--------
 1 file changed, 33 insertions(+), 24 deletions(-)

diff --git a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
index 2fc9cfc2f..8fee63614 100644
--- a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
+++ b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
@@ -2,15 +2,17 @@

 from django.core.management.base import BaseCommand, CommandError
 from qfieldcloud.core import utils
-from qfieldcloud.core.models import Project
+from qfieldcloud.core.models import Project, UserAccount


 class Command(BaseCommand):
-    """
-    Deletes old versions of files
+
+    help = """
+    Deletes old versions of files. Will keep only the 3 most recent versions
+    for COMMUNITY accounts and the 10 most recent for PRO accounts.
     """

-    PROMPT_TXT = "This will purge old files for all projects. Type 'yes' to continue, or 'no' to cancel: "
+    PROMPT_TXT = "This will purge old files for all projects.
Rerun with --force, or type 'yes' to continue, or 'no' to cancel: "

     def add_arguments(self, parser):
         parser.add_argument(
@@ -23,35 +25,41 @@ def add_arguments(self, parser):
             action="store_true",
             help="Prevent confirmation prompt when purging all projects",
         )
-        parser.add_argument(
-            "--keep_count", type=int, default=10, help="How many versions to keep"
-        )

     def handle(self, *args, **options):

         # Determine project ids to work on
-        projects_ids = options.get("projects")
+        proj_ids = options.get("projects")
+
+        # Get the affected projects
+        if not proj_ids:
             if options.get("force") is not True and input(Command.PROMPT_TXT) != "yes":
                 raise CommandError("Purging old file versions cancelled.")
-            projects_ids = Project.objects.values_list("id", flat=True)
+            proj_instances = Project.objects.all()
         else:
-            projects_ids = projects_ids.split(",")
+            proj_instances = Project.objects.filter(pk__in=proj_ids.split(","))

-        # Get the affected projects
-        projects_qs = Project.objects.all()
-        if projects_ids:
-            projects_qs = projects_qs.filter(pk__in=projects_ids)
+        # We'll need useraccount type
+        proj_instances = proj_instances.prefetch_related("owner__useraccount")

         bucket = utils.get_s3_bucket()
-
-        for project_id in projects_ids:
-            print(f"Processing project {project_id}")
-
-            prefix = f"projects/{project_id}/files/"
-            keep_count = options.get("keep_count")
-
-            # All versions under prefix
+        for proj_instance in proj_instances:
+
+            print(f"Processing {proj_instance}")
+
+            # Determine account type
+            account_type = proj_instance.owner.useraccount.account_type
+            if account_type == UserAccount.TYPE_COMMUNITY:
+                keep_count = 3
+            elif account_type == UserAccount.TYPE_PRO:
+                keep_count = 10
+            else:
+                print(f"⚠️ Unknown account type - skipping purge ⚠️")
+                continue
+            print(f"Keeping {keep_count} versions")
+
+            # Get all files versions for that project
+            prefix = f"projects/{proj_instance.pk}/files/"
             all_versions = bucket.object_versions.filter(Prefix=prefix)

             # Organize the versions by file in a dict
             old_versions_by_file = defaultdict(list)
             for version in all_versions:
                 # The latest is not an old version
                 if version.is_latest:
                     continue
                 old_versions_by_file[version.key].append(version)

             # Process file by file
             for filename, old_versions in old_versions_by_file.items():

                 # Sort by date (newest first)
                 old_versions.sort(key=lambda i: i.last_modified, reverse=True)

                 # Skip the newest N
                 old_versions_to_purge = old_versions[keep_count:]

                 # Debug print
                 all_count = len(old_versions)
                 topurge_count = len(old_versions_to_purge)
                 print(
-                    f"{filename}: will purge {topurge_count} out of {all_count} old versions"
+                    f"- {filename}: will purge {topurge_count} out of {all_count} old versions"
                 )

                 # Remove the N oldest
                 for old_version in old_versions_to_purge:
+                    # TODO: any way to batch those ? will probably get slow on production
                     old_version.delete()
                     # TODO: audit ? take implementation from files_views.py:211

         print("done !")

From f5635395e5edd60c9d32514717a6417fbba2e4ac Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Wed, 2 Feb 2022 17:13:31 +0100
Subject: [PATCH 084/185] followup (add cronjob)

---
 docker-app/qfieldcloud/core/cron.py | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/docker-app/qfieldcloud/core/cron.py b/docker-app/qfieldcloud/core/cron.py
index 9d63ec854..41cb83c9d 100644
--- a/docker-app/qfieldcloud/core/cron.py
+++ b/docker-app/qfieldcloud/core/cron.py
@@ -1,5 +1,6 @@
 import logging

+from django.core.management import call_command
 from django_cron import CronJobBase, Schedule
 from invitations.utils import get_invitation_model

@@ -35,3 +36,15 @@ def do(self):
         logger.info(
             f'Resend {len(invitation_emails)} previously failed invitation(s) to: {", ".join(invitation_emails)}'
         )
+
+
+class PurgeOldFileVersions(CronJobBase):
+    """Purges old versions of files of all projects using the purge_old_file_versions
+    management command.
+ """ + + schedule = Schedule(runs_every_mins=60 * 24 * 7) + code = "qfieldcloud.purge_old_file_versions" + + def do(self): + call_command("purge_old_file_versions", "--force") From adf9c9b631f4e37dd708c1a8b47bd23db7f9b439 Mon Sep 17 00:00:00 2001 From: olivierdalang Date: Wed, 2 Feb 2022 17:39:23 +0100 Subject: [PATCH 085/185] followup (fix flake8) --- .../core/management/commands/purge_old_file_versions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py index 8fee63614..645cc4367 100644 --- a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py +++ b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py @@ -54,7 +54,7 @@ def handle(self, *args, **options): elif account_type == UserAccount.TYPE_PRO: keep_count = 10 else: - print(f"⚠️ Unknown account type - skipping purge ⚠️") + print("⚠️ Unknown account type - skipping purge ⚠️") continue print(f"Keeping {keep_count} versions") From f4cb075974e2e0b915bcf24abc540d4cac0cfeb1 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 2 Feb 2022 19:01:02 +0200 Subject: [PATCH 086/185] Allow running parallel jobs on the server --- docker-app/qfieldcloud/core/serializers.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker-app/qfieldcloud/core/serializers.py b/docker-app/qfieldcloud/core/serializers.py index a26e568ce..e8e3f870b 100644 --- a/docker-app/qfieldcloud/core/serializers.py +++ b/docker-app/qfieldcloud/core/serializers.py @@ -336,7 +336,8 @@ def check_create_new_job(self): ModelClass: Job = self.Meta.model last_active_job = ( ModelClass.objects.filter( - status__in=[Job.Status.PENDING, Job.Status.QUEUED, Job.Status.STARTED] + project=self.initial_data.get("project_id"), + status__in=[Job.Status.PENDING, Job.Status.QUEUED, Job.Status.STARTED], ) .only("id") .order_by("-started_at", "-created_at") From 3bc927dfa26f2fdf7e44af758a554db35fdacb32 Mon Sep 17 00:00:00 2001 From: olivierdalang Date: Wed, 2 Feb 2022 18:04:14 +0100 Subject: [PATCH 087/185] Fix storage size calculation to include old versions - note: not sure this exactly matches what's used on disk ? 
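
For reference, a minimal sketch of the difference (illustrative only: the
bucket name is a placeholder, and a versioned bucket reachable through the
boto3 resource API is assumed):

import boto3

bucket = boto3.resource("s3").Bucket("my-qfieldcloud-bucket")  # placeholder
prefix = "projects/<project_id>/"

# Latest version of each key only -- what was summed before this change:
latest_only = sum(obj.size for obj in bucket.objects.filter(Prefix=prefix))

# Every stored version, old ones included -- what is summed now
# ("or 0" guards against delete markers, whose size is None):
all_versions = sum(v.size or 0 for v in bucket.object_versions.filter(Prefix=prefix))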
---
 docker-app/qfieldcloud/core/models.py |  1 +
 docker-app/qfieldcloud/core/utils.py  | 10 +++++-----
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py
index 5b95a58ed..1a535a7df 100644
--- a/docker-app/qfieldcloud/core/models.py
+++ b/docker-app/qfieldcloud/core/models.py
@@ -909,6 +909,7 @@ def __str__(self):
         return self.name + " (" + str(self.id) + ")" + " owner: " + self.owner.username

     def storage_size(self):
+        """Retrieves the storage size from S3"""
         return utils.get_s3_project_size(self.id)

     @property
diff --git a/docker-app/qfieldcloud/core/utils.py b/docker-app/qfieldcloud/core/utils.py
index 6c1d0631a..91f3f88f0 100644
--- a/docker-app/qfieldcloud/core/utils.py
+++ b/docker-app/qfieldcloud/core/utils.py
@@ -249,15 +249,15 @@ def get_deltafile_schema_validator() -> jsonschema.Draft7Validator:

 def get_s3_project_size(project_id: str) -> int:
     """Return the size in MiB of the project on the storage, included the
-    exported files"""
+    exported files and their versions"""

     bucket = get_s3_bucket()

-    prefix = "projects/{}/".format(project_id)
-    total_size = 0
+    prefix = f"projects/{project_id}/"

-    for obj in bucket.objects.filter(Prefix=prefix):
-        total_size += obj.size
+    total_size = 0
+    for version in bucket.object_versions.filter(Prefix=prefix):
+        total_size += version.size

     return round(total_size / (1024 * 1024), 3)

From 2825e3c95c01c4200774b3731a146da70b7633c1 Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Wed, 2 Feb 2022 18:13:08 +0100
Subject: [PATCH 088/185] followup (fix typo)

---
 docker-app/qfieldcloud/core/cron.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker-app/qfieldcloud/core/cron.py b/docker-app/qfieldcloud/core/cron.py
index 41cb83c9d..96400c4eb 100644
--- a/docker-app/qfieldcloud/core/cron.py
+++ b/docker-app/qfieldcloud/core/cron.py
@@ -43,7 +43,7 @@ class PurgeOldFileVersions(CronJobBase):
     management command.
     """

-    schedule = Schedule(runs_every_mins=60 * 24 * 7)
+    schedule = Schedule(run_every_mins=60 * 24 * 7)
     code = "qfieldcloud.purge_old_file_versions"

     def do(self):

From 9f6b76d3baf7faaf5f0029a404f44c1faa6f7f30 Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Wed, 2 Feb 2022 18:19:01 +0100
Subject: [PATCH 089/185] [precommit] move check_envvars hook to CI (as this
 script isn't cross platform)

---
 .github/workflows/test.yml |  3 +++
 .pre-commit-config.yaml    | 11 -----------
 2 files changed, 3 insertions(+), 11 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 23124a08a..ac75380ec 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -22,6 +22,9 @@ jobs:
       - name: Prepare docker-compose override file
         run: |
           ln -s docker-compose.override.local.yml docker-compose.override.yml
+      - name: Check env vars configuration
+        run: |
+          scripts/check_envvars.sh
       - name: Export the env variables file
         run: |
           cp .env.example .env
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index d5a29a4a2..1caa0f1df 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -39,14 +39,3 @@ repos:
     rev: "3.9.0"
     hooks:
       - id: flake8
-
-  # Make sure the envvars from the .env.example are available in the docker-compose configurations
-  - repo: local
-    hooks:
-      - id: check_envvars
-        name: Check envvars
-        description: Make sure the envvars from the .env.example are available in the docker-compose configurations
-        entry: scripts/check_envvars.sh
-        language: script
-        pass_filenames: false
-        types: [text]

From 16487538b60758c40aff349f45eb0c9a11d4cebf Mon Sep 17 00:00:00 2001
From: Olivier Dalang
Date: Thu, 3 Feb 2022 10:14:02 +0100
Subject: [PATCH 090/185] followup (suggested on PR)

Co-authored-by: Ivan Ivanov
---
 .../core/management/commands/purge_old_file_versions.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
index 645cc4367..3153b441d 100755
--- a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
+++ b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
@@ -83,7 +83,7 @@ def handle(self, *args, **options):
                 all_count = len(old_versions)
                 topurge_count = len(old_versions_to_purge)
                 print(
-                    f"- {filename}: will purge {topurge_count} out of {all_count} old versions"
+                    f'Purging {topurge_count} out of {all_count} old versions for "{filename}"...'
                 )

From 2ff366ec6bfe3ce6203af63c4e75fe39be03a53e Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Thu, 3 Feb 2022 11:56:18 +0100
Subject: [PATCH 091/185] followup: use util to retrieve versions

---
 .../commands/purge_old_file_versions.py | 25 +++++--------------
 1 file changed, 6 insertions(+), 19 deletions(-)

diff --git a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
index 3153b441d..7a4fd7bc6 100755
--- a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
+++ b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
@@ -1,5 +1,3 @@
-from collections import defaultdict
-
 from django.core.management.base import BaseCommand, CommandError
 from qfieldcloud.core import utils
 from qfieldcloud.core.models import Project, UserAccount
@@ -39,10 +37,8 @@ def handle(self, *args, **options):
         else:
             proj_instances = Project.objects.filter(pk__in=proj_ids.split(","))

-        # We'll need useraccount type
+        # Iterate through projects
         proj_instances = proj_instances.prefetch_related("owner__useraccount")
-
-        bucket = utils.get_s3_bucket()
         for proj_instance in proj_instances:

             print(f"Processing {proj_instance}")
@@ -58,20 +54,11 @@ def handle(self, *args, **options):
                 continue
             print(f"Keeping {keep_count} versions")

-            # Get all files versions for that project
-            prefix = f"projects/{proj_instance.pk}/files/"
-            all_versions = bucket.object_versions.filter(Prefix=prefix)
-
-            # Organize the versions by file in a dict
-            old_versions_by_file = defaultdict(list)
-            for version in all_versions:
-                # The latest is not an old version
-                if version.is_latest:
-                    continue
-                old_versions_by_file[version.key].append(version)
-
             # Process file by file
-            for filename, old_versions in old_versions_by_file.items():
+            for file in utils.get_project_files_with_versions(proj_instance.pk):
+
+                filename = file.latest.name
+                old_versions = file.versions

                 # Sort by date (newest first)
                 old_versions.sort(key=lambda i: i.last_modified, reverse=True)
@@ -89,7 +76,7 @@ def handle(self, *args, **options):
                 # Remove the N oldest
                 for old_version in old_versions_to_purge:
                     # TODO: any way to batch those ? will probaby get slow on production
-                    old_version.delete()
+                    old_version._data.delete()
                     # TODO: audit ? take implementation from files_views.py:211

         print("done !")

From e2472ba92d731b4c8bb96e5332f153420e94efb1 Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Thu, 3 Feb 2022 12:22:43 +0100
Subject: [PATCH 092/185] followup: test

---
 .../qfieldcloud/core/tests/test_qgis_file.py | 57 ++++++++++++++++++-
 1 file changed, 56 insertions(+), 1 deletion(-)

diff --git a/docker-app/qfieldcloud/core/tests/test_qgis_file.py b/docker-app/qfieldcloud/core/tests/test_qgis_file.py
index 634904c27..f3eef8481 100644
--- a/docker-app/qfieldcloud/core/tests/test_qgis_file.py
+++ b/docker-app/qfieldcloud/core/tests/test_qgis_file.py
@@ -1,13 +1,15 @@
 import filecmp
+import io
 import logging
 import tempfile
 import time

 import requests
+from django.core.management import call_command
 from django.http.response import HttpResponseRedirect
 from qfieldcloud.authentication.models import AuthToken
 from qfieldcloud.core import utils
-from qfieldcloud.core.models import Project, User
+from qfieldcloud.core.models import Project, User, UserAccount
 from rest_framework import status
 from rest_framework.test import APITransactionTestCase

@@ -562,3 +564,56 @@ def test_upload_10mb_file(self):
         self.assertEqual("bigfile.big", response.json()[0]["name"])
         self.assertGreater(response.json()[0]["size"], 10000000)
         self.assertLess(response.json()[0]["size"], 11000000)
+
+    def test_purge_old_versions(self):
+        """This tests manual purging of old versions"""
+
+        self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key)
+
+        apipath = f"/api/v1/files/{self.project1.id}/file.txt/"
+
+        def read_version(file, n):
+            """returns the content of version"""
+            return file.versions[n]._data.get()["Body"].read().decode()
+
+        # Create 20 versions
+        for i in range(20):
+            test_file = io.StringIO(f"v{i}")
+            self.client.post(apipath, {"file": test_file}, format="multipart")
+
+        # Ensure it worked
+        files = list(self.project1.files)
+        self.assertEqual(len(files[0].versions), 20)
+        self.assertEqual(read_version(files[0], 0), "v19")
+        self.assertEqual(read_version(files[0], 9), "v0")
+
+        # Purge another project has no effect
+        other = Project.objects.create(name="other")
+        call_command("purge_old_file_versions", "--force", "--projects", other.pk)
+        files = list(self.project1.files)
+        self.assertEqual(len(files[0].versions), 20)
+
+        # Purge pro account keeps 10 versions
+        self.user1.useraccount.account_type = UserAccount.TYPE_PRO
+        self.user1.useraccount.save()
+        call_command("purge_old_file_versions", "--force")
+        files = list(self.project1.files)
+        self.assertEqual(len(files[0].versions), 10)
+        self.assertEqual(read_version(files[0], 0), "v19")
+        self.assertEqual(read_version(files[0], 9), "v10")
+
+        # Purge community account keeps 3 versions
+        self.user1.useraccount.account_type = UserAccount.TYPE_COMMUNITY
+        self.user1.useraccount.save()
+        call_command("purge_old_file_versions", "--force")
+        files = list(self.project1.files)
+        self.assertEqual(len(files[0].versions), 3)
+        self.assertEqual(read_version(files[0], 0), "v19")
+        self.assertEqual(read_version(files[0], 3), "v17")
+
+        # Purge is idempotent
+        call_command("purge_old_file_versions", "--force")
+        files = list(self.project1.files)
+        self.assertEqual(len(files[0].versions), 3)
+        self.assertEqual(read_version(files[0], 0), "v19")
+        self.assertEqual(read_version(files[0], 3), "v17")

From c69300e1d7c41799f10c28c344342f923222d368 Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Thu, 3 Feb 2022 12:28:43 +0100
Subject: [PATCH 093/185] followup: readd a check just in case (we'll see later
 if we instead throw an exception)

---
 .../core/management/commands/purge_old_file_versions.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
index 7a4fd7bc6..ebd1f8bf2 100755
--- a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
+++ b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
@@ -75,6 +75,13 @@ def handle(self, *args, **options):

                 # Remove the N oldest
                 for old_version in old_versions_to_purge:
+                    if old_version.is_latest:
+                        # This is not supposed to happen, as versions were sorted above,
+                        # but leaving it here as a security measure
+                        print(
+                            "⚠️ Unexpected behaviour in purging old files - check sorting of versions ⚠️"
+                        )
+                        continue
                     # TODO: any way to batch those ? will probaby get slow on production
                     old_version._data.delete()
                     # TODO: audit ? take implementation from files_views.py:211

From c73c0f2197742eba5a3cb0c61ca2f2128021ce7d Mon Sep 17 00:00:00 2001
From: Olivier Dalang
Date: Thu, 3 Feb 2022 12:30:35 +0100
Subject: [PATCH 094/185] typo in docstring

Co-authored-by: Ivan Ivanov
---
 docker-app/qfieldcloud/core/utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker-app/qfieldcloud/core/utils.py b/docker-app/qfieldcloud/core/utils.py
index 91f3f88f0..3e675a226 100644
--- a/docker-app/qfieldcloud/core/utils.py
+++ b/docker-app/qfieldcloud/core/utils.py
@@ -248,7 +248,7 @@ def get_deltafile_schema_validator() -> jsonschema.Draft7Validator:

 def get_s3_project_size(project_id: str) -> int:
-    """Return the size in MiB of the project on the storage, included the
+    """Return the size in MiB of the project on the storage, including the
     exported files and their versions"""

     bucket = get_s3_bucket()

From c1dc36ff838427af87639928abb9fd38085de5d5 Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Thu, 3 Feb 2022 13:01:07 +0100
Subject: [PATCH 095/185] followup: throw instead of log exceptions (as per
 https://github.com/opengisch/qfieldcloud/pull/241#pullrequestreview-870827994)

---
 .../management/commands/purge_old_file_versions.py | 11 ++++-------
 1 file changed, 4 insertions(+), 7 deletions(-)

diff --git a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
index ebd1f8bf2..047b66284 100755
--- a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
+++ b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
@@ -50,8 +50,7 @@ def handle(self, *args, **options):
             elif account_type == UserAccount.TYPE_PRO:
                 keep_count = 10
             else:
-                print("⚠️ Unknown account type - skipping purge ⚠️")
-                continue
+                raise NotImplementedError(f"Unknown account type {account_type}")
             print(f"Keeping {keep_count} versions")

             # Process file by file
@@ -77,11 +76,9 @@ def handle(self, *args, **options):
                 for old_version in old_versions_to_purge:
                     if old_version.is_latest:
                         # This is not supposed to happen, as versions were sorted above,
-                        # but leaving it here as a security measure
-                        print(
-                            "⚠️ Unexpected behaviour in purging old files - check sorting of versions ⚠️"
-                        )
-                        continue
+                        # but leaving it here as a security measure in case version
+                        # ordering changes for some reason.
+                        raise Exception("Trying to delete latest version")
                     # TODO: any way to batch those ? will probaby get slow on production
                     old_version._data.delete()
                     # TODO: audit ? take implementation from files_views.py:211

From 3ad404383da34da35625cf7f2dd26507ab929e91 Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Thu, 3 Feb 2022 14:10:29 +0100
Subject: [PATCH 096/185] followup: fix tests

---
 .../qfieldcloud/core/tests/test_qgis_file.py | 43 ++++++++++---------
 1 file changed, 22 insertions(+), 21 deletions(-)

diff --git a/docker-app/qfieldcloud/core/tests/test_qgis_file.py b/docker-app/qfieldcloud/core/tests/test_qgis_file.py
index f3eef8481..6fbc7bc84 100644
--- a/docker-app/qfieldcloud/core/tests/test_qgis_file.py
+++ b/docker-app/qfieldcloud/core/tests/test_qgis_file.py
@@ -572,8 +572,14 @@ def test_purge_old_versions(self):

         apipath = f"/api/v1/files/{self.project1.id}/file.txt/"

-        def read_version(file, n):
-            """returns the content of version"""
+        def count_versions():
+            """counts the versions in first file of project1"""
+            file = list(self.project1.files)[0]
+            return len(file.versions)
+
+        def read_version(n):
+            """returns the content of version in first file of project1"""
+            file = list(self.project1.files)[0]
             return file.versions[n]._data.get()["Body"].read().decode()

         # Create 20 versions
@@ -582,38 +588,33 @@ def read_version(file, n):
             self.client.post(apipath, {"file": test_file}, format="multipart")

         # Ensure it worked
-        files = list(self.project1.files)
-        self.assertEqual(len(files[0].versions), 20)
-        self.assertEqual(read_version(files[0], 0), "v19")
-        self.assertEqual(read_version(files[0], 9), "v0")
+        self.assertEqual(count_versions(), 20)
+        self.assertEqual(read_version(0), "v19")
+        self.assertEqual(read_version(19), "v0")

         # Purge another project has no effect
-        other = Project.objects.create(name="other")
+        other = Project.objects.create(name="other", owner=self.user1)
         call_command("purge_old_file_versions", "--force", "--projects", other.pk)
-        files = list(self.project1.files)
-        self.assertEqual(len(files[0].versions), 20)
+        self.assertEqual(count_versions(), 20)

         # Purge pro account keeps 10 versions
         self.user1.useraccount.account_type = UserAccount.TYPE_PRO
         self.user1.useraccount.save()
         call_command("purge_old_file_versions", "--force")
-        files = list(self.project1.files)
-        self.assertEqual(len(files[0].versions), 10)
-        self.assertEqual(read_version(files[0], 0), "v19")
-        self.assertEqual(read_version(files[0], 9), "v10")
+        self.assertEqual(count_versions(), 10)
+        self.assertEqual(read_version(0), "v19")
+        self.assertEqual(read_version(9), "v10")

         # Purge community account keeps 3 versions
         self.user1.useraccount.account_type = UserAccount.TYPE_COMMUNITY
         self.user1.useraccount.save()
         call_command("purge_old_file_versions", "--force")
-        files = list(self.project1.files)
-        self.assertEqual(len(files[0].versions), 3)
-        self.assertEqual(read_version(files[0], 0), "v19")
-        self.assertEqual(read_version(files[0], 3), "v17")
+        self.assertEqual(count_versions(), 3)
+        self.assertEqual(read_version(0), "v19")
+        self.assertEqual(read_version(2), "v17")

         # Purge is idempotent
         call_command("purge_old_file_versions", "--force")
-        files = list(self.project1.files)
-        self.assertEqual(len(files[0].versions), 3)
-        self.assertEqual(read_version(files[0], 0), "v19")
-        self.assertEqual(read_version(files[0], 3), "v17")
+        self.assertEqual(count_versions(), 3)
+        self.assertEqual(read_version(0), "v19")
+        self.assertEqual(read_version(2), "v17")

From 235f1811c3632326faa0a44468b857f4a9f9cb03 Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Thu, 3 Feb 2022 14:11:07 +0100
Subject: [PATCH 097/185] Fix useraccount string representation

---
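Note (sketch with assumed choice values -- the real numbers on UserAccount may
differ): indexing TYPE_CHOICES by the stored value only works while the values
happen to coincide with their list positions, whereas Django's auto-generated
get_account_type_display() looks the label up by value.

TYPE_COMMUNITY = 1  # assumed value, for illustration
TYPE_PRO = 2        # assumed value, for illustration
TYPE_CHOICES = ((TYPE_COMMUNITY, "community"), (TYPE_PRO, "pro"))

# Old code: TYPE_CHOICES[self.account_type][1] would read TYPE_CHOICES[1][1]
# == "pro" for a COMMUNITY account, and raise IndexError for a PRO account.
# New code: self.get_account_type_display() resolves the label by value.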
 docker-app/qfieldcloud/core/models.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py
index 5b95a58ed..959002265 100644
--- a/docker-app/qfieldcloud/core/models.py
+++ b/docker-app/qfieldcloud/core/models.py
@@ -425,7 +425,7 @@ def avatar_url(self):
         return None

     def __str__(self):
-        return self.TYPE_CHOICES[self.account_type][1]
+        return self.get_account_type_display()


 class Geodb(models.Model):

From 4127bf01e76f35207da0c56442255bc90f591401 Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Thu, 3 Feb 2022 18:24:27 +0100
Subject: [PATCH 098/185] Move purge_old_file_versions to utils2.storage

---
 .../commands/purge_old_file_versions.py | 53 ++----------------
 docker-app/qfieldcloud/core/utils2/storage.py | 55 +++++++++++++++++++
 2 files changed, 60 insertions(+), 48 deletions(-)

diff --git a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
index 047b66284..c23736777 100755
--- a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
+++ b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
@@ -1,14 +1,12 @@
 from django.core.management.base import BaseCommand, CommandError
-from qfieldcloud.core import utils
-from qfieldcloud.core.models import Project, UserAccount
+from qfieldcloud.core.models import Project
+from qfieldcloud.core.utils2 import storage


 class Command(BaseCommand):
+    """Runs purge_old_file_versions as a management command"""

-    help = """
-    Deletes old versions of files. Will keep only the 3 most recent versions
-    for COMMUNITY accounts and the 10 most recent for PRO accounts.
-    """
+    help = storage.purge_old_file_versions.__doc__

     PROMPT_TXT = "This will purge old files for all projects. Rerun with --force, or type 'yes' to continue, or 'no' to cancel: "

@@ -40,47 +38,6 @@ def handle(self, *args, **options):
         # Iterate through projects
         proj_instances = proj_instances.prefetch_related("owner__useraccount")
         for proj_instance in proj_instances:
-
-            print(f"Processing {proj_instance}")
-
-            # Determine account type
-            account_type = proj_instance.owner.useraccount.account_type
-            if account_type == UserAccount.TYPE_COMMUNITY:
-                keep_count = 3
-            elif account_type == UserAccount.TYPE_PRO:
-                keep_count = 10
-            else:
-                raise NotImplementedError(f"Unknown account type {account_type}")
-            print(f"Keeping {keep_count} versions")
-
-            # Process file by file
-            for file in utils.get_project_files_with_versions(proj_instance.pk):
-
-                filename = file.latest.name
-                old_versions = file.versions
-
-                # Sort by date (newest first)
-                old_versions.sort(key=lambda i: i.last_modified, reverse=True)
-
-                # Skip the newest N
-                old_versions_to_purge = old_versions[keep_count:]
-
-                # Debug print
-                all_count = len(old_versions)
-                topurge_count = len(old_versions_to_purge)
-                print(
-                    f'Purging {topurge_count} out of {all_count} old versions for "{filename}"...'
-                )
-
-                # Remove the N oldest
-                for old_version in old_versions_to_purge:
-                    if old_version.is_latest:
-                        # This is not supposed to happen, as versions were sorted above,
-                        # but leaving it here as a security measure in case version
-                        # ordering changes for some reason.
-                        raise Exception("Trying to delete latest version")
-                    # TODO: any way to batch those ? will probaby get slow on production
-                    old_version._data.delete()
-                    # TODO: audit ? take implementation from files_views.py:211
+            storage.cleanup_old_file_versions(proj_instance)

         print("done !")
diff --git a/docker-app/qfieldcloud/core/utils2/storage.py b/docker-app/qfieldcloud/core/utils2/storage.py
index c238502ff..1fbd4122d 100644
--- a/docker-app/qfieldcloud/core/utils2/storage.py
+++ b/docker-app/qfieldcloud/core/utils2/storage.py
@@ -1,8 +1,12 @@
 from __future__ import annotations

+import logging
 from typing import IO

 import qfieldcloud.core.utils
+from qfieldcloud.core.models import UserAccount
+
+logger = logging.getLogger(__name__)


 def upload_user_avatar(user: "User", file: IO, mimetype: str) -> str:  # noqa: F821
@@ -96,3 +100,54 @@ def remove_project_thumbail(project: "Project") -> None:  # noqa: F821
     bucket = qfieldcloud.core.utils.get_s3_bucket()
     key = project.thumbnail_uri
     bucket.object_versions.filter(Prefix=key).delete()
+
+
+def purge_old_file_versions(project: "Project") -> None:  # noqa: F821
+    """
+    Deletes old versions of all files in the given project. Will keep __3__
+    versions for COMMUNITY user accounts, and __10__ versions for PRO user
+    accounts
+    """
+
+    logger.info(f"Cleaning up old files for {project}")
+
+    # Determine account type
+    account_type = project.owner.useraccount.account_type
+    if account_type == UserAccount.TYPE_COMMUNITY:
+        keep_count = 3
+    elif account_type == UserAccount.TYPE_PRO:
+        keep_count = 10
+    else:
+        raise NotImplementedError(f"Unknown account type {account_type}")
+
+    logger.debug(f"Keeping {keep_count} versions")
+
+    # Process file by file
+    for file in qfieldcloud.core.utils.get_project_files_with_versions(project.pk):
+
+        filename = file.latest.name
+        old_versions = file.versions
+
+        # Sort by date (newest first)
+        old_versions.sort(key=lambda i: i.last_modified, reverse=True)
+
+        # Skip the newest N
+        old_versions_to_purge = old_versions[keep_count:]
+
+        # Debug print
+        all_count = len(old_versions)
+        topurge_count = len(old_versions_to_purge)
+        logger.debug(
+            f'Purging {topurge_count} out of {all_count} old versions for "{filename}"...'
+        )
+
+        # Remove the N oldest
+        for old_version in old_versions_to_purge:
+            if old_version.is_latest:
+                # This is not supposed to happen, as versions were sorted above,
+                # but leaving it here as a security measure in case version
+                # ordering changes for some reason.
+                raise Exception("Trying to delete latest version")
+            # TODO: any way to batch those ? will probaby get slow on production
+            old_version._data.delete()
+            # TODO: audit ? take implementation from files_views.py:211

From 3e731035fb5f7077d27768a933e66bd49f9afc38 Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Thu, 3 Feb 2022 18:47:50 +0100
Subject: [PATCH 099/185] Run purge_old_file_versions after file uploads
 instead of cron

- removed cron
- run purge_old_file_versions after uploads through API
- update tests accordingly
---
 docker-app/qfieldcloud/core/cron.py | 13 ----
 .../commands/purge_old_file_versions.py | 2 +-
 .../qfieldcloud/core/tests/test_qgis_file.py | 75 ++++++++++++++-----
 .../qfieldcloud/core/views/files_views.py | 4 +
 4 files changed, 62 insertions(+), 32 deletions(-)

diff --git a/docker-app/qfieldcloud/core/cron.py b/docker-app/qfieldcloud/core/cron.py
index 96400c4eb..9d63ec854 100644
--- a/docker-app/qfieldcloud/core/cron.py
+++ b/docker-app/qfieldcloud/core/cron.py
@@ -1,6 +1,5 @@
 import logging

-from django.core.management import call_command
 from django_cron import CronJobBase, Schedule
 from invitations.utils import get_invitation_model

@@ -36,15 +35,3 @@ def do(self):
         logger.info(
             f'Resend {len(invitation_emails)} previously failed invitation(s) to: {", ".join(invitation_emails)}'
         )
-
-
-class PurgeOldFileVersions(CronJobBase):
-    """Purges old versions of files of all projects using the purge_old_file_versions
-    management command.
-    """
-
-    schedule = Schedule(run_every_mins=60 * 24 * 7)
-    code = "qfieldcloud.purge_old_file_versions"
-
-    def do(self):
-        call_command("purge_old_file_versions", "--force")
diff --git a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
index c23736777..496814065 100755
--- a/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
+++ b/docker-app/qfieldcloud/core/management/commands/purge_old_file_versions.py
@@ -38,6 +38,6 @@ def handle(self, *args, **options):
         # Iterate through projects
         proj_instances = proj_instances.prefetch_related("owner__useraccount")
         for proj_instance in proj_instances:
-            storage.cleanup_old_file_versions(proj_instance)
+            storage.purge_old_file_versions(proj_instance)

         print("done !")
diff --git a/docker-app/qfieldcloud/core/tests/test_qgis_file.py b/docker-app/qfieldcloud/core/tests/test_qgis_file.py
index 6fbc7bc84..c131995c7 100644
--- a/docker-app/qfieldcloud/core/tests/test_qgis_file.py
+++ b/docker-app/qfieldcloud/core/tests/test_qgis_file.py
@@ -565,13 +565,11 @@ def test_upload_10mb_file(self):
         self.assertGreater(response.json()[0]["size"], 10000000)
         self.assertLess(response.json()[0]["size"], 11000000)

-    def test_purge_old_versions(self):
-        """This tests manual purging of old versions"""
+    def test_purge_old_versions_command(self):
+        """This tests manual purging of old versions with the management command"""

         self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key)

-        apipath = f"/api/v1/files/{self.project1.id}/file.txt/"
-
         def count_versions():
             """counts the versions in first file of project1"""
             file = list(self.project1.files)[0]
@@ -582,39 +580,80 @@ def read_version(n):
             file = list(self.project1.files)[0]
             return file.versions[n]._data.get()["Body"].read().decode()

-        # Create 20 versions
+        # Create 20 versions (direct upload to s3)
+        bucket = utils.get_s3_bucket()
+        key = f"projects/{self.project1.id}/files/file.txt/"
         for i in range(20):
-            test_file = io.StringIO(f"v{i}")
-            self.client.post(apipath, {"file": test_file}, format="multipart")
+            test_file = io.BytesIO(f"v{i}".encode())
+            bucket.upload_fileobj(test_file, key)

         # Ensure it worked
         self.assertEqual(count_versions(), 20)
         self.assertEqual(read_version(0), "v19")
         self.assertEqual(read_version(19), "v0")

-        # Purge another project has no effect
+        # Run management command on another project should have no effect
         other = Project.objects.create(name="other", owner=self.user1)
         call_command("purge_old_file_versions", "--force", "--projects", other.pk)
         self.assertEqual(count_versions(), 20)

-        # Purge pro account keeps 10 versions
-        self.user1.useraccount.account_type = UserAccount.TYPE_PRO
-        self.user1.useraccount.save()
+        # Run management command should leave 3
         call_command("purge_old_file_versions", "--force")
-        self.assertEqual(count_versions(), 10)
+        self.assertEqual(count_versions(), 3)
         self.assertEqual(read_version(0), "v19")
-        self.assertEqual(read_version(9), "v10")
+        self.assertEqual(read_version(2), "v17")

-        # Purge community account keeps 3 versions
-        self.user1.useraccount.account_type = UserAccount.TYPE_COMMUNITY
-        self.user1.useraccount.save()
+        # Run management command is idempotent
         call_command("purge_old_file_versions", "--force")
         self.assertEqual(count_versions(), 3)
         self.assertEqual(read_version(0), "v19")
         self.assertEqual(read_version(2), "v17")

-        # Purge is idempotent
-        call_command("purge_old_file_versions", "--force")
-        self.assertEqual(count_versions(), 3)
-        self.assertEqual(read_version(0), "v19")
-        self.assertEqual(read_version(2), "v17")
+    def test_purge_old_versions(self):
+        """This tests automated purging of old versions when uploading files"""
+
+        self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key)
+
+        apipath = f"/api/v1/files/{self.project1.id}/file.txt/"
+
+        def count_versions():
+            """counts the versions in first file of project1"""
+            file = list(self.project1.files)[0]
+            return len(file.versions)
+
+        def read_version(n):
+            """returns the content of version in first file of project1"""
+            file = list(self.project1.files)[0]
+            return file.versions[n]._data.get()["Body"].read().decode()
+
+        # As PRO account, 10 versions should be kept out of 20
+        self.user1.useraccount.account_type = UserAccount.TYPE_PRO
+        self.user1.useraccount.save()
+        for i in range(20):
+            test_file = io.StringIO(f"v{i}")
+            self.client.post(apipath, {"file": test_file}, format="multipart")
+        self.assertEqual(count_versions(), 10)
+        self.assertEqual(read_version(0), "v19")
+        self.assertEqual(read_version(9), "v10")
+
+        # As COMMUNITY account, 3 versions should be kept
+        self.user1.useraccount.account_type = UserAccount.TYPE_COMMUNITY
+        self.user1.useraccount.save()
+
+        # But first we check that uploading to another project doesn't affect this project
+        otherproj = Project.objects.create(name="other", owner=self.user1)
+        otherpath = f"/api/v1/files/{otherproj.id}/file.txt/"
+        self.client.post(otherpath, {"file": io.StringIO("v1")}, format="multipart")
+        self.assertEqual(count_versions(), 10)
+        self.assertEqual(read_version(0), "v19")
+        self.assertEqual(read_version(9), "v10")
+
+        # As COMMUNITY account, 3 versions should be kept out of 20 new ones
+        self.user1.useraccount.account_type = UserAccount.TYPE_COMMUNITY
+        self.user1.useraccount.save()
+        for i in range(20, 40):
+            test_file = io.StringIO(f"v{i}")
+            self.client.post(apipath, {"file": test_file}, format="multipart")
+        self.assertEqual(count_versions(), 3)
+        self.assertEqual(read_version(0), "v39")
+        self.assertEqual(read_version(2), "v37")
diff --git a/docker-app/qfieldcloud/core/views/files_views.py b/docker-app/qfieldcloud/core/views/files_views.py
index 82f23f609..c482ae9ed 100644
--- a/docker-app/qfieldcloud/core/views/files_views.py
+++ b/docker-app/qfieldcloud/core/views/files_views.py
@@ -6,6 +6,7 @@
 from qfieldcloud.core.models import ProcessProjectfileJob, Project
 from qfieldcloud.core.utils import get_project_file_with_versions
 from qfieldcloud.core.utils2.audit import LogEntry, audit
+from qfieldcloud.core.utils2.storage import purge_old_file_versions
 from rest_framework import permissions, status, views
 from rest_framework.parsers import MultiPartParser
 from rest_framework.response import Response
@@ -191,6 +192,9 @@ def post(self, request, projectid, filename, format=None):
             changes={filename: [None, new_object.latest.e_tag]},
         )

+        # Delete the old versions if the files
+        purge_old_file_versions(project)
+
         return Response(status=status.HTTP_201_CREATED)

     def delete(self, request, projectid, filename):

From d0642498714af5490978a96a41a8ada85a21991d Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Fri, 4 Feb 2022 13:24:37 +0100
Subject: [PATCH 100/185] followup: code style as per PR review

---
 docker-app/qfieldcloud/core/utils2/storage.py    | 14 ++++----------
 docker-app/qfieldcloud/core/views/files_views.py |  2 +-
 2 files changed, 5 insertions(+), 11 deletions(-)

diff --git a/docker-app/qfieldcloud/core/utils2/storage.py b/docker-app/qfieldcloud/core/utils2/storage.py
index 1fbd4122d..9c257e8f1 100644
--- a/docker-app/qfieldcloud/core/utils2/storage.py
+++ b/docker-app/qfieldcloud/core/utils2/storage.py
@@ -125,20 +125,14 @@ def purge_old_file_versions(project: "Project") -> None:  # noqa: F821

     # Process file by file
     for file in qfieldcloud.core.utils.get_project_files_with_versions(project.pk):
-        filename = file.latest.name
-        old_versions = file.versions
-
-        # Sort by date (newest first)
-        old_versions.sort(key=lambda i: i.last_modified, reverse=True)
-
         # Skip the newest N
-        old_versions_to_purge = old_versions[keep_count:]
+        old_versions_to_purge = sorted(
+            file.versions, key=lambda v: v.last_modified, reverse=True
+        )[keep_count:]

         # Debug print
-        all_count = len(old_versions)
-        topurge_count = len(old_versions_to_purge)
         logger.debug(
-            f'Purging {topurge_count} out of {all_count} old versions for "{filename}"...'
+            f'Purging {len(old_versions_to_purge)} out of {len(file.versions)} old versions for "{file.latest.name}"...'
         )

         # Remove the N oldest
diff --git a/docker-app/qfieldcloud/core/views/files_views.py b/docker-app/qfieldcloud/core/views/files_views.py
index c482ae9ed..e35c7960d 100644
--- a/docker-app/qfieldcloud/core/views/files_views.py
+++ b/docker-app/qfieldcloud/core/views/files_views.py
@@ -192,7 +192,7 @@ def post(self, request, projectid, filename, format=None):
             changes={filename: [None, new_object.latest.e_tag]},
         )

-        # Delete the old versions if the files
+        # Delete the old file versions
         purge_old_file_versions(project)

         return Response(status=status.HTTP_201_CREATED)

From 346017862274c15f8a02d492d5bb18316a115086 Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Fri, 4 Feb 2022 13:29:27 +0100
Subject: [PATCH 101/185] followup: fix circular import

---
 docker-app/qfieldcloud/core/utils2/storage.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/docker-app/qfieldcloud/core/utils2/storage.py b/docker-app/qfieldcloud/core/utils2/storage.py
index 9c257e8f1..50afe81e4 100644
--- a/docker-app/qfieldcloud/core/utils2/storage.py
+++ b/docker-app/qfieldcloud/core/utils2/storage.py
@@ -3,8 +3,8 @@
 import logging
 from typing import IO

+import qfieldcloud.core.models
 import qfieldcloud.core.utils
-from qfieldcloud.core.models import UserAccount

 logger = logging.getLogger(__name__)

@@ -113,9 +113,9 @@ def purge_old_file_versions(project: "Project") -> None:  # noqa: F821

     # Determine account type
     account_type = project.owner.useraccount.account_type
-    if account_type == UserAccount.TYPE_COMMUNITY:
+    if account_type == qfieldcloud.core.models.UserAccount.TYPE_COMMUNITY:
         keep_count = 3
-    elif account_type == UserAccount.TYPE_PRO:
+    elif account_type == qfieldcloud.core.models.UserAccount.TYPE_PRO:
         keep_count = 10
     else:
         raise NotImplementedError(f"Unknown account type {account_type}")

From 61aa9756ac6d766d86f69588a2318afcb6b65715 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Fri, 4 Feb 2022 14:50:44 +0200
Subject: [PATCH 102/185] Fix needs_repackaging check, last_packaged <
 last_updated => need repackaging

---
 docker-app/qfieldcloud/core/models.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py
index 1a535a7df..11715afa5 100644
--- a/docker-app/qfieldcloud/core/models.py
+++ b/docker-app/qfieldcloud/core/models.py
@@ -961,7 +961,7 @@ def needs_repackaging(self) -> bool:
             and self.data_last_packaged_at
         ):
             # if all vector layers are file based and have been packaged after the last update, it is safe to say there are no modifications
-            return self.data_last_packaged_at > self.data_last_updated_at
+            return self.data_last_packaged_at < self.data_last_updated_at
         else:
             # if the project has online vector layers (PostGIS/WFS/etc) we cannot be sure if there are modification or not, so better say there are
             return True

From d037eb453d4b6d9bc7f3f82748b7c3c672c6387a Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Fri, 4 Feb 2022 14:51:13 +0200
Subject: [PATCH 103/185] More optimal check if the project has been modified
 and needs repackaging

---
 docker-app/worker_wrapper/wrapper.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/docker-app/worker_wrapper/wrapper.py b/docker-app/worker_wrapper/wrapper.py
index daded3c42..df0723f43 100644
--- a/docker-app/worker_wrapper/wrapper.py
+++ b/docker-app/worker_wrapper/wrapper.py
@@ -324,7 +324,7 @@ def before_docker_run(self) -> None:

     def after_docker_run(self) -> None:
         delta_feedback = self.job.feedback["outputs"]["apply_deltas"]["delta_feedback"]
-        is_data_modified = True
+        is_data_modified = False

         for feedback in delta_feedback:
             delta_id = feedback["delta_id"]
@@ -360,9 +360,9 @@ def after_docker_run(self) -> None:
                 modified_pk=modified_pk,
             )

-            if is_data_modified:
-                self.job.project.data_last_updated_at = timezone.now()
-                self.job.project.save()
+        if is_data_modified:
+            self.job.project.data_last_updated_at = timezone.now()
+            self.job.project.save()

     def after_docker_exception(self) -> None:
         Delta.objects.filter(

From a09ecb03c272d83b7cd9f9bf18f057d39b4a1e15 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Fri, 4 Feb 2022 14:51:21 +0200
Subject: [PATCH 104/185] Remove extra logging

---
 docker-qgis/utils.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/docker-qgis/utils.py b/docker-qgis/utils.py
index 22ba617ae..1f93431e9 100644
--- a/docker-qgis/utils.py
+++ b/docker-qgis/utils.py
@@ -454,7 +454,6 @@ def run_workflow(
         )
     elif isinstance(feedback_filename, Path):
         with open(feedback_filename, "w") as f:
-            print(feedback)
             json.dump(feedback, f, indent=2, sort_keys=True, default=json_default)

     return feedback

From 47f5784b4e07853522abfe55989e57bb5afb2bce Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Fri, 4 Feb 2022 22:50:34 +0200
Subject: [PATCH 105/185] Test detection of online vector layers within a
 project

---
 .../qfieldcloud/core/tests/test_packages.py | 67 +++++++++++++++++++
 1 file changed, 67 insertions(+)

diff --git a/docker-app/qfieldcloud/core/tests/test_packages.py b/docker-app/qfieldcloud/core/tests/test_packages.py
index 7dec59df9..3483bb77e 100644
--- a/docker-app/qfieldcloud/core/tests/test_packages.py
+++ b/docker-app/qfieldcloud/core/tests/test_packages.py
@@ -84,6 +84,39 @@ def upload_files(
         )
         self.assertTrue(status.is_success(response.status_code))

+    def wait_for_project_ok_status(self, project: Project, wait_s: int = 30):
+        jobs = Job.objects.filter(project=project).exclude(
+            status__in=[Job.Status.FAILED, Job.Status.FINISHED]
+        )
+
+        if jobs.count() == 0:
+            return
+
+        has_no_pending_jobs = False
+        for _ in range(wait_s):
+            if (
+                Job.objects.filter(project=project, status=Job.Status.PENDING).count()
+                == 0
+            ):
+                has_no_pending_jobs = True
+
+            time.sleep(1)
+
+        if not has_no_pending_jobs:
+            self.fail(f"Still pending jobs after waiting for {wait_s} seconds")
+
+        for _ in range(wait_s):
+            project.refresh_from_db()
+            if project.status == Project.Status.OK:
+                return
+            if project.status == Project.Status.FAILED:
+                self.fail("Waited for ok status, but got failed")
+                return
+
+            time.sleep(1)
+
+        self.fail(f"Waited for ok status for {wait_s} seconds")
+
     def upload_files_and_check_package(
         self,
         token: str,
@@ -321,6 +354,40 @@ def test_download_project_with_broken_layer_datasources(self):
             invalid_layers=["surfacestructure_35131bca_337c_483b_b09e_1cf77b1dfb16"],
         )

+    def test_has_online_vector_data(self):
+        cur = self.conn.cursor()
+        cur.execute("CREATE TABLE point (id integer, geometry geometry(point, 2056))")
+        self.conn.commit()
+
+        self.upload_files(
+            self.token1.key,
+            self.project1,
+            files=[
+                ("delta/project2.qgs", "project.qgs"),
+            ],
+        )
+
+        self.wait_for_project_ok_status(self.project1)
+
+        self.project1.refresh_from_db()
+
+        self.assertTrue(self.project1.has_online_vector_data)
+
+    def test_has_no_online_vector_data(self):
+        self.upload_files(
+            self.token1.key,
+            self.project1,
+            files=[
+                ("delta/project.qgs", "project.qgs"),
+            ],
+        )
+
+        self.wait_for_project_ok_status(self.project1)
+
+        self.project1.refresh_from_db()
+
+        self.assertTrue(self.project1.has_online_vector_data)
+
     def test_filename_with_whitespace(self):
         self.upload_files_and_check_package(
             token=self.token1.key,

From 709ab8e56b3eed2948f3753377195958ca23d7c3 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Sat, 5 Feb 2022 00:08:42 +0200
Subject: [PATCH 106/185] Added needs repackaging tests

---
 .../qfieldcloud/core/tests/test_packages.py | 64 +++++++++++++++++++
 1 file changed, 64 insertions(+)

diff --git a/docker-app/qfieldcloud/core/tests/test_packages.py b/docker-app/qfieldcloud/core/tests/test_packages.py
index 3483bb77e..b1d12687d 100644
--- a/docker-app/qfieldcloud/core/tests/test_packages.py
+++ b/docker-app/qfieldcloud/core/tests/test_packages.py
@@ -354,6 +354,70 @@ def test_download_project_with_broken_layer_datasources(self):
             invalid_layers=["surfacestructure_35131bca_337c_483b_b09e_1cf77b1dfb16"],
         )

+    def test_needs_repackaging_without_online_vector(self):
+        self.project1.refresh_from_db()
+        # newly uploaded project should always need to be packaged at least once
+        self.assertTrue(self.project1.needs_repackaging)
+
+        self.upload_files_and_check_package(
+            token=self.token1.key,
+            project=self.project1,
+            files=[
+                ("delta/nonspatial.csv", "nonspatial.csv"),
+                ("delta/testdata.gpkg", "testdata.gpkg"),
+                ("delta/points.geojson", "points.geojson"),
+                ("delta/polygons.geojson", "polygons.geojson"),
+                ("delta/project.qgs", "project.qgs"),
+            ],
+            expected_files=[
+                "data.gpkg",
+                "project_qfield.qgs",
+            ],
+        )
+
+        self.project1.refresh_from_db()
+        # no longer needs repackaging since geopackage layers cannot change without deltas/reupload
+        self.assertFalse(self.project1.needs_repackaging)
+
+        self.upload_files(
+            self.token1.key,
+            self.project1,
+            files=[
+                ("delta/nonspatial.csv", "nonspatial.csv"),
+            ],
+        )
+
+        self.project1.refresh_from_db()
+        # a layer file changed, so we need to repackage
+        self.assertTrue(self.project1.needs_repackaging)
+
+    def test_needs_repackaging_with_online_vector(self):
+        cur = self.conn.cursor()
+        cur.execute("CREATE TABLE point (id integer, geometry geometry(point, 2056))")
+        self.conn.commit()
+        cur.execute(
+            "INSERT INTO point(id, geometry) VALUES(1, ST_GeomFromText('POINT(2725505 1121435)', 2056))"
+        )
+        self.conn.commit()
+
+        self.project1.refresh_from_db()
+        # newly uploaded project should always need to be packaged at least once
+        self.assertTrue(self.project1.needs_repackaging)
+
+        self.upload_files_and_check_package(
+            token=self.token1.key,
+            project=self.project1,
+            files=[
+                ("delta/project2.qgs", "project.qgs"),
+                ("delta/points.geojson", "points.geojson"),
+            ],
+            expected_files=["data.gpkg", "project_qfield.qgs"],
+        )
+
+        self.project1.refresh_from_db()
+        # projects with online vector layer should always show as it needs repackaging
+        self.assertTrue(self.project1.needs_repackaging)
+
     def test_has_online_vector_data(self):
         cur = self.conn.cursor()
         cur.execute("CREATE TABLE point (id integer, geometry geometry(point, 2056))")
         self.conn.commit()

From 8e3daeb169c7cb0d8052e60cc63a9879dfd13984 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Sun, 6 Feb 2022 13:57:36 +0200
Subject: [PATCH 107/185] Restart nginx automatically and remove circular
 dependency

---
 docker-compose.yml | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index 18b414c20..1dea8e89d 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -82,6 +82,7 @@ services:

   nginx:
     image: nginx:stable
+    restart: unless-stopped
     volumes:
       - static_volume:/var/www/html/staticfiles/
       - media_volume:/var/www/html/mediafiles/
@@ -101,8 +102,6 @@ services:
       LETSENCRYPT_STAGING: ${LETSENCRYPT_STAGING}
      LETSENCRYPT_RSA_KEY_SIZE: ${LETSENCRYPT_RSA_KEY_SIZE}
     logging: *default-logging
-    depends_on:
-      - app

   mkcert:
     image: vishnunair/docker-mkcert

From 84df917fd4dfb234f8bc42032568645873713140 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Mon, 7 Feb 2022 21:36:40 +0200
Subject: [PATCH 108/185] Add django-classy-tags requirement

---
 docker-app/requirements.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docker-app/requirements.txt b/docker-app/requirements.txt
index 90c96be26..8985a06ca 100644
--- a/docker-app/requirements.txt
+++ b/docker-app/requirements.txt
@@ -22,6 +22,7 @@ django-allauth==0.44.0
 django-auditlog==1.0a1
 django-axes==5.28.0
 django-bootstrap4==3.0.1
+django-classy-tags==3.0.1
 django-common-helpers==0.9.2
 django-cron==0.5.0
 django-currentuser==0.5.3

From 9d5b8d2f9103f5f948b92589e7dca3d288c7ffcf Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Tue, 8 Feb 2022 03:03:37 +0200
Subject: [PATCH 109/185] Added delete_file_version function that deletes a
 specific file version

---
 docker-app/qfieldcloud/core/utils2/storage.py | 54 ++++++++++++++++++-
 1 file changed, 53 insertions(+), 1 deletion(-)

diff --git a/docker-app/qfieldcloud/core/utils2/storage.py b/docker-app/qfieldcloud/core/utils2/storage.py
index 50afe81e4..62cb13f9f 100644
--- a/docker-app/qfieldcloud/core/utils2/storage.py
+++ b/docker-app/qfieldcloud/core/utils2/storage.py
@@ -1,7 +1,7 @@
 from __future__ import annotations

 import logging
-from typing import IO
+from typing import IO, List

 import qfieldcloud.core.models
 import qfieldcloud.core.utils
@@ -145,3 +145,55 @@ def purge_old_file_versions(project: "Project") -> None:  # noqa: F821
             # TODO: any way to batch those ? will probaby get slow on production
             old_version._data.delete()
             # TODO: audit ? take implementation from files_views.py:211
+
+
+def delete_file_version(
+    project: "Project",  # noqa: F821
+    filename: str,
+    version_id: str,
+    include_older: bool = False,
+) -> List[qfieldcloud.core.utils.S3ObjectVersion]:
+    """Deletes a specific version of given file.
+
+    Args:
+        project (Project): project the file belongs to
+        filename (str): filename the version belongs to
+        version_id (str): version id to delete
+        include_older (bool, optional): when True, versions older than the passed `version_id` will also be deleted. If the version_id is the latest version of a file, this parameter will be treated as False. Defaults to False.
+
+    Returns:
+        List[S3ObjectVersion]: the deleted versions
+    """
+    file = qfieldcloud.core.utils.get_project_file_with_versions(project.id, filename)
+
+    if not file:
+        raise Exception("No file with such name in the given project found")
+
+    if file.latest.id == version_id:
+        include_older = False
+
+    versions_to_delete = []
+
+    for file_version in file.versions:
+        if file_version.id == version_id:
+            versions_to_delete.append(file_version)
+
+            if include_older:
+                continue
+            else:
+                break
+
+        if versions_to_delete:
+            assert (
+                include_older
+            ), "We should continue to loop only if `include_older` is True"
+            assert (
+                versions_to_delete[-1].last_modified > file_version.last_modified
+            ), "Assert the other versions are really older than the requested one"
+
+            versions_to_delete.append(file_version)
+
+    for file_version in versions_to_delete:
+        file_version._data.delete()
+
+    return versions_to_delete

From a55bd30677fd798c8c7a0f56354d109ad5cd90b0 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Tue, 8 Feb 2022 03:04:22 +0200
Subject: [PATCH 110/185] Added a display property to versions to have a
 unified user representation

---
 docker-app/qfieldcloud/core/utils.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/docker-app/qfieldcloud/core/utils.py b/docker-app/qfieldcloud/core/utils.py
index 3e675a226..449c65104 100644
--- a/docker-app/qfieldcloud/core/utils.py
+++ b/docker-app/qfieldcloud/core/utils.py
@@ -63,6 +63,10 @@ def e_tag(self) -> str:
     def is_latest(self) -> bool:
         return self._data.is_latest

+    @property
+    def display(self) -> str:
+        return self.last_modified.strftime("v%Y%m%d%H%M%S")
+

 class S3ObjectWithVersions(NamedTuple):
     latest: S3ObjectVersion

From c0aa0ce43c6fcf1c59d0f4ee5cdca8f650e9a3d5 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Tue, 8 Feb 2022 03:18:21 +0200
Subject: [PATCH 111/185] Return the display string in the files_views

---
 docker-app/qfieldcloud/core/views/files_views.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/docker-app/qfieldcloud/core/views/files_views.py b/docker-app/qfieldcloud/core/views/files_views.py
index e35c7960d..22652ca6f 100644
--- a/docker-app/qfieldcloud/core/views/files_views.py
+++ b/docker-app/qfieldcloud/core/views/files_views.py
@@ -4,7 +4,7 @@
 from django.utils import timezone
 from qfieldcloud.core import exceptions, permissions_utils, utils
 from qfieldcloud.core.models import ProcessProjectfileJob, Project
-from qfieldcloud.core.utils import get_project_file_with_versions
+from qfieldcloud.core.utils import S3ObjectVersion, get_project_file_with_versions
 from qfieldcloud.core.utils2.audit import LogEntry, audit
 from qfieldcloud.core.utils2.storage import purge_old_file_versions
 from rest_framework import permissions, status, views
@@ -69,6 +69,7 @@ def get(self, request, projectid):
                     "version_id": version.version_id,
                     "last_modified": last_modified,
                     "is_latest": version.is_latest,
+                    "display": S3ObjectVersion(version.key, version).display,
                 }
             )

From e35ae6c8e54c8a5ddcae5cb9fda6e152c460d806 Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Wed, 9 Feb 2022 10:02:14 +0100
Subject: [PATCH 112/185] disable request logging when developing

(note: unsure why we do that logging? if it's for general purpose
auditing/stats, shouldn't it be done at the nginx level?)
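
The added filter is just a callable gate on each log record -- a standalone
sketch of the behaviour (plain logging, with a DEBUG flag standing in for
django.conf.settings.DEBUG):

import logging

DEBUG = True  # stand-in for settings.DEBUG

class NotOnDebugFilter(logging.Filter):
    def filter(self, record):
        # Returning False drops the record before the handler emits it.
        return not DEBUG

handler = logging.StreamHandler()
handler.addFilter(NotOnDebugFilter())
logger = logging.getLogger("qfieldcloud.request_response_log")
logger.addHandler(handler)
logger.warning("emitted only when DEBUG is False")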
---
 docker-app/qfieldcloud/core/logging/filters.py | 6 ++++++
 docker-app/qfieldcloud/settings.py             | 4 ++++
 2 files changed, 10 insertions(+)

diff --git a/docker-app/qfieldcloud/core/logging/filters.py b/docker-app/qfieldcloud/core/logging/filters.py
index 056d2a05a..d775df776 100644
--- a/docker-app/qfieldcloud/core/logging/filters.py
+++ b/docker-app/qfieldcloud/core/logging/filters.py
@@ -1,5 +1,6 @@
 import logging

+from django.conf import settings
 from django.http import HttpRequest


@@ -27,3 +28,8 @@ def extra_from_record(self, record):
         the `LogRecord`.
         """
         return {attr_name: record.__dict__[attr_name] for attr_name in record.__dict__}
+
+
+class NotOnDebugFilter(logging.Filter):
+    def filter(self, record):
+        return not settings.DEBUG
diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py
index 90621c39a..dc240b5b3 100644
--- a/docker-app/qfieldcloud/settings.py
+++ b/docker-app/qfieldcloud/settings.py
@@ -297,6 +297,9 @@
         "skip_logging": {
             "()": "qfieldcloud.core.logging.filters.SkipLoggingFilter",
         },
+        "not_on_debug": {
+            "()": "qfieldcloud.core.logging.filters.NotOnDebugFilter",
+        },
     },
     "handlers": {
         "console.json": {
@@ -317,6 +320,7 @@
             "level": LOGLEVEL,
             "filters": [
                 "skip_logging",
+                "not_on_debug",
             ],
             "handlers": [
                 # TODO enable console.json once it is clear how we do store the json logs

From 984797ec94ca82881a2ee63e0b1363b912c9cba0 Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Wed, 9 Feb 2022 12:04:51 +0100
Subject: [PATCH 113/185] drop request logging logic altogether (can now be
 done at nginx level)

---
 .../qfieldcloud/core/logging/filters.py      |  35 -----
 .../qfieldcloud/core/logging/formatters.py   |  72 ----------
 .../core/middleware/request_response_log.py  | 133 ------------------
 .../qfieldcloud/core/views/status_views.py   |   2 -
 docker-app/qfieldcloud/settings.py           |  31 ----
 5 files changed, 273 deletions(-)
 delete mode 100644 docker-app/qfieldcloud/core/logging/filters.py
 delete mode 100644 docker-app/qfieldcloud/core/middleware/request_response_log.py

diff --git a/docker-app/qfieldcloud/core/logging/filters.py b/docker-app/qfieldcloud/core/logging/filters.py
deleted file mode 100644
index d775df776..000000000
--- a/docker-app/qfieldcloud/core/logging/filters.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import logging
-
-from django.conf import settings
-from django.http import HttpRequest
-
-
-def skip_logging(func):
-    def wrapper(instance, *args, **kwargs):
-        request = instance.request
-
-        if not isinstance(request, HttpRequest):
-            request = request._request
-
-        request.skip_logging = True
-
-        return func(instance, *args, **kwargs)
-
-    return wrapper
-
-
-class SkipLoggingFilter(logging.Filter):
-    def filter(self, record):
-        return not getattr(record, "skip_logging", False)
-
-    def extra_from_record(self, record):
-        """Returns `extra` dict you passed to logger.
-        The `extra` keyword argument is used to populate the `__dict__` of
-        the `LogRecord`.
-        """
-        return {attr_name: record.__dict__[attr_name] for attr_name in record.__dict__}
-
-
-class NotOnDebugFilter(logging.Filter):
-    def filter(self, record):
-        return not settings.DEBUG
diff --git a/docker-app/qfieldcloud/core/logging/formatters.py b/docker-app/qfieldcloud/core/logging/formatters.py
index 81d4a0ebd..cbe18fbbb 100644
--- a/docker-app/qfieldcloud/core/logging/formatters.py
+++ b/docker-app/qfieldcloud/core/logging/formatters.py
@@ -1,8 +1,3 @@
-import datetime
-import json
-import logging
-import traceback
-
 import json_log_formatter
 from django.core.handlers.wsgi import WSGIRequest
 from django.core.serializers.json import DjangoJSONEncoder
@@ -30,73 +25,6 @@ def to_json(self, record):
         return self.json_lib.dumps(record, cls=JsonEncoder)


-class CustomisedRequestHumanFormatter(logging.Formatter):
-    def format(self, record):
-        record.getMessage()
-        extra = self.extra_from_record(record)
-
-        created = extra.get("created")
-        if created:
-            created = datetime.datetime.fromtimestamp(created)
-
-        request_headers = "\n"
-        for header, value in extra.get("request_headers", {}).items():
-            request_headers += f"    {header}: {value}\n"
-
-        response_headers = "\n"
-        for header, value in extra.get("response_headers", {}).items():
-            response_headers += f"    {header}: {value}\n"
-
-        request_body = (
-            extra.get("request_body", "NO_REQUEST_BODY") or "EMPTY_REQUEST_BODY"
-        )
-        if not isinstance(request_body, str):
-            request_body = json.dumps(request_body, indent=2, cls=JsonEncoder)
-
-        response_body = (
-            extra.get("response_body", "NO_RESPONSE_BODY") or "EMPTY_RESPONSE_BODY"
-        )
-        if not isinstance(response_body, str):
-            response_body = json.dumps(response_body, indent=2, cls=JsonEncoder)
-
-        python_exception = ""
-        if extra.get("exception"):
-            exception = extra.get("exception")
-            tb1 = traceback.TracebackException.from_exception(exception)
-            exception_str = " ".join(tb1.format())
-            python_exception = f"""Exception (ERROR):
-{exception_str}
-"""
-
-        return f"""
-================================================================================
-| HTTP Request
-================================================================================
-Request: {extra.get("request_method", "UNKNOWN_REQUEST_METHOD")} {extra.get("request_path", "UNKNOWN_REQUEST_PATH")} {extra.get("status_code", "UNKNOWN_STATUS_CODE")}
-Time: {created}; relative - {extra.get("relativeCreated", "UNKNOWN_RELATIVE_CREATED")}; runtime - {extra.get("run_time", "UNKNOWN_RUN_TIME")}
-Context: PID #{extra.get("process", "UNKNOWN_PID")}; thread #{extra.get("thread", "UNKNOWN_THREAD")} ({extra.get("threadName", "UNKNOWN_THREAD_NAME")})
-Request headers: {request_headers}
-Request files: {", ".join(extra.get("files", [])) or "NO_FILES"}
-Request payload:
-------------------------------------------------------------------------------S
-{request_body}
-------------------------------------------------------------------------------E
-{python_exception}
-Response headers: {response_headers}
-Response payload:
-------------------------------------------------------------------------------S
-{response_body}
-------------------------------------------------------------------------------E
-        """
-
-    def extra_from_record(self, record):
-        """Returns `extra` dict you passed to logger.
-        The `extra` keyword argument is used to populate the `__dict__` of
-        the `LogRecord`.
-        """
-        return {attr_name: record.__dict__[attr_name] for attr_name in record.__dict__}
-
-
 def json_default(obj):
     if isinstance(obj, WSGIRequest):
         return str(obj)
diff --git a/docker-app/qfieldcloud/core/middleware/request_response_log.py b/docker-app/qfieldcloud/core/middleware/request_response_log.py
deleted file mode 100644
index bc064c533..000000000
--- a/docker-app/qfieldcloud/core/middleware/request_response_log.py
+++ /dev/null
@@ -1,133 +0,0 @@
-"""
-Middleware to log all requests and responses.
-Uses a logger configured by the name of django.request
-to log all requests and responses according to configuration
-specified for django.request.
-
-inspired by https://gist.github.com/SehgalDivij/1ca5c647c710a2c3a0397bce5ec1c1b4
-"""
-import json
-
-# import json
-import logging
-import os
-import socket
-import time
-
-from django.utils.deprecation import MiddlewareMixin
-
-logger = logging.getLogger("qfieldcloud.request_response_log")
-
-MAX_RESPONSE_BODY_LENGTH = 1000
-CENSOR_DATA_KEYS = [
-    "password",
-    "token",
-    "Authorization",
-]
-
-
-class RequestResponseLogMiddleware(MiddlewareMixin):
-    """Request Logging Middleware."""
-
-    def __init__(self, *args, **kwargs):
-        """Constructor method."""
-        super().__init__(*args, **kwargs)
-
-    def process_request(self, request):
-        """Set Request Start Time to measure time taken to service request."""
-        request.start_time = time.time()
-
-    def extract_log_info(self, request, response=None, exception=None):
-        """Extract appropriate log info from requests/responses/exceptions."""
-        log_data = {
-            "skip_logging": getattr(request, "skip_logging", False),
-            "remote_address": request.META["REMOTE_ADDR"],
-            "server_hostname": socket.gethostname(),
-            "request_method": request.method,
-            "files": tuple(dict(request.FILES).keys()),
-            "request_path": request.get_full_path(),
-            "request_headers": {**request.headers},
-            "run_time": time.time() - request.start_time,
-        }
-
-        log_data["request_headers"] = self.censor_sensitive_data(
-            log_data["request_headers"]
-        )
-
-        if request.method in ["PUT", "POST", "PATCH"]:
-            if request.content_type == "application/octet-stream":
-                log_data["request_body"] = None
-            else:
-                log_data["request_body"] = request.POST
-            log_data["request_body"] = self.censor_sensitive_data(
-                log_data["request_body"]
-            )
-
-        if hasattr(request, "exception"):
-            log_data["exception"] = request.exception
-
-        if response:
-            if response.get("content-type") == "application/json":
-                response_string = ""
-                if hasattr(response, "data"):
-                    try:
-                        response_string = json.dumps(
-                            response.data, sort_keys=True, indent=1
-                        )
-                    except Exception as err:
-                        response_string = str(response.content, "utf-8")
-                        log_data["json_serialize_error"] = str(err)
-                else:
-                    response_string = str(response.content, "utf-8")
-            else:
-                response_string = str(response.content, "utf-8")
-
-            log_data["response_body"] = response_string[:MAX_RESPONSE_BODY_LENGTH]
-
-            if len(response_string) > MAX_RESPONSE_BODY_LENGTH:
-                log_data["response_trimmed"] = MAX_RESPONSE_BODY_LENGTH
-
-            log_data["response_headers"] = {**response.headers}
-            log_data["status_code"] = response.status_code
-            log_data["response_body"] = self.censor_sensitive_data(
-                log_data["response_body"]
-            )
-
-        return log_data
-
-    def censor_sensitive_data(self, data):
-        # probably needs to be separated for the payload and the headers, but works for now
-
-        if not data:
-            return ""
-
-        data_copy = data
-        if isinstance(data, dict):
-            for key in CENSOR_DATA_KEYS:
-                if key in data_copy:
-                    # copy only if really needed
-                    if id(data) == id(data_copy):
-                        data_copy = {**data}
-
-                    data_copy[key] = "***"
-
-        return data_copy
-
-    def process_response(self, request, response):
-        """Log data using logger."""
-
-        # use Django logger only in the development environment.
-        if request.META.get("SERVER_PORT") != os.environ.get(
-            "WEB_HTTP_PORT"
-        ) and request.META.get("SERVER_PORT") != os.environ.get("WEB_HTTPS_PORT"):
-            log_data = self.extract_log_info(request=request, response=response)
-
-            logger.info(msg="", extra=log_data)
-
-        return response
-
-    def process_exception(self, request, exception):
-        """Log Exceptions."""
-        request.exception = exception
-
-        raise exception
diff --git a/docker-app/qfieldcloud/core/views/status_views.py b/docker-app/qfieldcloud/core/views/status_views.py
index 5e3cc8c19..92577bc15 100644
--- a/docker-app/qfieldcloud/core/views/status_views.py
+++ b/docker-app/qfieldcloud/core/views/status_views.py
@@ -3,7 +3,6 @@
 from django.utils.decorators import method_decorator
 from drf_yasg.utils import swagger_auto_schema
 from qfieldcloud.core import geodb_utils, utils
-from qfieldcloud.core.logging.filters import skip_logging
 from rest_framework import status, views
 from rest_framework.permissions import AllowAny
 from rest_framework.response import Response
@@ -19,7 +18,6 @@ class APIStatusView(views.APIView):

     permission_classes = [AllowAny]

-    @skip_logging
     def get(self, request):
         # Try to get the status from the cache
         results = cache.get("status_results", {})
diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py
index dc240b5b3..28b14454d 100644
--- a/docker-app/qfieldcloud/settings.py
+++ b/docker-app/qfieldcloud/settings.py
@@ -91,7 +91,6 @@
     "django.middleware.clickjacking.XFrameOptionsMiddleware",
     "django_currentuser.middleware.ThreadLocalUserMiddleware",
     "auditlog.middleware.AuditlogMiddleware",
-    "qfieldcloud.core.middleware.request_response_log.RequestResponseLogMiddleware",
     "qfieldcloud.core.middleware.timezone.TimezoneMiddleware",
     "axes.middleware.AxesMiddleware",
 ]
@@ -286,50 +285,20 @@
     "version": 1,
     "disable_existing_loggers": True,
     "formatters": {
-        "request.human": {
-            "()": "qfieldcloud.core.logging.formatters.CustomisedRequestHumanFormatter",
-        },
         "json": {
             "()": "qfieldcloud.core.logging.formatters.CustomisedJSONFormatter",
         },
     },
-    "filters": {
-        "skip_logging": {
-            "()": "qfieldcloud.core.logging.filters.SkipLoggingFilter",
-        },
-        "not_on_debug": {
-            "()": "qfieldcloud.core.logging.filters.NotOnDebugFilter",
-        },
-    },
     "handlers": {
         "console.json": {
             "class": "logging.StreamHandler",
             "formatter": "json",
         },
-        "console.human": {
-            "class": "logging.StreamHandler",
-            "formatter": "request.human",
-        },
     },
     "root": {
         "handlers": ["console.json"],
         "level": "INFO",
     },
-    "loggers": {
-        "qfieldcloud.request_response_log": {
-            "level": LOGLEVEL,
-            "filters": [
-                "skip_logging",
-                "not_on_debug",
-            ],
-            "handlers": [
-                # TODO enable console.json once it is clear how we do store the json logs
-                # 'console.json',
-                "console.human",
-            ],
-            "propagate": False,
-        },
-    },
 }

 DEFAULT_AUTO_FIELD = "django.db.models.AutoField"

From e21aa69b2f5d7650f007f6131f185705cd260b02 Mon Sep 17 00:00:00 2001
From: olivierdalang
Date: Wed, 9 Feb 2022 10:09:03 +0100
Subject: [PATCH 114/185] include debugpy in debug builds to allow remote
 debugging

Then add the following in your script to allow a remote connection:

import debugpy
debugpy.listen(("0.0.0.0", 5678))
print("Waiting for client...")
debugpy.wait_for_client()
---
 docker-app/Dockerfile           | 4 ++++
 docker-compose.override.dev.yml | 6
++++++ 2 files changed, 10 insertions(+) diff --git a/docker-app/Dockerfile b/docker-app/Dockerfile index 1096cf50f..09e5994a8 100644 --- a/docker-app/Dockerfile +++ b/docker-app/Dockerfile @@ -20,6 +20,10 @@ RUN apt update \ COPY ./requirements.txt . RUN pip install -r requirements.txt +# install debug dependencies +ARG DEBUG_BUILD +RUN if [ "$DEBUG_BUILD" = "1" ]; then pip install debugpy; fi + # copy project COPY . . diff --git a/docker-compose.override.dev.yml b/docker-compose.override.dev.yml index 0e60c7995..f9f64ad3e 100644 --- a/docker-compose.override.dev.yml +++ b/docker-compose.override.dev.yml @@ -5,6 +5,12 @@ services: app: depends_on: - geodb + ports: + # debugpy + - "5678:5678" + build: + args: + DEBUG_BUILD: ${DEBUG} geodb: image: postgis/postgis:12-3.0 From e1e5f2fd52b95a7c7269b1d249c396d6104ba7ed Mon Sep 17 00:00:00 2001 From: olivierdalang Date: Wed, 9 Feb 2022 12:33:08 +0100 Subject: [PATCH 115/185] add debug() util --- docker-app/qfieldcloud/utils.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 docker-app/qfieldcloud/utils.py diff --git a/docker-app/qfieldcloud/utils.py b/docker-app/qfieldcloud/utils.py new file mode 100644 index 000000000..7d324a247 --- /dev/null +++ b/docker-app/qfieldcloud/utils.py @@ -0,0 +1,32 @@ +import debugpy + + +def debug(wait=True): + """Starts the debugpy server. + + In your code, add: + ``` + from qfieldcloud.utils import debug + debug() + ``` + + In VSCode, add the following launch configuration, then connect with F5: + ``` + { + "name": "QFieldCloud - Remote attach", + "type": "python", + "request": "attach", + "connect": {"host": "localhost", "port": 5678}, + "pathMappings": [{ + "localRoot": "${workspaceFolder}/docker-app/qfieldcloud", + "remoteRoot": "/usr/src/app/qfieldcloud" + }] + } + ``` + """ + + print("Starting debugging server... 🐛") + debugpy.listen(("0.0.0.0", 5678)) + if wait: + print("Waiting for debugger to connect... 🕰️") + debugpy.wait_for_client() From 29b9686d8e59364b889ac49279976cbc80e5ed28 Mon Sep 17 00:00:00 2001 From: olivierdalang Date: Wed, 9 Feb 2022 14:11:53 +0100 Subject: [PATCH 116/185] remove debug() util but document readme instead --- README.md | 34 +++++++++++++++++++++++++++++++++ docker-app/qfieldcloud/utils.py | 32 ------------------------------- 2 files changed, 34 insertions(+), 32 deletions(-) delete mode 100644 docker-app/qfieldcloud/utils.py diff --git a/README.md b/README.md index cd50cbf38..05c4545be 100644 --- a/README.md +++ b/README.md @@ -78,6 +78,40 @@ To run only a test module (e.g. `test_permission.py`) docker-compose run app python manage.py test qfieldcloud.core.tests.test_permission +### Debugging + +> This section gives examples for VSCode, please adapt to your IDE) + +If using the provided docker-compose overrides for developement, `debugpy` is installed. + +You can debug interactively by adding this snipped anywhere in the code. +```python +import debugpy +debugpy.listen(("0.0.0.0", 5678)) +print("debugpy waiting for debugger... 
🐛") +debugpy.wait_for_client() # optional +``` + +Then, configure your IDE to connect (example given for VSCode's `.vscode/launch.json`, triggered with `F5`): +``` +{ + "version": "0.2.0", + "configurations": [ + { + "name": "QFieldCloud - Remote attach", + "type": "python", + "request": "attach", + "connect": {"host": "localhost", "port": 5678}, + "pathMappings": [{ + "localRoot": "${workspaceFolder}/docker-app/qfieldcloud", + "remoteRoot": "/usr/src/app/qfieldcloud" + }] + } + ] +} +``` + + ## Add root certificate QFieldCloud will automatically generate a certificate and it's root certificate in `./config/nginx/certs`. However, you need to trust the root certificate first, so other programs (e.g. curl) can create secure connection to the local QFieldCloud instance. diff --git a/docker-app/qfieldcloud/utils.py b/docker-app/qfieldcloud/utils.py deleted file mode 100644 index 7d324a247..000000000 --- a/docker-app/qfieldcloud/utils.py +++ /dev/null @@ -1,32 +0,0 @@ -import debugpy - - -def debug(wait=True): - """Starts the debugpy server. - - In your code, add: - ``` - from qfieldcloud.utils import debug - debug() - ``` - - In VSCode, add the following launch configuration, then connect with F5: - ``` - { - "name": "QFieldCloud - Remote attach", - "type": "python", - "request": "attach", - "connect": {"host": "localhost", "port": 5678}, - "pathMappings": [{ - "localRoot": "${workspaceFolder}/docker-app/qfieldcloud", - "remoteRoot": "/usr/src/app/qfieldcloud" - }] - } - ``` - """ - - print("Starting debugging server... 🐛") - debugpy.listen(("0.0.0.0", 5678)) - if wait: - print("Waiting for debugger to connect... 🕰️") - debugpy.wait_for_client() From bac9c6caf8ade1cd502e48f5fde7d5c630ddfacc Mon Sep 17 00:00:00 2001 From: Olivier Dalang Date: Wed, 9 Feb 2022 14:26:26 +0100 Subject: [PATCH 117/185] Also document command line usage of debugpy --- README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/README.md b/README.md index 05c4545be..4e0899f3b 100644 --- a/README.md +++ b/README.md @@ -92,6 +92,11 @@ print("debugpy waiting for debugger... 🐛") debugpy.wait_for_client() # optional ``` +Or alternativley, prefix your commands with `python -m debugpy --listen 0.0.0.0:5678`. 
+```shell +docker-compose run app python -m debugpy --listen 0.0.0.0:5678 manage.py test +``` + Then, configure your IDE to connect (example given for VSCode's `.vscode/launch.json`, triggered with `F5`): ``` { From d65ef44f5e6668d5aaf92f5ab15d649e9b41c4fd Mon Sep 17 00:00:00 2001 From: Olivier Dalang Date: Wed, 9 Feb 2022 16:02:46 +0100 Subject: [PATCH 118/185] Improve how the API handles exceptions (#257) Improve DRF exception handler - rethrow unexpected exceptions in tests (so we get the actual trace to the console instead of uninformative error 500) - don't expose debug details when DEBUG=false - only log unexpected errors with logger.exception (the other are logged with logger.info, as they are not errors - also we now log the original exception instead of the constructed QFieldCloudException) --- docker-app/qfieldcloud/core/rest_utils.py | 18 ++++++++++++++---- docker-app/qfieldcloud/core/tests/test_user.py | 1 + docker-app/qfieldcloud/settings.py | 2 ++ docker-app/qfieldcloud/testing.py | 11 +++++++++++ 4 files changed, 28 insertions(+), 4 deletions(-) create mode 100644 docker-app/qfieldcloud/testing.py diff --git a/docker-app/qfieldcloud/core/rest_utils.py b/docker-app/qfieldcloud/core/rest_utils.py index e5683088e..19dfc5d0e 100644 --- a/docker-app/qfieldcloud/core/rest_utils.py +++ b/docker-app/qfieldcloud/core/rest_utils.py @@ -1,7 +1,9 @@ import logging +from django.conf import settings from django.core import exceptions from qfieldcloud.core import exceptions as qfieldcloud_exceptions +from qfieldcloud.testing import IN_TEST_SUITE from rest_framework import exceptions as rest_exceptions from rest_framework.response import Response @@ -9,6 +11,7 @@ def exception_handler(exc, context): + if isinstance(exc, qfieldcloud_exceptions.QFieldCloudException): pass elif isinstance(exc, rest_exceptions.AuthenticationFailed): @@ -24,21 +27,28 @@ def exception_handler(exc, context): elif isinstance(exc, exceptions.ValidationError): exc = qfieldcloud_exceptions.ValidationError(detail=str(exc)) else: + # When running tests, we rethrow the exception, so we get a full trace to + # help with debugging + if IN_TEST_SUITE: + raise exc + logging.exception(exc) exc = qfieldcloud_exceptions.QFieldCloudException(detail=str(exc)) body = { "code": exc.code, "message": exc.message, - "debug": { + } + + if settings.DEBUG: + body["debug"] = { "view": str(context["view"]), "args": context["args"], "kwargs": context["kwargs"], "request": str(context["request"]), "detail": exc.detail, - }, - } + } - logging.exception(exc) + logging.info(exc) return Response( body, diff --git a/docker-app/qfieldcloud/core/tests/test_user.py b/docker-app/qfieldcloud/core/tests/test_user.py index 9e9c60091..d79c5da08 100644 --- a/docker-app/qfieldcloud/core/tests/test_user.py +++ b/docker-app/qfieldcloud/core/tests/test_user.py @@ -72,6 +72,7 @@ def tearDown(self): self.client.credentials() def test_login(self): + response = self.client.post( "/api/v1/auth/login/", {"username": "user1", "password": "abc123"} ) diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py index 28b14454d..dd87fd717 100644 --- a/docker-app/qfieldcloud/settings.py +++ b/docker-app/qfieldcloud/settings.py @@ -280,6 +280,8 @@ INVITATIONS_ACCEPT_INVITE_AFTER_SIGNUP = True INVITATIONS_GONE_ON_ACCEPT_ERROR = False +TEST_RUNNER = "qfieldcloud.testing.QfcTestSuiteRunner" + LOGLEVEL = os.environ.get("LOGLEVEL", "DEBUG").upper() LOGGING = { "version": 1, diff --git a/docker-app/qfieldcloud/testing.py 
b/docker-app/qfieldcloud/testing.py new file mode 100644 index 000000000..7869a31dc --- /dev/null +++ b/docker-app/qfieldcloud/testing.py @@ -0,0 +1,11 @@ +from django.test.runner import DiscoverRunner + +# Whether we are currently running tests +IN_TEST_SUITE = False + + +class QfcTestSuiteRunner(DiscoverRunner): + def __init__(self, *args, **kwargs): + global IN_TEST_SUITE + IN_TEST_SUITE = True + super().__init__(*args, **kwargs) From 65360c11c750b98aa0d2308aab3326518293cd54 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Feb 2022 08:13:12 +0000 Subject: [PATCH 119/185] Bump django from 3.2.11 to 3.2.12 in /docker-app Bumps [django](https://github.com/django/django) from 3.2.11 to 3.2.12. - [Release notes](https://github.com/django/django/releases) - [Commits](https://github.com/django/django/compare/3.2.11...3.2.12) --- updated-dependencies: - dependency-name: django dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- docker-app/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-app/requirements.txt b/docker-app/requirements.txt index 8985a06ca..618596930 100644 --- a/docker-app/requirements.txt +++ b/docker-app/requirements.txt @@ -17,7 +17,7 @@ coverage==5.3 cryptography==36.0.1 defusedxml==0.7.1 Deprecated==1.2.13 -Django==3.2.11 +Django==3.2.12 django-allauth==0.44.0 django-auditlog==1.0a1 django-axes==5.28.0 From e008b887887bfa052ef3088ac56b83171b774254 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 8 Feb 2022 15:20:09 +0200 Subject: [PATCH 120/185] When a file is uploaded to a project, start a new process_projectfile job This way the order in which files are uploaded is not that important anymore. To save extra unnecessary process jobs, DCIM dir is ignored, since it consists of staticfiles only. --- docker-app/qfieldcloud/core/models.py | 27 +++++++++++++++++-- docker-app/qfieldcloud/core/utils2/storage.py | 17 ++++++++++++ .../qfieldcloud/core/views/files_views.py | 10 ++++--- 3 files changed, 49 insertions(+), 5 deletions(-) diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py index 757f4ae27..4c7e14cac 100644 --- a/docker-app/qfieldcloud/core/models.py +++ b/docker-app/qfieldcloud/core/models.py @@ -4,7 +4,7 @@ import uuid from datetime import timedelta from enum import Enum -from typing import Any, Iterable, Type +from typing import Any, Iterable, List, Type import qfieldcloud.core.utils2.storage from auditlog.registry import auditlog @@ -913,7 +913,30 @@ def storage_size(self): return utils.get_s3_project_size(self.id) @property - def private(self): + def staticfile_dirs(self) -> List[str]: + """Returns a list of configured staticfile dirs for the project. + + Staticfile dir is a special directory in the QField infrastructure that holds static files + such as images, pdf etc. By default "DCIM" is considered a staticfile directory. + + TODO this function expects whether `staticfile_dirs` key in project_details. However, + neither the extraction from the projectfile, nor the configuration in QFieldSync are implemented. + + Returns: + List[str]: A list configured staticfile dirs for the project. 
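+
+        Example (illustrative, not part of the original patch): a project with
+        no `staticfile_dirs` key in `project_details` falls back to the
+        default, i.e. `["DCIM"]`.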
+ """ + staticfile_dirs = [] + + if self.project_details and self.project_details.get("staticfile_dirs"): + staticfile_dirs = self.project_details.get("staticfile_dirs", []) + + if not staticfile_dirs: + staticfile_dirs = ["DCIM"] + + return staticfile_dirs + + @property + def private(self) -> bool: # still used in the project serializer return not self.is_public diff --git a/docker-app/qfieldcloud/core/utils2/storage.py b/docker-app/qfieldcloud/core/utils2/storage.py index 62cb13f9f..df969174a 100644 --- a/docker-app/qfieldcloud/core/utils2/storage.py +++ b/docker-app/qfieldcloud/core/utils2/storage.py @@ -9,6 +9,23 @@ logger = logging.getLogger(__name__) +def staticfile_prefix(project: "Project", filename: str) -> str: # noqa: F821 + """Returns the staticfile dir where the file belongs to or empty string if it does not. + + Args: + project (Project): project to check + filename (str): filename to check + + Returns: + str: the staticfile dir or empty string if no match found + """ + for staticfile_dir in project.staticfile_dirs: + if filename.startswith(staticfile_dir): + return staticfile_dir + + return "" + + def upload_user_avatar(user: "User", file: IO, mimetype: str) -> str: # noqa: F821 """Uploads a picture as a user avatar. diff --git a/docker-app/qfieldcloud/core/views/files_views.py b/docker-app/qfieldcloud/core/views/files_views.py index 22652ca6f..fb87f80b7 100644 --- a/docker-app/qfieldcloud/core/views/files_views.py +++ b/docker-app/qfieldcloud/core/views/files_views.py @@ -6,7 +6,7 @@ from qfieldcloud.core.models import ProcessProjectfileJob, Project from qfieldcloud.core.utils import S3ObjectVersion, get_project_file_with_versions from qfieldcloud.core.utils2.audit import LogEntry, audit -from qfieldcloud.core.utils2.storage import purge_old_file_versions +from qfieldcloud.core.utils2.storage import purge_old_file_versions, staticfile_prefix from rest_framework import permissions, status, views from rest_framework.parsers import MultiPartParser from rest_framework.response import Response @@ -171,8 +171,12 @@ def post(self, request, projectid, filename, format=None): assert new_object - if is_qgis_project_file: - project.project_filename = filename + if staticfile_prefix(project, filename) == "" and ( + is_qgis_project_file or project.project_filename is not None + ): + if is_qgis_project_file: + project.project_filename = filename + ProcessProjectfileJob.objects.create( project=project, created_by=self.request.user ) From 00638cb62dd47d011dc7a843efcc92e46d7ae963 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 11 Feb 2022 04:21:43 +0200 Subject: [PATCH 121/185] Reenable logs (e.g. 
when server error occurred) --- docker-app/qfieldcloud/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py index dd87fd717..4a0557d55 100644 --- a/docker-app/qfieldcloud/settings.py +++ b/docker-app/qfieldcloud/settings.py @@ -285,7 +285,7 @@ LOGLEVEL = os.environ.get("LOGLEVEL", "DEBUG").upper() LOGGING = { "version": 1, - "disable_existing_loggers": True, + "disable_existing_loggers": False, "formatters": { "json": { "()": "qfieldcloud.core.logging.formatters.CustomisedJSONFormatter", From 55a38d1b0002a1ffc2fd67d388f4d99178545552 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 11 Feb 2022 07:55:31 +0200 Subject: [PATCH 122/185] If object storage (s3) returns non-ok status, show 404 --- conf/nginx/pages/404.html | 12 ++++++++++++ conf/nginx/templates/default.conf.template | 4 ++++ docker-compose.yml | 1 + 3 files changed, 17 insertions(+) create mode 100644 conf/nginx/pages/404.html diff --git a/conf/nginx/pages/404.html b/conf/nginx/pages/404.html new file mode 100644 index 000000000..8aa91d829 --- /dev/null +++ b/conf/nginx/pages/404.html @@ -0,0 +1,12 @@ + + + + + + + 404 Not Found + + +
+<center>
+<h1>404 Not Found</h1>
+</center>
+ + diff --git a/conf/nginx/templates/default.conf.template b/conf/nginx/templates/default.conf.template index 8fc80c330..3f715efaa 100644 --- a/conf/nginx/templates/default.conf.template +++ b/conf/nginx/templates/default.conf.template @@ -102,7 +102,11 @@ server { proxy_hide_header X-Amz-Storage-Class; proxy_hide_header X-Amz-Version-Id; + proxy_intercept_errors on; + proxy_pass $redirect_uri; + + error_page 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 500 501 502 503 504 505 =404 /pages/404.html; } } diff --git a/docker-compose.yml b/docker-compose.yml index 1dea8e89d..c966bb654 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -86,6 +86,7 @@ services: volumes: - static_volume:/var/www/html/staticfiles/ - media_volume:/var/www/html/mediafiles/ + - ./conf/nginx/pages/:/var/www/html/pages/ - ./conf/nginx/templates/:/etc/nginx/templates/ - ./conf/nginx/certs/:/etc/nginx/certs/:ro - ./conf/nginx/options-ssl-nginx.conf:/etc/nginx/options-ssl-nginx.conf From fdc01cb21afa27ecf0c39a89d45d9ff333d29197 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 11 Feb 2022 07:56:15 +0200 Subject: [PATCH 123/185] Automatically upgrade http to https on non-standard ports --- conf/nginx/templates/default.conf.template | 2 ++ 1 file changed, 2 insertions(+) diff --git a/conf/nginx/templates/default.conf.template b/conf/nginx/templates/default.conf.template index 3f715efaa..1f2619516 100644 --- a/conf/nginx/templates/default.conf.template +++ b/conf/nginx/templates/default.conf.template @@ -35,6 +35,8 @@ server { # path for static files (only needed for serving local staticfiles) root /var/www/html/; + error_page 497 https://$host:${WEB_HTTPS_PORT}$request_uri; + # checks for static file, if not found proxy to app location / { try_files $uri @proxy_to_app; From 0af7fc67792b913b1c59820ffa032d4cf430122b Mon Sep 17 00:00:00 2001 From: olivierdalang Date: Wed, 9 Feb 2022 15:39:49 +0100 Subject: [PATCH 124/185] [COSMETICS] don't use signal for same-app behavior (django recommends to only use signals for loosely coupled behavior, e.g. from a different optional django app, to keep logic more readable) --- docker-app/qfieldcloud/core/models.py | 66 +++++++++++---------------- 1 file changed, 26 insertions(+), 40 deletions(-) diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py index 757f4ae27..4c875a3ea 100644 --- a/docker-app/qfieldcloud/core/models.py +++ b/docker-app/qfieldcloud/core/models.py @@ -4,7 +4,7 @@ import uuid from datetime import timedelta from enum import Enum -from typing import Any, Iterable, Type +from typing import Iterable import qfieldcloud.core.utils2.storage from auditlog.registry import auditlog @@ -17,8 +17,6 @@ from django.db.models import When from django.db.models.aggregates import Count from django.db.models.fields.json import JSONField -from django.db.models.signals import post_delete, post_save, pre_delete -from django.dispatch import receiver from django.urls import reverse_lazy from django.utils.translation import gettext as _ from model_utils.managers import InheritanceManager @@ -360,12 +358,16 @@ def username_with_full_name(self) -> str: def has_geodb(self) -> bool: return hasattr(self, "geodb") + def save(self, *args, **kwargs): + created = self._state.adding + super().save(*args, **kwargs) + if created: + UserAccount.objects.create(user=self) -# Automatically create a UserAccount instance when a user is created. 
-@receiver(post_save, sender=User) -def create_account_for_user(sender, instance, created, **kwargs): - if created: - UserAccount.objects.create(user=instance) + def delete(self, *args, **kwargs): + if self.user_type != User.TYPE_TEAM: + qfieldcloud.core.utils2.storage.remove_user_avatar(self) + super().delete(*args, **kwargs) class UserAccount(models.Model): @@ -488,6 +490,18 @@ def __str__(self): self.user.username, self.dbname, self.username ) + def save(self, *args, **kwargs): + created = self._state.adding + super().save(*args, **kwargs) + # Automatically create a role and database when a Geodb object is created. + if created: + geodb_utils.create_role_and_db(self) + + def delete(self, *args, **kwargs): + super().delete(*args, **kwargs) + # Automatically delete role and database when a Geodb object is deleted. + geodb_utils.delete_db_and_role(self.dbname, self.username) + class OrganizationQueryset(models.QuerySet): """Adds of_user(user) method to the organization's querysets, allowing to filter only organization related to that user. @@ -568,18 +582,6 @@ def with_roles(self, user): return self.get_queryset().with_roles(user) -# Automatically create a role and database when a Geodb object is created. -@receiver(post_save, sender=Geodb) -def create_geodb(sender, instance, created, **kwargs): - if created: - geodb_utils.create_role_and_db(instance) - - -@receiver(post_delete, sender=Geodb) -def delete_geodb(sender, instance, **kwargs): - geodb_utils.delete_db_and_role(instance.dbname, instance.username) - - class Organization(User): objects = OrganizationManager() @@ -599,21 +601,6 @@ def save(self, *args, **kwargs): return super().save(*args, **kwargs) -@receiver(post_save, sender=Organization) -def create_account_for_organization(sender, instance, created, **kwargs): - if created: - UserAccount.objects.create(user=instance) - - -@receiver(pre_delete, sender=User) -@receiver(pre_delete, sender=Organization) -def delete_user(sender: Type[User], instance: User, **kwargs: Any) -> None: - if instance.user_type == User.TYPE_TEAM: - return - - qfieldcloud.core.utils2.storage.remove_user_avatar(instance) - - class OrganizationMember(models.Model): class Roles(models.TextChoices): ADMIN = "admin", _("Admin") @@ -981,11 +968,10 @@ def status(self) -> Status: else: return Project.Status.OK - -@receiver(pre_delete, sender=Project) -def delete_project(sender: Type[Project], instance: Project, **kwargs: Any) -> None: - if instance.thumbnail_uri: - qfieldcloud.core.utils2.storage.remove_project_thumbail(instance) + def delete(self, *args, **kwargs): + if self.thumbnail_uri: + qfieldcloud.core.utils2.storage.remove_project_thumbail(self) + super().delete(*args, **kwargs) class ProjectCollaborator(models.Model): From 001eb840eeed72a3386b0e23cbac395bc6ee4af2 Mon Sep 17 00:00:00 2001 From: olivierdalang Date: Thu, 10 Feb 2022 17:40:46 +0100 Subject: [PATCH 125/185] [COSMETICS] use COMPOSE_FILE in .env to shorten docker-compose commands --- .env.example | 5 +++++ README.md | 21 ++++++++------------- docker-compose.override.prod.yml | 1 - scripts/check_envvars.py | 8 +++++++- 4 files changed, 20 insertions(+), 15 deletions(-) delete mode 100644 docker-compose.override.prod.yml diff --git a/.env.example b/.env.example index e59d7aeb5..e7bfb94f4 100644 --- a/.env.example +++ b/.env.example @@ -1,3 +1,5 @@ +COMPOSE_FILE=docker-compose.yml:docker-compose.override.local.yml + DEBUG=1 QFIELDCLOUD_HOST=localhost @@ -65,3 +67,6 @@ GUNICORN_TIMEOUT_S=300 GUNICORN_MAX_REQUESTS=300 GUNICORN_WORKERS=3 
GUNICORN_THREADS=3 + +# required for making COMPOSE_FILE above cross-platform (do not change) +COMPOSE_PATH_SEPARATOR=: diff --git a/README.md b/README.md index 4e0899f3b..e727938d2 100644 --- a/README.md +++ b/README.md @@ -39,16 +39,12 @@ desire with a good editor: cp .env.example .env emacs .env -Link or copy `docker-compose.override.local.yml` into `docker-compose.override.yml`: - - ln -s docker-compose.override.local.yml docker-compose.override.yml - To build development images and run the containers: docker-compose up -d --build -It will read `docker-compose.yml` and `docker-compose.override.yml` -and start a django built-in server at `http://localhost:8000`. +It will read the `docker-compose*.yml` files specified in the `COMPOSE_FILE` +variable and start a django built-in server at `http://localhost:8000`. Run the django database migrations. @@ -184,21 +180,20 @@ desire with a good editor cp .env.example .env emacs .env +Do not forget to set DEBUG=0 and to adapt COMPOSE_FILE to not load local +development configurations. + Create the directory for qfieldcloud logs and supervisor socket file mkdir /var/local/qfieldcloud Run and build the docker containers - # dev server: - docker-compose -f docker-compose.yml -f docker-compose.override.dev.yml up -d --build - - # prod server - docker-compose -f docker-compose.yml -f docker-compose.override.prod.yml up -d --build + docker-compose up -d --build Run the django database migrations - docker-compose -f docker-compose.yml -f docker-compose.override.dev.yml exec app python manage.py migrate + docker-compose exec app python manage.py migrate ## Create a certificate using Let's Encrypt @@ -233,7 +228,7 @@ Based on this example Docker logs are managed by docker in the default way. To read the logs: - docker-compose -f docker-compose.yml -f docker-compose.override.dev.yml logs + docker-compose logs ### Geodb diff --git a/docker-compose.override.prod.yml b/docker-compose.override.prod.yml deleted file mode 100644 index 1e5a90c5a..000000000 --- a/docker-compose.override.prod.yml +++ /dev/null @@ -1 +0,0 @@ -version: '3.7' diff --git a/scripts/check_envvars.py b/scripts/check_envvars.py index 6293b881c..a5fcf9fe9 100755 --- a/scripts/check_envvars.py +++ b/scripts/check_envvars.py @@ -18,7 +18,13 @@ def get_env_varnames_from_envfile(filename: str) -> Set[str]: if len(line.strip()) == 0: continue - result.add(line.strip().split("=")[0]) + variable_name = line.strip().split("=")[0] + + # not settings + if variable_name in ["COMPOSE_FILE", "COMPOSE_PATH_SEPARATOR"]: + continue + + result.add(variable_name) return result From b6cca1054895534d674b53772d1723c498228891 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 14 Feb 2022 16:44:05 +0200 Subject: [PATCH 126/185] Add special handling for 404 and 403 and all the rest should be 500 --- conf/nginx/pages/403.html | 12 ++++++++++++ conf/nginx/pages/500.html | 12 ++++++++++++ conf/nginx/templates/default.conf.template | 4 +++- 3 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 conf/nginx/pages/403.html create mode 100644 conf/nginx/pages/500.html diff --git a/conf/nginx/pages/403.html b/conf/nginx/pages/403.html new file mode 100644 index 000000000..0da8e2cd1 --- /dev/null +++ b/conf/nginx/pages/403.html @@ -0,0 +1,12 @@ + + + + + + + 403 Forbidden + + +
+<center>
+<h1>403 Forbidden</h1>
+</center>
+ + diff --git a/conf/nginx/pages/500.html b/conf/nginx/pages/500.html new file mode 100644 index 000000000..02645e68b --- /dev/null +++ b/conf/nginx/pages/500.html @@ -0,0 +1,12 @@ + + + + + + + 500 Internal Server Error + + +
+<center>
+<h1>500 Internal Server Error</h1>
+</center>
+ + diff --git a/conf/nginx/templates/default.conf.template b/conf/nginx/templates/default.conf.template index 1f2619516..cd5c8be12 100644 --- a/conf/nginx/templates/default.conf.template +++ b/conf/nginx/templates/default.conf.template @@ -108,7 +108,9 @@ server { proxy_pass $redirect_uri; - error_page 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 500 501 502 503 504 505 =404 /pages/404.html; + error_page 404 =404 /pages/404.html; + error_page 403 =403 /pages/403.html; + error_page 401 402 405 406 407 408 409 410 411 412 413 414 415 416 417 500 501 502 503 504 505 =500 /pages/500.html; } } From 0c9ec1747de2c2758fa3fd0946ed4149f55e7ac8 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 11 Feb 2022 07:05:25 +0200 Subject: [PATCH 127/185] Download file improvements, hiding the s3 urls; No more redirecting fallback All files from s3 (except packages) are now served either using the nginx X-Acccel-Redirect trick, or directly served by django in DEBUG mode. This way we got rid of the so problematic redirect that was breaking pretty much every client. Also, the `files/meta/` endpoint looks a bit redundant, but it all started with the `files/` API being so unflexible. Too late to change now. --- docker-app/qfieldcloud/core/models.py | 11 +- .../qfieldcloud/core/tests/test_delta.py | 11 +- .../qfieldcloud/core/tests/test_qgis_file.py | 137 ++++++------------ docker-app/qfieldcloud/core/urls.py | 10 ++ .../qfieldcloud/core/utils2/__init__.py | 6 + docker-app/qfieldcloud/core/utils2/jobs.py | 46 +++--- docker-app/qfieldcloud/core/utils2/storage.py | 68 +++++++++ .../qfieldcloud/core/views/files_views.py | 63 ++++---- 8 files changed, 203 insertions(+), 149 deletions(-) create mode 100644 docker-app/qfieldcloud/core/utils2/__init__.py diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py index 4c7e14cac..6d7ebab53 100644 --- a/docker-app/qfieldcloud/core/models.py +++ b/docker-app/qfieldcloud/core/models.py @@ -23,7 +23,6 @@ from django.utils.translation import gettext as _ from model_utils.managers import InheritanceManager from qfieldcloud.core import geodb_utils, utils, validators -from qfieldcloud.core.utils import get_s3_object_url from timezone_field import TimeZoneField # http://springmeblog.com/2018/how-to-implement-multiple-user-types-with-django/ @@ -420,7 +419,10 @@ class UserAccount(models.Model): @property def avatar_url(self): if self.avatar_uri: - return get_s3_object_url(self.avatar_uri) + return reverse_lazy( + "public_files", + kwargs={"filename": self.avatar_uri}, + ) else: return None @@ -895,7 +897,10 @@ class Meta: @property def thumbnail_url(self): if self.thumbnail_uri: - return get_s3_object_url(self.thumbnail_uri) + return reverse_lazy( + "project_metafiles", + kwargs={"projectid": self.id, "filename": self.thumbnail_uri[51:]}, + ) else: return None diff --git a/docker-app/qfieldcloud/core/tests/test_delta.py b/docker-app/qfieldcloud/core/tests/test_delta.py index 851fc2179..572bc2d4f 100644 --- a/docker-app/qfieldcloud/core/tests/test_delta.py +++ b/docker-app/qfieldcloud/core/tests/test_delta.py @@ -4,9 +4,8 @@ import time import fiona -import requests import rest_framework -from django.http.response import HttpResponse, HttpResponseRedirect +from django.http.response import FileResponse, HttpResponse from qfieldcloud.authentication.models import AuthToken from qfieldcloud.core import utils from qfieldcloud.core.models import Job, Project, ProjectCollaborator, User @@ -590,13 +589,13 @@ def 
test_change_and_delete_pushed_only_features(self): def get_file_contents(self, project, filename): response = self.client.get(f"/api/v1/files/{project.id}/{filename}/") - if isinstance(response, HttpResponseRedirect): - response = requests.get(response.url) - self.assertTrue(status.is_success(response.status_code)) self.assertEqual(get_filename(response), filename) - return response.content + if isinstance(response, FileResponse): + return b"".join(response.streaming_content) + else: + return response.content def upload_deltas(self, project, delta_filename): delta_file = testdata_path(f"delta/deltas/{delta_filename}") diff --git a/docker-app/qfieldcloud/core/tests/test_qgis_file.py b/docker-app/qfieldcloud/core/tests/test_qgis_file.py index c131995c7..21b428174 100644 --- a/docker-app/qfieldcloud/core/tests/test_qgis_file.py +++ b/docker-app/qfieldcloud/core/tests/test_qgis_file.py @@ -1,12 +1,11 @@ -import filecmp import io import logging import tempfile import time +from pathlib import PurePath -import requests from django.core.management import call_command -from django.http.response import HttpResponseRedirect +from django.http import FileResponse from qfieldcloud.authentication.models import AuthToken from qfieldcloud.core import utils from qfieldcloud.core.models import Project, User, UserAccount @@ -49,6 +48,21 @@ def tearDown(self): # Remove credentials self.client.credentials() + def get_file_contents(self, project, filename, version=None): + qs = "" + if version: + qs = f"?version={version}" + + response = self.client.get(f"/api/v1/files/{project.id}/{filename}/{qs}") + + self.assertTrue(status.is_success(response.status_code)) + self.assertEqual(get_filename(response), PurePath(filename).name) + + if isinstance(response, FileResponse): + return b"".join(response.streaming_content) + else: + return response.content + def test_push_file(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) @@ -85,24 +99,10 @@ def test_push_download_file(self): self.assertTrue(status.is_success(response.status_code)) self.assertEqual(Project.objects.get(pk=self.project1.pk).files_count, 1) - # Pull the file - response = self.client.get(f"/api/v1/files/{self.project1.id}/file.txt/") - - self.assertIsInstance(response, HttpResponseRedirect) - - response = requests.get(response.url, stream=True) - - self.assertTrue(status.is_success(response.status_code)) - self.assertEqual(get_filename(response), "file.txt") - - temp_file = tempfile.NamedTemporaryFile() - - with open(temp_file.name, "wb") as f: - for chunk in response.iter_content(): - if chunk: # filter out keep-alive new chunks - f.write(chunk) - - self.assertTrue(filecmp.cmp(temp_file.name, testdata_path("file.txt"))) + self.assertEqual( + self.get_file_contents(self.project1, "file.txt"), + open(testdata_path("file.txt"), "rb").read(), + ) def test_push_download_file_with_path(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) @@ -124,24 +124,17 @@ def test_push_download_file_with_path(self): # Pull the file response = self.client.get( - f"/api/v1/files/{self.project1.id}/foo/bar/file.txt/" + f"/api/v1/files/{self.project1.id}/foo/bar/file.txt/", + stream=True, ) - self.assertIsInstance(response, HttpResponseRedirect) - - response = requests.get(response.url, stream=True) - self.assertTrue(status.is_success(response.status_code)) - self.assertEqual(get_filename(response), "foo/bar/file.txt") - - temp_file = tempfile.NamedTemporaryFile() - - with open(temp_file.name, "wb") as f: - for chunk in 
response.iter_content(): - if chunk: # filter out keep-alive new chunks - f.write(chunk) + self.assertEqual(get_filename(response), "file.txt") - self.assertTrue(filecmp.cmp(temp_file.name, testdata_path("file.txt"))) + self.assertEqual( + self.get_file_contents(self.project1, "foo/bar/file.txt"), + open(testdata_path("file.txt"), "rb").read(), + ) def test_push_list_file(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) @@ -308,25 +301,15 @@ def test_push_download_specific_version_file(self): self.assertTrue(status.is_success(response.status_code)) self.assertEqual(Project.objects.get(pk=self.project1.pk).files_count, 1) - # Pull the last file (without version parameter) - response = self.client.get(f"/api/v1/files/{self.project1.id}/file.txt/") - - self.assertIsInstance(response, HttpResponseRedirect) - - response = requests.get(response.url, stream=True) - - self.assertTrue(status.is_success(response.status_code)) - self.assertEqual(get_filename(response), "file.txt") - - temp_file = tempfile.NamedTemporaryFile() - - with open(temp_file.name, "wb") as f: - for chunk in response.iter_content(): - if chunk: # filter out keep-alive new chunks - f.write(chunk) + self.assertNotEqual( + self.get_file_contents(self.project1, "file.txt"), + open(testdata_path("file.txt"), "rb").read(), + ) - self.assertFalse(filecmp.cmp(temp_file.name, testdata_path("file.txt"))) - self.assertTrue(filecmp.cmp(temp_file.name, testdata_path("file2.txt"))) + self.assertEqual( + self.get_file_contents(self.project1, "file.txt"), + open(testdata_path("file2.txt"), "rb").read(), + ) # List files response = self.client.get("/api/v1/files/{}/".format(self.project1.id)) @@ -337,49 +320,21 @@ def test_push_download_specific_version_file(self): ) # Pull the oldest version - response = self.client.get( - f"/api/v1/files/{self.project1.id}/file.txt/", - {"version": versions[0]["version_id"]}, + self.assertEqual( + self.get_file_contents( + self.project1, "file.txt", versions[0]["version_id"] + ), + open(testdata_path("file.txt"), "rb").read(), ) - self.assertIsInstance(response, HttpResponseRedirect) - - response = requests.get(response.url, stream=True) - - self.assertTrue(status.is_success(response.status_code)) - self.assertEqual(get_filename(response), "file.txt") - - temp_file = tempfile.NamedTemporaryFile() - - with open(temp_file.name, "wb") as f: - for chunk in response.iter_content(): - if chunk: # filter out keep-alive new chunks - f.write(chunk) - - self.assertTrue(filecmp.cmp(temp_file.name, testdata_path("file.txt"))) - # Pull the newest version - response = self.client.get( - f"/api/v1/files/{self.project1.id}/file.txt/", - {"version": versions[1]["version_id"]}, + self.assertEqual( + self.get_file_contents( + self.project1, "file.txt", versions[1]["version_id"] + ), + open(testdata_path("file2.txt"), "rb").read(), ) - self.assertIsInstance(response, HttpResponseRedirect) - - response = requests.get(response.url, stream=True) - - self.assertTrue(status.is_success(response.status_code)) - self.assertEqual(get_filename(response), "file.txt") - - temp_file = tempfile.NamedTemporaryFile() - - with open(temp_file.name, "wb") as f: - for chunk in response.iter_content(): - if chunk: # filter out keep-alive new chunks - f.write(chunk) - - self.assertTrue(filecmp.cmp(temp_file.name, testdata_path("file2.txt"))) - def test_push_delete_file(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) diff --git a/docker-app/qfieldcloud/core/urls.py 
b/docker-app/qfieldcloud/core/urls.py index 51f7eb6b0..a975caf02 100644 --- a/docker-app/qfieldcloud/core/urls.py +++ b/docker-app/qfieldcloud/core/urls.py @@ -62,6 +62,16 @@ files_views.DownloadPushDeleteFileView.as_view(), name="project_file_download", ), + path( + "files/meta//", + files_views.ProjectMetafilesView.as_view(), + name="project_metafiles", + ), + path( + "files/public/", + files_views.PublicFilesView.as_view(), + name="public_files", + ), path( "packages//latest/", package_views.LatestPackageView.as_view(), diff --git a/docker-app/qfieldcloud/core/utils2/__init__.py b/docker-app/qfieldcloud/core/utils2/__init__.py new file mode 100644 index 000000000..dfd92040c --- /dev/null +++ b/docker-app/qfieldcloud/core/utils2/__init__.py @@ -0,0 +1,6 @@ +import qfieldcloud.core.utils2.audit as audit +import qfieldcloud.core.utils2.db as db +import qfieldcloud.core.utils2.jobs as jobs +import qfieldcloud.core.utils2.storage as storage + +__all__ = ["audit", "db", "jobs", "storage"] diff --git a/docker-app/qfieldcloud/core/utils2/jobs.py b/docker-app/qfieldcloud/core/utils2/jobs.py index 14303370c..4d4bb40dd 100644 --- a/docker-app/qfieldcloud/core/utils2/jobs.py +++ b/docker-app/qfieldcloud/core/utils2/jobs.py @@ -1,36 +1,40 @@ import logging -from typing import Optional +from typing import List, Optional +import qfieldcloud.core.models as models from django.db.models import Q from qfieldcloud.core import exceptions -from qfieldcloud.core.models import ApplyJob, Delta, Job, PackageJob, Project, User logger = logging.getLogger(__name__) def apply_deltas( - project, user, project_file, overwrite_conflicts, delta_ids=None -) -> Optional[ApplyJob]: + project: "models.Project", + user: "models.User", + project_file: str, + overwrite_conflicts: bool, + delta_ids: List[str] = None, +) -> Optional["models.ApplyJob"]: """Apply a deltas""" logger.info( f"Requested apply_deltas on {project} with {project_file}; overwrite_conflicts: {overwrite_conflicts}; delta_ids: {delta_ids}" ) - apply_jobs = ApplyJob.objects.filter( + apply_jobs = models.ApplyJob.objects.filter( project=project, status=[ - Job.Status.PENDING, - Job.Status.QUEUED, + models.Job.Status.PENDING, + models.Job.Status.QUEUED, ], ) if len(apply_jobs) > 0: return apply_jobs[0] - pending_deltas = Delta.objects.filter( + pending_deltas = models.Delta.objects.filter( project=project, - last_status=Delta.Status.PENDING, + last_status=models.Delta.Status.PENDING, ) if delta_ids is not None: @@ -39,7 +43,7 @@ def apply_deltas( if len(pending_deltas) == 0: return None - apply_job = ApplyJob.objects.create( + apply_job = models.ApplyJob.objects.create( project=project, created_by=user, overwrite_conflicts=overwrite_conflicts, @@ -48,7 +52,7 @@ def apply_deltas( return apply_job -def repackage(project: Project, user: User) -> PackageJob: +def repackage(project: "models.Project", user: "models.User") -> "models.PackageJob": """Returns an unfinished or freshly created package job. 
Checks if there is already an unfinished package job and returns it, @@ -59,20 +63,22 @@ def repackage(project: Project, user: User) -> PackageJob: # Check if active package job already exists query = Q(project=project) & ( - Q(status=PackageJob.Status.PENDING) - | Q(status=PackageJob.Status.QUEUED) - | Q(status=PackageJob.Status.STARTED) + Q(status=models.PackageJob.Status.PENDING) + | Q(status=models.PackageJob.Status.QUEUED) + | Q(status=models.PackageJob.Status.STARTED) ) - if PackageJob.objects.filter(query).count(): - return PackageJob.objects.get(query) + if models.PackageJob.objects.filter(query).count(): + return models.PackageJob.objects.get(query) - package_job = PackageJob.objects.create(project=project, created_by=user) + package_job = models.PackageJob.objects.create(project=project, created_by=user) return package_job -def repackage_if_needed(project: Project, user: User) -> PackageJob: +def repackage_if_needed( + project: "models.Project", user: "models.User" +) -> "models.PackageJob": if not project.project_filename: raise exceptions.NoQGISProjectError() @@ -80,7 +86,9 @@ def repackage_if_needed(project: Project, user: User) -> PackageJob: package_job = repackage(project, user) else: package_job = ( - PackageJob.objects.filter(project=project).order_by("started_at").get() + models.PackageJob.objects.filter(project=project) + .order_by("started_at") + .get() ) return package_job diff --git a/docker-app/qfieldcloud/core/utils2/storage.py b/docker-app/qfieldcloud/core/utils2/storage.py index df969174a..469593215 100644 --- a/docker-app/qfieldcloud/core/utils2/storage.py +++ b/docker-app/qfieldcloud/core/utils2/storage.py @@ -1,10 +1,15 @@ from __future__ import annotations import logging +from pathlib import PurePath from typing import IO, List import qfieldcloud.core.models import qfieldcloud.core.utils +from django.conf import settings +from django.core.files.base import ContentFile +from django.http import FileResponse, HttpRequest +from django.http.response import HttpResponse, HttpResponseBase logger = logging.getLogger(__name__) @@ -26,6 +31,69 @@ def staticfile_prefix(project: "Project", filename: str) -> str: # noqa: F821 return "" +def file_response( + request: HttpRequest, + key: str, + presigned: bool = False, + expires: int = 60, + version: str = None, + as_attachment: bool = False, +) -> HttpResponseBase: + url = "" + filename = PurePath(key).name + extra_params = {} + + if version is not None: + extra_params["VersionId"] = version + + # check if we are in NGINX proxy + if request.META.get("HTTP_HOST", "").split(":")[-1] == request.META.get( + "WEB_HTTPS_PORT" + ): + if presigned: + if as_attachment: + extra_params["ResponseContentType"] = "application/force-download" + extra_params[ + "ResponseContentDisposition" + ] = f'attachment;filename="{filename}"' + + url = qfieldcloud.core.utils.get_s3_client().generate_presigned_url( + "get_object", + Params={ + **extra_params, + "Key": key, + "Bucket": qfieldcloud.core.utils.get_s3_bucket().name, + }, + ExpiresIn=expires, + HttpMethod="GET", + ) + else: + url = qfieldcloud.core.utils.get_s3_object_url(key) + + # Let's NGINX handle the redirect to the storage and streaming the file contents back to the client + response = HttpResponse() + response["X-Accel-Redirect"] = "/storage-download/" + response["redirect_uri"] = url + + return response + elif settings.DEBUG: + return_file = ContentFile(b"") + qfieldcloud.core.utils.get_s3_bucket().download_fileobj( + key, + return_file, + extra_params, + ) + + return FileResponse( 
+ return_file.open(), + as_attachment=as_attachment, + filename=filename, + content_type="text/html", + ) + + raise Exception("Expected to either run behind nginx proxy or in debug mode.") + + def upload_user_avatar(user: "User", file: IO, mimetype: str) -> str: # noqa: F821 """Uploads a picture as a user avatar. diff --git a/docker-app/qfieldcloud/core/views/files_views.py b/docker-app/qfieldcloud/core/views/files_views.py index fb87f80b7..43ab762d2 100644 --- a/docker-app/qfieldcloud/core/views/files_views.py +++ b/docker-app/qfieldcloud/core/views/files_views.py @@ -1,6 +1,6 @@ from pathlib import PurePath -from django.http.response import HttpResponse, HttpResponseRedirect +import qfieldcloud.core.utils2 as utils2 from django.utils import timezone from qfieldcloud.core import exceptions, permissions_utils, utils from qfieldcloud.core.models import ProcessProjectfileJob, Project @@ -107,38 +107,19 @@ class DownloadPushDeleteFileView(views.APIView): def get(self, request, projectid, filename): Project.objects.get(id=projectid) - extra_args = {} + version = None if "version" in self.request.query_params: version = self.request.query_params["version"] - extra_args["VersionId"] = version - - filekey = utils.safe_join("projects/{}/files/".format(projectid), filename) - - url = utils.get_s3_client().generate_presigned_url( - "get_object", - Params={ - **extra_args, - "Key": filekey, - "Bucket": utils.get_s3_bucket().name, - "ResponseContentType": "application/force-download", - "ResponseContentDisposition": f'attachment;filename="{filename}"', - }, - ExpiresIn=600, - HttpMethod="GET", - ) - - if request.META.get("HTTP_HOST", "").split(":")[-1] == request.META.get( - "WEB_HTTPS_PORT" - ): - # Let's NGINX handle the redirect to the storage and streaming the file contents back to the client - response = HttpResponse() - response["X-Accel-Redirect"] = "/storage-download/" - response["redirect_uri"] = url - return response - else: - # requesting the Django development webserver - return HttpResponseRedirect(url) + key = utils.safe_join("projects/{}/files/".format(projectid), filename) + return utils2.storage.file_response( + request, + key, + presigned=True, + expires=600, + version=version, + as_attachment=True, + ) def post(self, request, projectid, filename, format=None): project = Project.objects.get(id=projectid) @@ -224,3 +205,25 @@ def delete(self, request, projectid, filename): ) return Response(status=status.HTTP_200_OK) + + +class ProjectMetafilesView(views.APIView): + parser_classes = [MultiPartParser] + permission_classes = [ + permissions.IsAuthenticated, + DownloadPushDeleteFileViewPermissions, + ] + + def get(self, request, projectid, filename): + key = utils.safe_join("projects/{}/meta/".format(projectid), filename) + return utils2.storage.file_response(request, key, presigned=True) + + +class PublicFilesView(views.APIView): + parser_classes = [MultiPartParser] + permission_classes = [ + permissions.IsAuthenticated, + ] + + def get(self, request, filename): + return utils2.storage.file_response(request, filename) From f7ae4916a67d67090aa42e307476936c3db429e8 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 11 Feb 2022 02:24:07 +0200 Subject: [PATCH 128/185] Debug improvements Note the recommended local development docker-compose is the one with `local` suffix. 
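As a side note, a sketch (not part of this patch) of gating the README snippet
so it only ever starts in this local setup; the DEBUGPY environment variable
is hypothetical, nothing in the stack defines it:

import os

if os.environ.get("DEBUGPY") == "1":  # hypothetical opt-in flag
    import debugpy

    debugpy.listen(("0.0.0.0", 5678))
    debugpy.wait_for_client()  # block until the IDE attaches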
--- README.md | 2 +- docker-compose.override.dev.yml | 3 --- docker-compose.override.local.yml | 2 ++ 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 4e0899f3b..106330915 100644 --- a/README.md +++ b/README.md @@ -94,7 +94,7 @@ debugpy.wait_for_client() # optional Or alternativley, prefix your commands with `python -m debugpy --listen 0.0.0.0:5678`. ```shell -docker-compose run app python -m debugpy --listen 0.0.0.0:5678 manage.py test +docker-compose run app -p 5678:5678 python -m debugpy --listen 0.0.0.0:5678 manage.py test ``` Then, configure your IDE to connect (example given for VSCode's `.vscode/launch.json`, triggered with `F5`): diff --git a/docker-compose.override.dev.yml b/docker-compose.override.dev.yml index f9f64ad3e..0676ce03b 100644 --- a/docker-compose.override.dev.yml +++ b/docker-compose.override.dev.yml @@ -5,9 +5,6 @@ services: app: depends_on: - geodb - ports: - # debugpy - - "5678:5678" build: args: DEBUG_BUILD: ${DEBUG} diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml index d43bd0102..5a34c5ee1 100644 --- a/docker-compose.override.local.yml +++ b/docker-compose.override.local.yml @@ -6,6 +6,8 @@ services: ports: # allow direct access without nginx - "5001:8000" + # debugpy + - "5678:5678" volumes: # mount the source for live reload - ./docker-app/qfieldcloud:/usr/src/app/qfieldcloud From dfb41fa7a30aa08e52f40d0f2d43a59b5db335f7 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 14 Feb 2022 17:25:45 +0200 Subject: [PATCH 129/185] Add debug to the worker wrapper. Use different port so you can debug in parallel. --- README.md | 14 +++++++++++++- docker-compose.override.local.yml | 3 +++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 106330915..681701181 100644 --- a/README.md +++ b/README.md @@ -103,14 +103,26 @@ Then, configure your IDE to connect (example given for VSCode's `.vscode/launch. 
"version": "0.2.0", "configurations": [ { - "name": "QFieldCloud - Remote attach", + "name": "QFC debug app", "type": "python", "request": "attach", + "justMyCode": false, "connect": {"host": "localhost", "port": 5678}, "pathMappings": [{ "localRoot": "${workspaceFolder}/docker-app/qfieldcloud", "remoteRoot": "/usr/src/app/qfieldcloud" }] + }, + { + "name": "QFC debug worker_wrapper", + "type": "python", + "request": "attach", + "justMyCode": false, + "connect": {"host": "localhost", "port": 5679}, + "pathMappings": [{ + "localRoot": "${workspaceFolder}/docker-app/qfieldcloud", + "remoteRoot": "/usr/src/app/qfieldcloud" + }] } ] } diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml index 5a34c5ee1..bdb5f908a 100644 --- a/docker-compose.override.local.yml +++ b/docker-compose.override.local.yml @@ -24,6 +24,9 @@ services: - smtp4dev worker_wrapper: + ports: + # debugpy + - "5679:5679" volumes: # mount the source for live reload - ./docker-app/qfieldcloud:/usr/src/app/qfieldcloud From f42246568ca5352adaee9888b8db5ff67b27f916 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 11 Feb 2022 07:09:03 +0200 Subject: [PATCH 130/185] When using local docker-compose, set DEBUG_BUILD=1 by default --- docker-compose.override.local.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml index bdb5f908a..8c2360f1f 100644 --- a/docker-compose.override.local.yml +++ b/docker-compose.override.local.yml @@ -3,6 +3,9 @@ version: '3.7' services: app: + build: + args: + - DEBUG_BUILD=1 ports: # allow direct access without nginx - "5001:8000" @@ -24,6 +27,9 @@ services: - smtp4dev worker_wrapper: + build: + args: + - DEBUG_BUILD=1 ports: # debugpy - "5679:5679" From d00120f01112cc13095ae7926d59ca975dcdfe11 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 14 Feb 2022 20:04:17 +0200 Subject: [PATCH 131/185] Move to entirely to `docker-compose run` for debugging --- README.md | 5 +++-- docker-compose.override.local.yml | 5 ----- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 681701181..584e3ba98 100644 --- a/README.md +++ b/README.md @@ -92,9 +92,10 @@ print("debugpy waiting for debugger... 🐛") debugpy.wait_for_client() # optional ``` -Or alternativley, prefix your commands with `python -m debugpy --listen 0.0.0.0:5678`. +Or alternativley, prefix your commands with `python -m debugpy --listen 0.0.0.0:5678 --wait-for-client`. 
```shell -docker-compose run -p 5678:5678 app python -m debugpy --listen 0.0.0.0:5678 manage.py test +docker-compose run -p 5678:5678 app python -m debugpy --listen 0.0.0.0:5678 --wait-for-client manage.py test +docker-compose run -p 5679:5679 worker_wrapper python -m debugpy --listen 0.0.0.0:5679 --wait-for-client manage.py test ``` Then, configure your IDE to connect (example given for VSCode's `.vscode/launch.json`, triggered with `F5`): diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml index 8c2360f1f..999a985f8 100644 --- a/docker-compose.override.local.yml +++ b/docker-compose.override.local.yml @@ -9,8 +9,6 @@ services: ports: # allow direct access without nginx - "5001:8000" - # debugpy - - "5678:5678" volumes: # mount the source for live reload - ./docker-app/qfieldcloud:/usr/src/app/qfieldcloud @@ -30,9 +28,6 @@ services: build: args: - DEBUG_BUILD=1 - ports: - # debugpy - - "5679:5679" volumes: # mount the source for live reload - ./docker-app/qfieldcloud:/usr/src/app/qfieldcloud From 71ea226bc53dd080a269a94788316e6e1d9bcab1 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 14 Feb 2022 20:06:56 +0200 Subject: [PATCH 132/185] Move IN_TEST_SUITE from global variable to a settings config --- docker-app/qfieldcloud/core/rest_utils.py | 3 +-- docker-app/qfieldcloud/settings.py | 3 +++ docker-app/qfieldcloud/testing.py | 7 ++----- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/docker-app/qfieldcloud/core/rest_utils.py b/docker-app/qfieldcloud/core/rest_utils.py index 19dfc5d0e..3c1b1d8d7 100644 --- a/docker-app/qfieldcloud/core/rest_utils.py +++ b/docker-app/qfieldcloud/core/rest_utils.py @@ -3,7 +3,6 @@ from django.conf import settings from django.core import exceptions from qfieldcloud.core import exceptions as qfieldcloud_exceptions -from qfieldcloud.testing import IN_TEST_SUITE from rest_framework import exceptions as rest_exceptions from rest_framework.response import Response @@ -29,7 +28,7 @@ def exception_handler(exc, context): else: # When running tests, we rethrow the exception, so we get a full trace to # help with debugging - if IN_TEST_SUITE: + if settings.IN_TEST_SUITE: raise exc logging.exception(exc) exc = qfieldcloud_exceptions.QFieldCloudException(detail=str(exc)) diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py index 4a0557d55..c656c25fc 100644 --- a/docker-app/qfieldcloud/settings.py +++ b/docker-app/qfieldcloud/settings.py @@ -305,6 +305,9 @@ DEFAULT_AUTO_FIELD = "django.db.models.AutoField" +# Whether we are currently running tests +# NOTE automatically set when running tests, don't change manually!
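+# It is set to True by QfcTestSuiteRunner (qfieldcloud/testing.py) when the test runner starts.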
+IN_TEST_SUITE = False QFIELDCLOUD_TOKEN_SERIALIZER = "qfieldcloud.core.serializers.TokenSerializer" QFIELDCLOUD_USER_SERIALIZER = "qfieldcloud.core.serializers.CompleteUserSerializer" diff --git a/docker-app/qfieldcloud/testing.py b/docker-app/qfieldcloud/testing.py index 7869a31dc..138cc54fa 100644 --- a/docker-app/qfieldcloud/testing.py +++ b/docker-app/qfieldcloud/testing.py @@ -1,11 +1,8 @@ +from django.conf import settings from django.test.runner import DiscoverRunner -# Whether we are currently running tests -IN_TEST_SUITE = False - class QfcTestSuiteRunner(DiscoverRunner): def __init__(self, *args, **kwargs): - global IN_TEST_SUITE - IN_TEST_SUITE = True + settings.IN_TEST_SUITE = True super().__init__(*args, **kwargs) From b6edf2d857464b2403cfce21be169a1251e32263 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 14 Feb 2022 18:49:35 +0200 Subject: [PATCH 133/185] Fix tests --- docker-app/qfieldcloud/core/utils2/storage.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docker-app/qfieldcloud/core/utils2/storage.py b/docker-app/qfieldcloud/core/utils2/storage.py index 469593215..05f37211f 100644 --- a/docker-app/qfieldcloud/core/utils2/storage.py +++ b/docker-app/qfieldcloud/core/utils2/storage.py @@ -76,7 +76,7 @@ def file_response( response["redirect_uri"] = url return response - elif settings.DEBUG: + elif settings.DEBUG or settings.IN_TEST_SUITE: return_file = ContentFile(b"") qfieldcloud.core.utils.get_s3_bucket().download_fileobj( key, @@ -91,7 +91,9 @@ content_type="text/html", ) - raise Exception("Expected to either run behind nginx proxy or in debug mode.") + raise Exception( + "Expected to run either behind the nginx proxy, in debug mode, or within a test suite." + ) def upload_user_avatar(user: "User", file: IO, mimetype: str) -> str: # noqa: F821 From 99efc876ff1d2cf2e952a37f065d34abbb62cd2c Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 11 Feb 2022 10:30:26 +0200 Subject: [PATCH 134/185] Make qfield-files responses use nginx X-Accel-Redirect instead of HTTP redirect So there are no longer weird redirect cases for any type of file in QFC.
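For context, X-Accel-Redirect works by returning an empty response with a special header; nginx intercepts it and streams the file itself from a matching `internal` location. A minimal sketch of the mechanism (the `/storage-download/` location prefix is a hypothetical placeholder, not the actual nginx config):

```python
from django.http import HttpResponse

def accel_redirect_response(key: str) -> HttpResponse:
    # Empty body on purpose: nginx swaps this response for the object
    # itself, proxied from the storage backend bound to the internal
    # location, so Django never buffers the file.
    response = HttpResponse()
    response["X-Accel-Redirect"] = f"/storage-download/{key}"
    return response
```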
--- .../qfieldcloud/core/tests/test_packages.py | 20 +++++++------------ docker-app/qfieldcloud/core/urls.py | 2 +- .../qfieldcloud/core/views/package_views.py | 19 +++++------------- 3 files changed, 13 insertions(+), 28 deletions(-) diff --git a/docker-app/qfieldcloud/core/tests/test_packages.py b/docker-app/qfieldcloud/core/tests/test_packages.py index b1d12687d..2607e9d1a 100644 --- a/docker-app/qfieldcloud/core/tests/test_packages.py +++ b/docker-app/qfieldcloud/core/tests/test_packages.py @@ -5,8 +5,7 @@ from typing import List, Tuple import psycopg2 -import requests -from django.http.response import HttpResponseRedirect +from django.http import FileResponse from django.utils import timezone from qfieldcloud.authentication.models import AuthToken from qfieldcloud.core.geodb_utils import delete_db_and_role @@ -179,20 +178,15 @@ def upload_files_and_check_package( if tempdir: for filename in expected_files: response = self.client.get( - f"/api/v1/qfield-files/{self.project1.id}/project_qfield.qgs/" + f"/api/v1/packages/{self.project1.id}/latest/files/project_qfield.qgs/" ) local_file = os.path.join(tempdir, filename) - self.assertIsInstance(response, HttpResponseRedirect) + self.assertIsInstance(response, FileResponse) - # We cannot use the self.client HTTP client, since it does not support - # requests outside the current Django App - # Using the rest_api_framework.RequestsClient is not much better, so better - # use the `requests` module - with requests.get(response.url, stream=True) as r: - with open(local_file, "wb") as f: - for chunk in r.iter_content(): - f.write(chunk) + with open(local_file, "wb") as f: + for chunk in response.streaming_content: + f.write(chunk) for layer_id in package_payload["layers"]: layer_data = package_payload["layers"][layer_id] @@ -227,7 +221,7 @@ def test_list_files_for_qfield(self): expected_files=["data.gpkg", "project_qfield.qgs"], ) - def test_list_files_missing_project_filename(self): + def test_list_files_missing_qgis_project_file(self): self.upload_files_and_check_package( token=self.token1.key, project=self.project1, diff --git a/docker-app/qfieldcloud/core/urls.py b/docker-app/qfieldcloud/core/urls.py index a975caf02..63c2df070 100644 --- a/docker-app/qfieldcloud/core/urls.py +++ b/docker-app/qfieldcloud/core/urls.py @@ -77,7 +77,7 @@ package_views.LatestPackageView.as_view(), ), path( - "packages/<project_id>/latest/files/<filename>", + "packages/<project_id>/latest/files/<filename>/", package_views.LatestPackageDownloadFilesView.as_view(), ), path("qfield-files/<projectid>/", qfield_files_views.ListFilesView.as_view()), diff --git a/docker-app/qfieldcloud/core/views/package_views.py b/docker-app/qfieldcloud/core/views/package_views.py index 323b72366..13de9a0e9 100644 --- a/docker-app/qfieldcloud/core/views/package_views.py +++ b/docker-app/qfieldcloud/core/views/package_views.py @@ -1,5 +1,5 @@ +import qfieldcloud.core.utils2 as utils2 from django.core.exceptions import ObjectDoesNotExist -from django.http.response import HttpResponseRedirect from qfieldcloud.core import exceptions, permissions_utils, utils from qfieldcloud.core.models import PackageJob, Project from qfieldcloud.core.utils import check_s3_key, get_project_package_files @@ -95,17 +95,8 @@ def get(self, request, project_id, filename): "Packaging has never been triggered or successful for this project."
) - file_key = f"projects/{project_id}/export/{filename}" - url = utils.get_s3_client().generate_presigned_url( - "get_object", - Params={ - "Key": file_key, - "Bucket": utils.get_s3_bucket().name, - "ResponseContentType": "application/force-download", - "ResponseContentDisposition": f'attachment;filename="{filename}"', - }, - ExpiresIn=60, - HttpMethod="GET", - ) + key = utils.safe_join("projects/{}/export/".format(project_id), filename) - return HttpResponseRedirect(url) + return utils2.storage.file_response( + request, key, presigned=True, expires=600, as_attachment=True + ) From f19c7ca933f77915e8be0af184fc843acf3b9c19 Mon Sep 17 00:00:00 2001 From: olivierdalang Date: Tue, 15 Feb 2022 17:15:45 +0100 Subject: [PATCH 135/185] remove useless tearDown methods seems they that could be linked to flakyness... - django tests already reset the database at then end of test case - same for client, which is also recreated for each test case --- docker-app/qfieldcloud/core/tests/test_delta.py | 17 ----------------- docker-app/qfieldcloud/core/tests/test_geodb.py | 3 --- .../qfieldcloud/core/tests/test_organization.py | 4 ---- .../qfieldcloud/core/tests/test_packages.py | 12 ------------ .../qfieldcloud/core/tests/test_permission.py | 10 ---------- .../qfieldcloud/core/tests/test_project.py | 11 ----------- .../qfieldcloud/core/tests/test_qfield_file.py | 12 ------------ .../qfieldcloud/core/tests/test_qgis_file.py | 14 -------------- .../qfieldcloud/core/tests/test_queryset.py | 4 ---- docker-app/qfieldcloud/core/tests/test_user.py | 4 ---- 10 files changed, 91 deletions(-) diff --git a/docker-app/qfieldcloud/core/tests/test_delta.py b/docker-app/qfieldcloud/core/tests/test_delta.py index 572bc2d4f..6347dd2b4 100644 --- a/docker-app/qfieldcloud/core/tests/test_delta.py +++ b/docker-app/qfieldcloud/core/tests/test_delta.py @@ -51,23 +51,6 @@ def setUp(self): role=ProjectCollaborator.Roles.ADMIN, ) - def tearDown(self): - # Remove credentials - self.client.credentials() - - @classmethod - def tearDownClass(cls): - # Remove all projects avoiding bulk delete in order to use - # the overridden delete() function in the model - for p in Project.objects.all(): - bucket = utils.get_s3_bucket() - prefix = utils.safe_join(f"projects/{p.id}/") - bucket.objects.filter(Prefix=prefix).delete() - - p.delete() - - User.objects.all().delete() - def fail(self, msg: str, job: Job = None): if job: msg += f"\n\nOutput:\n================\n{job.output}\n================" diff --git a/docker-app/qfieldcloud/core/tests/test_geodb.py b/docker-app/qfieldcloud/core/tests/test_geodb.py index 65bf7c95d..c0c3ffd61 100644 --- a/docker-app/qfieldcloud/core/tests/test_geodb.py +++ b/docker-app/qfieldcloud/core/tests/test_geodb.py @@ -45,6 +45,3 @@ def test_create_db(self): ) conn.commit() - - def tearDown(self): - User.objects.all().delete() diff --git a/docker-app/qfieldcloud/core/tests/test_organization.py b/docker-app/qfieldcloud/core/tests/test_organization.py index e7c35694b..d08538682 100644 --- a/docker-app/qfieldcloud/core/tests/test_organization.py +++ b/docker-app/qfieldcloud/core/tests/test_organization.py @@ -30,10 +30,6 @@ def setUp(self): organization_owner=self.user1, ) - def tearDown(self): - # Remove credentials - self.client.credentials() - def test_list_members(self): # Set user2 as member of organization1 diff --git a/docker-app/qfieldcloud/core/tests/test_packages.py b/docker-app/qfieldcloud/core/tests/test_packages.py index 2607e9d1a..b75248b3d 100644 --- a/docker-app/qfieldcloud/core/tests/test_packages.py 
+++ b/docker-app/qfieldcloud/core/tests/test_packages.py @@ -52,18 +52,6 @@ def setUp(self): port=5432, ) - def tearDown(self): - self.conn.close() - - # Remove all projects avoiding bulk delete in order to use - # the overrided delete() function in the model - for p in Project.objects.all(): - p.delete() - - User.objects.all().delete() - # Remove credentials - self.client.credentials() - def upload_files( self, token: str, diff --git a/docker-app/qfieldcloud/core/tests/test_permission.py b/docker-app/qfieldcloud/core/tests/test_permission.py index 7d3cf1570..847cf5bea 100644 --- a/docker-app/qfieldcloud/core/tests/test_permission.py +++ b/docker-app/qfieldcloud/core/tests/test_permission.py @@ -34,16 +34,6 @@ def setUp(self): ) self.project1.save() - def tearDown(self): - # Remove all projects avoiding bulk delete in order to use - # the overrided delete() function in the model - for p in Project.objects.all(): - p.delete() - - User.objects.all().delete() - # Remove credentials - self.client.credentials() - def test_collaborator_project_takeover(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) diff --git a/docker-app/qfieldcloud/core/tests/test_project.py b/docker-app/qfieldcloud/core/tests/test_project.py index fde2e0ff0..c7c361974 100644 --- a/docker-app/qfieldcloud/core/tests/test_project.py +++ b/docker-app/qfieldcloud/core/tests/test_project.py @@ -22,17 +22,6 @@ def setUp(self): self.user3 = User.objects.create_user(username="user3", password="abc123") self.token3 = AuthToken.objects.get_or_create(user=self.user3)[0] - def tearDown(self): - # Remove all projects avoiding bulk delete in order to use - # the overrided delete() function in the model - for p in Project.objects.all(): - p.delete() - - User.objects.all().delete() - - # Remove credentials - self.client.credentials() - def test_create_project(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + self.token1.key) response = self.client.post( diff --git a/docker-app/qfieldcloud/core/tests/test_qfield_file.py b/docker-app/qfieldcloud/core/tests/test_qfield_file.py index a310e26f4..7c75883f9 100644 --- a/docker-app/qfieldcloud/core/tests/test_qfield_file.py +++ b/docker-app/qfieldcloud/core/tests/test_qfield_file.py @@ -52,18 +52,6 @@ def setUp(self): port=5432, ) - def tearDown(self): - self.conn.close() - - # Remove all projects avoiding bulk delete in order to use - # the overrided delete() function in the model - for p in Project.objects.all(): - p.delete() - - User.objects.all().delete() - # Remove credentials - self.client.credentials() - def fail(self, msg: str, job: Job = None): if job: msg += f"\n\nOutput:\n================\n{job.output}\n================" diff --git a/docker-app/qfieldcloud/core/tests/test_qgis_file.py b/docker-app/qfieldcloud/core/tests/test_qgis_file.py index 21b428174..0caa98dc6 100644 --- a/docker-app/qfieldcloud/core/tests/test_qgis_file.py +++ b/docker-app/qfieldcloud/core/tests/test_qgis_file.py @@ -34,20 +34,6 @@ def setUp(self): ) self.project1.save() - def tearDown(self): - # Remove all projects avoiding bulk delete in order to use - # the overrided delete() function in the model - for p in Project.objects.all(): - bucket = utils.get_s3_bucket() - prefix = utils.safe_join("projects/{}/".format(p.id)) - bucket.objects.filter(Prefix=prefix).delete() - - p.delete() - - User.objects.all().delete() - # Remove credentials - self.client.credentials() - def get_file_contents(self, project, filename, version=None): qs = "" if version: diff --git 
a/docker-app/qfieldcloud/core/tests/test_queryset.py b/docker-app/qfieldcloud/core/tests/test_queryset.py index 7ba8e1db2..a1bf44491 100644 --- a/docker-app/qfieldcloud/core/tests/test_queryset.py +++ b/docker-app/qfieldcloud/core/tests/test_queryset.py @@ -115,10 +115,6 @@ def setUp(self): role=ProjectCollaborator.Roles.EDITOR, ) - def tearDown(self): - # Remove credentials - self.client.credentials() - def test_get_users(self): # should get all the available users queryset = querysets_utils.get_users("") diff --git a/docker-app/qfieldcloud/core/tests/test_user.py b/docker-app/qfieldcloud/core/tests/test_user.py index d79c5da08..59380a3f9 100644 --- a/docker-app/qfieldcloud/core/tests/test_user.py +++ b/docker-app/qfieldcloud/core/tests/test_user.py @@ -67,10 +67,6 @@ def setUp(self): is_public=True, ).save() - def tearDown(self): - # Remove credentials - self.client.credentials() - def test_login(self): response = self.client.post( From 0c60fc2780ddab7cafed6df35d8a85fe817e3448 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 15 Feb 2022 13:14:50 +0200 Subject: [PATCH 136/185] better fix for local minio seems there is a way to have the same URL both internally and from the host, so it should work more reliably. slightly different on linux than on mac/win though... see win: https://stackoverflow.com/a/67388365/ lin: https://stackoverflow.com/a/48547074/ --- .env.example | 7 +++---- README.md | 1 + docker-app/qfieldcloud/core/utils.py | 2 +- docker-app/qfieldcloud/settings.py | 1 - docker-compose.override.local.yml | 2 +- docker-compose.yml | 1 - 6 files changed, 6 insertions(+), 8 deletions(-) diff --git a/.env.example b/.env.example index e59d7aeb5..e4ea574b7 100644 --- a/.env.example +++ b/.env.example @@ -15,10 +15,9 @@ STORAGE_ACCESS_KEY_ID=minioadmin STORAGE_SECRET_ACCESS_KEY=minioadmin STORAGE_BUCKET_NAME=qfieldcloud-local STORAGE_REGION_NAME= -# Internal URL to the storage endpoint (from python code) -STORAGE_ENDPOINT_URL=http://minio:9000 -# Public URL to the storage endpoint (external storage should be equivalent to STORAGE_ENDPOINT_URL, local development only, no trailing slash) -STORAGE_ENDPOINT_URL_EXTERNAL=http://localhost:80/minio +# URL to the storage endpoint (must be reachable both from within docker and from the host) +STORAGE_ENDPOINT_URL=http://172.17.0.1:9000 +# (on windows/mac, use http://host.docker.internal:9000 instead) # Public port to the storage endpoint browser (local development only) STORAGE_BROWSER_PORT=8010 diff --git a/README.md b/README.md index 584e3ba98..c8955ea32 100644 --- a/README.md +++ b/README.md @@ -237,6 +237,7 @@ Based on this example | postgres | 5433 | HOST_POSTGRES_PORT | :white_check_mark: | :white_check_mark: | :white_check_mark: | | redis | 6379 | REDIS_PORT | :white_check_mark: | :white_check_mark: | :white_check_mark: | | geodb | 5432 | HOST_POSTGRES_PORT | :white_check_mark: | :white_check_mark: | :x: | +| minio | 9000 | | :white_check_mark: | :x: | :x: | | minio browser | 8010 | STORAGE_BROWSER_PORT | :white_check_mark: | :x: | :x: | | smtp web | 5000 | | :white_check_mark: | :x: | :x: | | smtp | 25 | | :white_check_mark: | :x: | :x: | diff --git a/docker-app/qfieldcloud/core/utils.py b/docker-app/qfieldcloud/core/utils.py index 449c65104..1d8c57408 100644 --- a/docker-app/qfieldcloud/core/utils.py +++ b/docker-app/qfieldcloud/core/utils.py @@ -348,7 +348,7 @@ def get_s3_object_url( Returns: str: URL """ - return f"{settings.STORAGE_ENDPOINT_URL_EXTERNAL}/{bucket.name}/{key}" + return
f"{settings.STORAGE_ENDPOINT_URL}/{bucket.name}/{key}" def list_files( diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py index c656c25fc..a85bbfb29 100644 --- a/docker-app/qfieldcloud/settings.py +++ b/docker-app/qfieldcloud/settings.py @@ -191,7 +191,6 @@ STORAGE_BUCKET_NAME = os.environ.get("STORAGE_BUCKET_NAME") STORAGE_REGION_NAME = os.environ.get("STORAGE_REGION_NAME") STORAGE_ENDPOINT_URL = os.environ.get("STORAGE_ENDPOINT_URL") -STORAGE_ENDPOINT_URL_EXTERNAL = os.environ.get("STORAGE_ENDPOINT_URL_EXTERNAL") AUTH_USER_MODEL = "core.User" diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml index 999a985f8..b42402d71 100644 --- a/docker-compose.override.local.yml +++ b/docker-compose.override.local.yml @@ -15,7 +15,6 @@ services: environment: # run flush, migrate and collectstatic in entrypoint DEBUG: 1 - STORAGE_ENDPOINT_URL_EXTERNAL: ${STORAGE_ENDPOINT_URL_EXTERNAL} STORAGE_BROWSER_PORT: ${STORAGE_BROWSER_PORT} command: python3 manage.py runserver 0.0.0.0:8000 depends_on: @@ -104,6 +103,7 @@ services: retries: 5 ports: - ${STORAGE_BROWSER_PORT}:9001 + - 9000:9000 createbuckets: image: minio/mc diff --git a/docker-compose.yml b/docker-compose.yml index c966bb654..9c9a7601b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -40,7 +40,6 @@ services: STORAGE_BUCKET_NAME: ${STORAGE_BUCKET_NAME} STORAGE_REGION_NAME: ${STORAGE_REGION_NAME} STORAGE_ENDPOINT_URL: ${STORAGE_ENDPOINT_URL} - STORAGE_ENDPOINT_URL_EXTERNAL: ${STORAGE_ENDPOINT_URL} QFIELDCLOUD_DEFAULT_NETWORK: ${QFIELDCLOUD_DEFAULT_NETWORK} SENTRY_DSN: ${SENTRY_DSN} SENTRY_SERVER_NAME: ${QFIELDCLOUD_HOST} From 0f4398f2634a2e229e184a5b8228aeb1aeef3815 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 15 Feb 2022 12:59:22 +0200 Subject: [PATCH 137/185] Rename STORAGE_BROWSER_PORT to MINIO_BROWSER_PORT --- .env.example | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.env.example b/.env.example index e4ea574b7..687941a36 100644 --- a/.env.example +++ b/.env.example @@ -15,9 +15,14 @@ STORAGE_ACCESS_KEY_ID=minioadmin STORAGE_SECRET_ACCESS_KEY=minioadmin STORAGE_BUCKET_NAME=qfieldcloud-local STORAGE_REGION_NAME= -# URL to the storage endpoint (must be reachable both from withing docker and from the host) + +# URL to the storage endpoint either minio, or external (e.g. S3). +# The URL must be reachable both from within docker and from the host, the default value is the `bridge` docker URL. +# Read more on https://docs.docker.com/network/network-tutorial-standalone/ . 
+# NOTE: to use minio on windows/mac, change the value to "http://host.docker.internal:8009" +# DEFAULT: http://172.17.0.1:9000 STORAGE_ENDPOINT_URL=http://172.17.0.1:9000 -# (on windows/mac, use http://host.docker.internal:9000 instead) + # Public port to the storage endpoint browser (local development only) STORAGE_BROWSER_PORT=8010 From 8ceff99996914c91a105e642817ac11e8ba0a2c7 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 15 Feb 2022 13:07:53 +0200 Subject: [PATCH 138/185] Rename STORAGE_BROWSER_PORT to MINIO_BROWSER_PORT --- .env.example | 6 ++++-- README.md | 2 +- docker-compose.override.local.yml | 5 ++--- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/.env.example b/.env.example index 687941a36..f4f329ebd 100644 --- a/.env.example +++ b/.env.example @@ -23,8 +23,10 @@ STORAGE_REGION_NAME= # DEFAULT: http://172.17.0.1:9000 STORAGE_ENDPOINT_URL=http://172.17.0.1:9000 -# Public port to the storage endpoint browser (local development only) -STORAGE_BROWSER_PORT=8010 +# Public port to the minio browser endpoint. +# NOTE: active only when minio is the configured as storage endpoint. Mostly for local development. +# DEFAULT: 8010 +MINIO_BROWSER_PORT=8010 WEB_HTTP_PORT=80 WEB_HTTPS_PORT=443 diff --git a/README.md b/README.md index c8955ea32..1f9ef2b3f 100644 --- a/README.md +++ b/README.md @@ -238,7 +238,7 @@ Based on this example | redis | 6379 | REDIS_PORT | :white_check_mark: | :white_check_mark: | :white_check_mark: | | geodb | 5432 | HOST_POSTGRES_PORT | :white_check_mark: | :white_check_mark: | :x: | | minio | 9000 | | :white_check_mark: | :x: | :x: | -| minio browser | 8010 | STORAGE_BROWSER_PORT | :white_check_mark: | :x: | :x: | +| minio browser | 8010 | MINIO_BROWSER_PORT | :white_check_mark: | :x: | :x: | | smtp web | 5000 | | :white_check_mark: | :x: | :x: | | smtp | 25 | | :white_check_mark: | :x: | :x: | | imap | 143 | | :white_check_mark: | :x: | :x: | diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml index b42402d71..285da6cbb 100644 --- a/docker-compose.override.local.yml +++ b/docker-compose.override.local.yml @@ -15,7 +15,6 @@ services: environment: # run flush, migrate and collectstatic in entrypoint DEBUG: 1 - STORAGE_BROWSER_PORT: ${STORAGE_BROWSER_PORT} command: python3 manage.py runserver 0.0.0.0:8000 depends_on: - db @@ -87,7 +86,7 @@ services: environment: MINIO_ROOT_USER: ${STORAGE_ACCESS_KEY_ID} MINIO_ROOT_PASSWORD: ${STORAGE_SECRET_ACCESS_KEY} - MINIO_BROWSER_REDIRECT_URL: http://${QFIELDCLOUD_HOST}:${STORAGE_BROWSER_PORT} + MINIO_BROWSER_REDIRECT_URL: http://${QFIELDCLOUD_HOST}:${MINIO_BROWSER_PORT} command: server /data{1...4} --console-address :9001 healthcheck: test: [ @@ -102,7 +101,7 @@ services: timeout: 20s retries: 5 ports: - - ${STORAGE_BROWSER_PORT}:9001 + - ${MINIO_BROWSER_PORT}:9001 - 9000:9000 createbuckets: From e4ec9754b0a8bbafc126925ff8747d35fdb2f145 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 15 Feb 2022 13:12:17 +0200 Subject: [PATCH 139/185] Add MINIO_API_PORT as a configuration --- .env.example | 9 +++++++-- README.md | 2 +- docker-compose.override.local.yml | 2 +- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/.env.example b/.env.example index f4f329ebd..8192cde3d 100644 --- a/.env.example +++ b/.env.example @@ -20,8 +20,13 @@ STORAGE_REGION_NAME= # The URL must be reachable both from within docker and from the host, the default value is the `bridge` docker URL. # Read more on https://docs.docker.com/network/network-tutorial-standalone/ . 
# NOTE: to use minio on windows/mac, change the value to "http://host.docker.internal:8009" -# DEFAULT: http://172.17.0.1:9000 -STORAGE_ENDPOINT_URL=http://172.17.0.1:9000 +# DEFAULT: http://172.17.0.1:8009 +STORAGE_ENDPOINT_URL=http://172.17.0.1:8009 + +# Public port to the minio API endpoint. It must match the configured port in `STORAGE_ENDPOINT_URL`. +# NOTE: active only when minio is the configured as storage endpoint. Mostly for local development. +# DEFAULT: 8009 +MINIO_API_PORT=8009 # Public port to the minio browser endpoint. # NOTE: active only when minio is the configured as storage endpoint. Mostly for local development. diff --git a/README.md b/README.md index 1f9ef2b3f..c21979880 100644 --- a/README.md +++ b/README.md @@ -237,7 +237,7 @@ Based on this example | postgres | 5433 | HOST_POSTGRES_PORT | :white_check_mark: | :white_check_mark: | :white_check_mark: | | redis | 6379 | REDIS_PORT | :white_check_mark: | :white_check_mark: | :white_check_mark: | | geodb | 5432 | HOST_POSTGRES_PORT | :white_check_mark: | :white_check_mark: | :x: | -| minio | 9000 | | :white_check_mark: | :x: | :x: | +| minio API | 8009 | MINIO_API_PORT | :white_check_mark: | :x: | :x: | | minio browser | 8010 | MINIO_BROWSER_PORT | :white_check_mark: | :x: | :x: | | smtp web | 5000 | | :white_check_mark: | :x: | :x: | | smtp | 25 | | :white_check_mark: | :x: | :x: | diff --git a/docker-compose.override.local.yml b/docker-compose.override.local.yml index 285da6cbb..f3d073d0b 100644 --- a/docker-compose.override.local.yml +++ b/docker-compose.override.local.yml @@ -102,7 +102,7 @@ services: retries: 5 ports: - ${MINIO_BROWSER_PORT}:9001 - - 9000:9000 + - ${MINIO_API_PORT}:9000 createbuckets: image: minio/mc From dd087f0bbbd53c68cea5197ffc5c511a388eaed8 Mon Sep 17 00:00:00 2001 From: olivierdalang Date: Thu, 17 Feb 2022 12:53:28 +0100 Subject: [PATCH 140/185] remove useless tearDown methods followup restore some parts of tearDown logic that may cause test failures --- docker-app/qfieldcloud/core/tests/test_packages.py | 3 +++ docker-app/qfieldcloud/core/tests/test_qfield_file.py | 3 +++ 2 files changed, 6 insertions(+) diff --git a/docker-app/qfieldcloud/core/tests/test_packages.py b/docker-app/qfieldcloud/core/tests/test_packages.py index b75248b3d..ec408268e 100644 --- a/docker-app/qfieldcloud/core/tests/test_packages.py +++ b/docker-app/qfieldcloud/core/tests/test_packages.py @@ -52,6 +52,9 @@ def setUp(self): port=5432, ) + def tearDown(self): + self.conn.close() + def upload_files( self, token: str, diff --git a/docker-app/qfieldcloud/core/tests/test_qfield_file.py b/docker-app/qfieldcloud/core/tests/test_qfield_file.py index 7c75883f9..f20500b7e 100644 --- a/docker-app/qfieldcloud/core/tests/test_qfield_file.py +++ b/docker-app/qfieldcloud/core/tests/test_qfield_file.py @@ -52,6 +52,9 @@ def setUp(self): port=5432, ) + def tearDown(self): + self.conn.close() + def fail(self, msg: str, job: Job = None): if job: msg += f"\n\nOutput:\n================\n{job.output}\n================" From 80221dd60a3ae304e4b3e5f76c6fe6362261bec6 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 16 Feb 2022 20:17:09 +0200 Subject: [PATCH 141/185] Bump QGIS version --- docker-qgis/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-qgis/Dockerfile b/docker-qgis/Dockerfile index b6dd29bb9..25eca7d22 100644 --- a/docker-qgis/Dockerfile +++ b/docker-qgis/Dockerfile @@ -1,4 +1,4 @@ -FROM qgis/qgis:final-3_22_1 +FROM qgis/qgis:final-3_22_3 RUN apt-get update && \ 
DEBIAN_FRONTEND=noninteractive apt-get install -y \ From 419d67c450a2b7264cc293d67cbf49325f44367c Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 16 Feb 2022 20:17:27 +0200 Subject: [PATCH 142/185] Make sure the packaged QGIS file is not empty before uploading --- docker-qgis/entrypoint.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py index 01572619b..b4369f35e 100755 --- a/docker-qgis/entrypoint.py +++ b/docker-qgis/entrypoint.py @@ -13,6 +13,7 @@ import qfieldcloud.qgis.process_projectfile from libqfieldsync.offline_converter import ExportType, OfflineConverter from libqfieldsync.project import ProjectConfiguration +from libqfieldsync.utils.file_utils import get_project_in_folder from qfieldcloud.qgis.utils import Step, StepOutput, WorkDirPath, Workflow from qgis.core import ( QgsApplication, @@ -260,6 +261,9 @@ def _call_qfieldsync_packager(project_filename: Path, package_dir: Path) -> Dict qgis_app.exitQgis() + if Path(get_project_in_folder(str(package_dir))).stat().st_size == 0: + raise Exception("The packaged QGIS project file is empty.") + return layer_checks From 60828947c431e8ae098010bbcedab2218191ec48 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 17 Feb 2022 19:36:04 +0200 Subject: [PATCH 143/185] Make sure the packaged QGIS file is not empty before uploading --- docker-qgis/entrypoint.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py index b4369f35e..9609c3b3c 100755 --- a/docker-qgis/entrypoint.py +++ b/docker-qgis/entrypoint.py @@ -261,7 +261,8 @@ def _call_qfieldsync_packager(project_filename: Path, package_dir: Path) -> Dict qgis_app.exitQgis() - if Path(get_project_in_folder(str(package_dir))).stat().st_size == 0: + packaged_project_filename = get_project_in_folder(str(package_dir)) + if Path(packaged_project_filename).stat().st_size == 0: raise Exception("The packaged QGIS project file is empty.") return layer_checks From 566f33a28d70b6538f824dac816178bba7ffabaf Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 16 Feb 2022 20:30:21 +0200 Subject: [PATCH 144/185] Calculate the md5sum --- docker-qgis/entrypoint.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py index 9609c3b3c..c6edadc75 100755 --- a/docker-qgis/entrypoint.py +++ b/docker-qgis/entrypoint.py @@ -68,6 +68,18 @@ def _get_sha256sum(filepath): return hasher.hexdigest() +def _get_md5sum(filepath): + """Calculate md5sum of a file""" + BLOCKSIZE = 65536 + hasher = hashlib.md5() + with filepath as f: + buf = f.read(BLOCKSIZE) + while len(buf) > 0: + hasher.update(buf) + buf = f.read(BLOCKSIZE) + return hasher.hexdigest() + + def _download_project_directory(project_id: str, download_dir: Path = None) -> Path: """Download the files in the project "working" directory from the S3 Storage into a temporary directory.
Returns the directory path""" @@ -126,6 +138,9 @@ def _upload_project_directory( with open(elem, "rb") as e: sha256sum = _get_sha256sum(e) + with open(elem, "rb") as e: + md5sum = _get_md5sum(e) + # Create the key filename = str(elem.relative_to(*elem.parts[:4])) key = "/".join([prefix, filename]) @@ -140,9 +155,10 @@ def _upload_project_directory( ) # Check if the file is different on the storage + # TODO switch to etag/md5sum comparison if metadata["sha256sum"] != storage_metadata["sha256sum"]: logging.info( - f'Uploading file "{key}", size: {elem.stat().st_size} bytes, sha256sum: "{sha256sum}" ' + f'Uploading file "{key}", size: {elem.stat().st_size} bytes, md5sum: {md5sum}, sha256sum: "{sha256sum}" ' ) bucket.upload_file(str(elem), key, ExtraArgs={"Metadata": metadata}) From f8b124627bb2f5e2393ccd2c97d742af0ad42ca3 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 16 Feb 2022 20:30:36 +0200 Subject: [PATCH 145/185] Show logs when downloading files --- docker-qgis/entrypoint.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py index c6edadc75..cb1b2d607 100755 --- a/docker-qgis/entrypoint.py +++ b/docker-qgis/entrypoint.py @@ -106,6 +106,10 @@ def _download_project_directory(project_id: str, download_dir: Path = None) -> P absolute_filename = download_dir.joinpath(relative_filename) absolute_filename.parent.mkdir(parents=True, exist_ok=True) + logging.info( + f'Downloading file "{obj.key}", size: {obj.size} bytes, md5sum: "{obj.e_tag}" ' + ) + bucket.download_file(obj.key, str(absolute_filename)) return download_dir From be25c4985169a993bff2fe863444d9f4877c6ff8 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 17 Feb 2022 19:31:28 +0200 Subject: [PATCH 146/185] Print layer check results --- docker-app/qfieldcloud/core/serializers.py | 16 ++-------- docker-qgis/entrypoint.py | 34 +--------------------- 2 files changed, 3 insertions(+), 47 deletions(-) diff --git a/docker-app/qfieldcloud/core/serializers.py b/docker-app/qfieldcloud/core/serializers.py index e8e3f870b..59c298747 100644 --- a/docker-app/qfieldcloud/core/serializers.py +++ b/docker-app/qfieldcloud/core/serializers.py @@ -273,6 +273,7 @@ class Meta: class ExportJobSerializer(serializers.ModelSerializer): + # TODO layers used to hold information about layer validity. No longer needed. 
layers = serializers.SerializerMethodField() status = serializers.SerializerMethodField(initial="STATUS_ERROR") @@ -286,20 +287,7 @@ def get_initial(self): return super().get_initial() def get_layers(self, obj): - if not obj.feedback: - return None - - if obj.status != Job.Status.FINISHED: - return None - - if obj.feedback.get("feedback_version") == "2.0": - return obj.feedback["outputs"]["package_project"]["layer_checks"] - else: - steps = obj.feedback.get("steps", []) - if len(steps) > 2 and steps[1].get("stage", 1) == 2: - return steps[1]["outputs"]["layer_checks"] - - return None + return {} def get_status(self, obj): if obj.status == Job.Status.PENDING: diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py index cb1b2d607..688fe53d3 100755 --- a/docker-qgis/entrypoint.py +++ b/docker-qgis/entrypoint.py @@ -6,7 +6,6 @@ import os import tempfile from pathlib import Path, PurePath -from typing import Dict import boto3 import qfieldcloud.qgis.apply_deltas @@ -167,7 +166,7 @@ def _upload_project_directory( bucket.upload_file(str(elem), key, ExtraArgs={"Metadata": metadata}) -def _call_qfieldsync_packager(project_filename: Path, package_dir: Path) -> Dict: +def _call_qfieldsync_packager(project_filename: Path, package_dir: Path) -> None: """Call the function of QFieldSync to package a project for QField""" argvb = list(map(os.fsencode, [""])) @@ -182,33 +181,6 @@ def _call_qfieldsync_packager(project_filename: Path, package_dir: Path) -> Dict raise Exception(f"Unable to open file with QGIS: {project_filename}") layers = project.mapLayers() - # Check if the layers are valid (i.e. if the datasources are available) - layer_checks = {} - for layer in layers.values(): - is_valid = True - status = "ok" - if layer: - if layer.dataProvider(): - if not layer.dataProvider().isValid(): - is_valid = False - status = "invalid_dataprovider" - # there might be another reason why the layer is not valid, other than the data provider - elif not layer.isValid(): - is_valid = False - status = "invalid_layer" - else: - is_valid = False - status = "missing_dataprovider" - else: - is_valid = False - status = "missing_layer" - - layer_checks[layer.id()] = { - "name": layer.name(), - "valid": is_valid, - "status": status, - } - project_config = ProjectConfiguration(project) vl_extent_wkt = QgsRectangle() vl_extent_crs = project.crs().authid() @@ -285,8 +257,6 @@ def _call_qfieldsync_packager(project_filename: Path, package_dir: Path) -> Dict if Path(packaged_project_filename).stat().st_size == 0: raise Exception("The packaged QGIS project file is empty.") - return layer_checks - def cmd_package_project(args): workflow = Workflow( @@ -313,8 +283,6 @@ def cmd_package_project(args): "package_dir": WorkDirPath("export", mkdir=True), }, method=_call_qfieldsync_packager, - return_names=["layer_checks"], - outputs=["layer_checks"], ), Step( id="upload_packaged_project", From 83b34441c98b821763214dc15542dc5f1d8a22ee Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 17 Feb 2022 00:08:10 +0200 Subject: [PATCH 147/185] Print QGIS project layer checks in process_projectfile job --- docker-qgis/process_projectfile.py | 40 ++++++++++++++++++++++++++++++ docker-qgis/requirements.txt | 1 + 2 files changed, 41 insertions(+) diff --git a/docker-qgis/process_projectfile.py b/docker-qgis/process_projectfile.py index be804c46a..d793f4d1a 100644 --- a/docker-qgis/process_projectfile.py +++ b/docker-qgis/process_projectfile.py @@ -9,6 +9,7 @@ from qgis.core import QgsMapRendererParallelJob, QgsMapSettings, QgsProject 
from qgis.PyQt.QtCore import QEventLoop, QSize from qgis.PyQt.QtGui import QColor +from tabulate import tabulate logging.basicConfig( stream=sys.stderr, level=logging.DEBUG, format="%(asctime)s %(levelname)s %(msg)s" @@ -133,6 +134,7 @@ def on_project_read(doc): else None, "type": layer.type(), "type_name": layer.type().name, + "error_code": "no_error", "error_summary": error.summary() if error.messageList() else "", "error_message": layer.error().message(), "filename": layer_source.filename, @@ -148,6 +150,12 @@ def on_project_read(doc): if data_provider: data_provider_error = data_provider.error() + if data_provider.isValid(): + # there might be another reason why the layer is not valid, other than the data provider + layers_by_id[layer_id]["error_code"] = "invalid_layer" + else: + layers_by_id[layer_id]["error_code"] = "invalid_dataprovider" + layers_by_id[layer_id]["provider_error_summary"] = ( data_provider_error.summary() if data_provider_error.messageList() @@ -179,10 +187,42 @@ def on_project_read(doc): f'Layer "{layer.name()}" seems to be invalid: {layers_by_id[layer_id]["provider_error_summary"]}' ) else: + layers_by_id[layer_id]["error_code"] = "missing_dataprovider" layers_by_id[layer_id][ "provider_error_summary" ] = "No data provider available" + # Print layer check results + table = [ + [ + d["name"], + f'...{d["id"][-6:]}', + d["is_valid"], + d["error_code"], + d["error_summary"], + d["provider_error_summary"], + ] + for d in layers_by_id.values() + ] + logging.info( + "\n".join( + [ + "QGIS project layer checks", + tabulate( + table, + headers=[ + "Layer Name", + "Layer Id", + "Is Valid", + "Status", + "Error Summary", + "Provider Summary", + ], + ), + ] + ) + ) + details["layers_by_id"] = layers_by_id details["ordered_layer_ids"] = ordered_layer_ids diff --git a/docker-qgis/requirements.txt b/docker-qgis/requirements.txt index 5d28f8f0c..dd819318e 100644 --- a/docker-qgis/requirements.txt +++ b/docker-qgis/requirements.txt @@ -1,4 +1,5 @@ jsonschema>=3.2.0,<3.3 typing-extensions>=3.7.4.3,<3.7.5 boto3>=1.16.28,<1.17 +tabulate==v0.8.9 sentry-sdk From 4f81d8c672768b0a444baadbcca77f7d14dbf839 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 17 Feb 2022 00:08:46 +0200 Subject: [PATCH 148/185] Fix error when s3 object version has size of None for whatever reason --- docker-app/qfieldcloud/core/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-app/qfieldcloud/core/utils.py b/docker-app/qfieldcloud/core/utils.py index 1d8c57408..96c26b7f9 100644 --- a/docker-app/qfieldcloud/core/utils.py +++ b/docker-app/qfieldcloud/core/utils.py @@ -261,7 +261,7 @@ def get_s3_project_size(project_id: str) -> int: total_size = 0 for version in bucket.object_versions.filter(Prefix=prefix): - total_size += version.size + total_size += version.size or 0 return round(total_size / (1024 * 1024), 3) From 22180d4e9d5b25c294720d731d0dcb2328e7f256 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 17 Feb 2022 00:09:07 +0200 Subject: [PATCH 149/185] Add better typing for containers --- docker-app/worker_wrapper/wrapper.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker-app/worker_wrapper/wrapper.py b/docker-app/worker_wrapper/wrapper.py index df0723f43..51cb156ce 100644 --- a/docker-app/worker_wrapper/wrapper.py +++ b/docker-app/worker_wrapper/wrapper.py @@ -14,6 +14,7 @@ from django.db import transaction from django.forms.models import model_to_dict from django.utils import timezone +from docker.models.containers import Container from 
qfieldcloud.core.models import ( ApplyJob, ApplyJobDelta, @@ -204,7 +205,7 @@ def _run_docker( logger.info(f"Execute: {' '.join(command)}") volumes.append(f"{TRANSFORMATION_GRIDS_VOLUME_NAME}:/transformation_grids:ro") - container = client.containers.run( + container: Container = client.containers.run( # type:ignore QGIS_CONTAINER_NAME, command, environment={ From 7a327077b24d50a1e76a99b40e4df90519cca691 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 17 Feb 2022 00:12:05 +0200 Subject: [PATCH 150/185] Admin: add "id", "data_last_updated_at", "data_last_packaged_at" and "project_details__pre" to project --- docker-app/qfieldcloud/core/admin.py | 30 ++++++++++++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) diff --git a/docker-app/qfieldcloud/core/admin.py b/docker-app/qfieldcloud/core/admin.py index 0fa81f999..e8c24e8e6 100644 --- a/docker-app/qfieldcloud/core/admin.py +++ b/docker-app/qfieldcloud/core/admin.py @@ -274,8 +274,28 @@ class ProjectAdmin(admin.ModelAdmin): "created_at", "updated_at", ) - fields = ("name", "description", "is_public", "owner", "storage_size") - readonly_fields = ("storage_size",) + fields = ( + "id", + "name", + "description", + "is_public", + "owner", + "storage_size", + "created_at", + "updated_at", + "data_last_updated_at", + "data_last_packaged_at", + "project_details__pre", + ) + readonly_fields = ( + "id", + "storage_size", + "created_at", + "updated_at", + "data_last_updated_at", + "data_last_packaged_at", + "project_details__pre", + ) inlines = (ProjectCollaboratorInline,) search_fields = ( "id", @@ -283,6 +303,12 @@ class ProjectAdmin(admin.ModelAdmin): "owner__username__iexact", ) + def project_details__pre(self, instance): + if instance.project_details is None: + return "" + + return format_pre_json(instance.project_details) + class DeltaInline(admin.TabularInline): model = ApplyJob.deltas_to_apply.through From 330c384dbc3a68da5d0537ee9e17dcaa782657c1 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 17 Feb 2022 02:50:41 +0200 Subject: [PATCH 151/185] Remove layers key from package response No longer needed, since the same information is available in project_details and is checked on file upload --- docker-app/qfieldcloud/core/views/package_views.py | 11 ----------- .../qfieldcloud/core/views/qfield_files_views.py | 12 +----------- 2 files changed, 1 insertion(+), 22 deletions(-) diff --git a/docker-app/qfieldcloud/core/views/package_views.py b/docker-app/qfieldcloud/core/views/package_views.py index 13de9a0e9..85b7ea48f 100644 --- a/docker-app/qfieldcloud/core/views/package_views.py +++ b/docker-app/qfieldcloud/core/views/package_views.py @@ -52,20 +52,9 @@ def get(self, request, project_id): if not files: raise exceptions.InvalidJobError("Empty project package.") - if last_job.feedback.get("feedback_version") == "2.0": - layers = last_job.feedback["outputs"]["package_project"]["layer_checks"] - else: - steps = last_job.feedback.get("steps", []) - layers = ( - steps[1]["outputs"]["layer_checks"] - if len(steps) > 2 and steps[1].get("stage", 1) == 2 - else None - ) - return Response( { "files": files, - "layers": layers, "status": last_job.status, "package_id": last_job.pk, "packaged_at": last_job.project.data_last_packaged_at, diff --git a/docker-app/qfieldcloud/core/views/qfield_files_views.py b/docker-app/qfieldcloud/core/views/qfield_files_views.py index 1157c2f8e..34aac09c6 100644 --- a/docker-app/qfieldcloud/core/views/qfield_files_views.py +++ b/docker-app/qfieldcloud/core/views/qfield_files_views.py @@ -151,20 
+151,10 @@ def get(self, request, projectid): } ) - if export_job.feedback.get("feedback_version") == "2.0": - layers = export_job.feedback["outputs"]["package_project"]["layer_checks"] - else: - steps = export_job.feedback.get("steps", []) - layers = ( - steps[1]["outputs"]["layer_checks"] - if len(steps) > 2 and steps[1].get("stage", 1) == 2 - else None - ) - return Response( { "files": files, - "layers": layers, + "layers": {}, "exported_at": export_job.updated_at, "export_id": export_job.pk, } From 4bbda0ff51806251292dc865864b34cd012bd4b9 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 17 Feb 2022 16:40:39 +0200 Subject: [PATCH 152/185] Add a separate get_layers_data utils function --- docker-qgis/process_projectfile.py | 120 +++-------------------------- docker-qgis/utils.py | 104 +++++++++++++++++++++++++ 2 files changed, 114 insertions(+), 110 deletions(-) diff --git a/docker-qgis/process_projectfile.py b/docker-qgis/process_projectfile.py index d793f4d1a..70289cdb1 100644 --- a/docker-qgis/process_projectfile.py +++ b/docker-qgis/process_projectfile.py @@ -4,12 +4,15 @@ from typing import Dict from xml.etree import ElementTree -from libqfieldsync.layer import LayerSource -from qfieldcloud.qgis.utils import BaseException, has_ping, is_localhost, start_app +from qfieldcloud.qgis.utils import ( + BaseException, + get_layers_data, + layers_data_to_string, + start_app, +) from qgis.core import QgsMapRendererParallelJob, QgsMapSettings, QgsProject from qgis.PyQt.QtCore import QEventLoop, QSize from qgis.PyQt.QtGui import QColor -from tabulate import tabulate logging.basicConfig( stream=sys.stderr, level=logging.DEBUG, format="%(asctime)s %(levelname)s %(msg)s" @@ -116,116 +119,13 @@ def on_project_read(doc): logging.info("Extracting layer and datasource details...") - ordered_layer_ids = [] - layers_by_id = {} - - for layer in project.mapLayers().values(): - error = layer.error() - layer_id = layer.id() - layer_source = LayerSource(layer) - ordered_layer_ids.append(layer_id) - layers_by_id[layer_id] = { - "id": layer_id, - "name": layer.name(), - "crs": layer.crs().authid() if layer.crs() else None, - "is_valid": layer.isValid(), - "datasource": layer.dataProvider().uri().uri() - if layer.dataProvider() - else None, - "type": layer.type(), - "type_name": layer.type().name, - "error_code": "no_error", - "error_summary": error.summary() if error.messageList() else "", - "error_message": layer.error().message(), - "filename": layer_source.filename, - "provider_error_summary": None, - "provider_error_message": None, - } - - if layers_by_id[layer_id]["is_valid"]: - continue - - data_provider = layer.dataProvider() - - if data_provider: - data_provider_error = data_provider.error() - - if data_provider.isValid(): - # there might be another reason why the layer is not valid, other than the data provider - layers_by_id[layer_id]["error_code"] = "invalid_layer" - else: - layers_by_id[layer_id]["error_code"] = "invalid_dataprovider" - - layers_by_id[layer_id]["provider_error_summary"] = ( - data_provider_error.summary() - if data_provider_error.messageList() - else "" - ) - layers_by_id[layer_id][ - "provider_error_message" - ] = data_provider_error.message() - - if not layers_by_id[layer_id]["provider_error_summary"]: - service = data_provider.uri().service() - if service: - layers_by_id[layer_id][ - "provider_error_summary" - ] = f'Unable to connect to service "{service}"' - - host = data_provider.uri().host() - port = ( - int(data_provider.uri().port()) - if data_provider.uri().port() - 
else None - ) - if host and (is_localhost(host, port) or has_ping(host)): - layers_by_id[layer_id][ - "provider_error_summary" - ] = f'Unable to connect to host "{host}"' - - logging.info( - f'Layer "{layer.name()}" seems to be invalid: {layers_by_id[layer_id]["provider_error_summary"]}' - ) - else: - layers_by_id[layer_id]["error_code"] = "missing_dataprovider" - layers_by_id[layer_id][ - "provider_error_summary" - ] = "No data provider available" - - # Print layer check results - table = [ - [ - d["name"], - f'...{d["id"][-6:]}', - d["is_valid"], - d["error_code"], - d["error_summary"], - d["provider_error_summary"], - ] - for d in layers_by_id.values() - ] + details["layers_by_id"] = get_layers_data(project) + details["ordered_layer_ids"] = list(details["layers_by_id"].keys()) + logging.info( - "\n".join( - [ - "QGIS project layer checks", - tabulate( - table, - headers=[ - "Layer Name", - "Layer Id", - "Is Valid", - "Status", - "Error Summary", - "Provider Summary", - ], - ), - ] - ) + f'QGIS project layer checks\n{layers_data_to_string(details["layers_by_id"])}', ) - details["layers_by_id"] = layers_by_id - details["ordered_layer_ids"] = ordered_layer_ids - return details diff --git a/docker-qgis/utils.py b/docker-qgis/utils.py index 1f93431e9..ef6058174 100644 --- a/docker-qgis/utils.py +++ b/docker-qgis/utils.py @@ -14,6 +14,7 @@ from pathlib import Path from typing import IO, Any, Callable, Dict, List, Optional, Union +from libqfieldsync.layer import LayerSource from qgis.core import ( Qgis, QgsApplication, @@ -23,6 +24,7 @@ QgsProviderRegistry, ) from qgis.PyQt import QtCore, QtGui +from tabulate import tabulate qgs_stderr_logger = logging.getLogger("QGIS_STDERR") qgs_stderr_logger.setLevel(logging.DEBUG) @@ -457,3 +459,105 @@ def run_workflow( json.dump(feedback, f, indent=2, sort_keys=True, default=json_default) return feedback + + +def get_layers_data(project: QgsProject) -> Dict[str, Dict]: + layers_by_id = {} + + for layer in project.mapLayers().values(): + error = layer.error() + layer_id = layer.id() + layer_source = LayerSource(layer) + layers_by_id[layer_id] = { + "id": layer_id, + "name": layer.name(), + "crs": layer.crs().authid() if layer.crs() else None, + "is_valid": layer.isValid(), + "datasource": layer.dataProvider().uri().uri() + if layer.dataProvider() + else None, + "type": layer.type(), + "type_name": layer.type().name, + "error_code": "no_error", + "error_summary": error.summary() if error.messageList() else "", + "error_message": layer.error().message(), + "filename": layer_source.filename, + "provider_error_summary": None, + "provider_error_message": None, + } + + if layers_by_id[layer_id]["is_valid"]: + continue + + data_provider = layer.dataProvider() + + if data_provider: + data_provider_error = data_provider.error() + + if data_provider.isValid(): + # there might be another reason why the layer is not valid, other than the data provider + layers_by_id[layer_id]["error_code"] = "invalid_layer" + else: + layers_by_id[layer_id]["error_code"] = "invalid_dataprovider" + + layers_by_id[layer_id]["provider_error_summary"] = ( + data_provider_error.summary() + if data_provider_error.messageList() + else "" + ) + layers_by_id[layer_id][ + "provider_error_message" + ] = data_provider_error.message() + + if not layers_by_id[layer_id]["provider_error_summary"]: + service = data_provider.uri().service() + if service: + layers_by_id[layer_id][ + "provider_error_summary" + ] = f'Unable to connect to service "{service}"' + + host = data_provider.uri().host() + port = ( 
+ int(data_provider.uri().port()) + if data_provider.uri().port() + else None + ) + if host and (is_localhost(host, port) or has_ping(host)): + layers_by_id[layer_id][ + "provider_error_summary" + ] = f'Unable to connect to host "{host}"' + + else: + layers_by_id[layer_id]["error_code"] = "missing_dataprovider" + layers_by_id[layer_id][ + "provider_error_summary" + ] = "No data provider available" + + return layers_by_id + + +def layers_data_to_string(layers_by_id): + # Print layer check results + table = [ + [ + d["name"], + f'...{d["id"][-6:]}', + d["is_valid"], + d["error_code"], + d["error_summary"], + d["provider_error_summary"], + ] + for d in layers_by_id.values() + ] + + return tabulate( + table, + headers=[ + "Layer Name", + "Layer Id", + "Is Valid", + "Status", + "Error Summary", + "Provider Summary", + ], + ) From bbef2fd3ac3cc49a2a71530585a92a83d3fa59fe Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 17 Feb 2022 19:29:56 +0200 Subject: [PATCH 153/185] Abandon the custom QgsApplication code, use start_app --- docker-qgis/entrypoint.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py index 688fe53d3..439b68eca 100755 --- a/docker-qgis/entrypoint.py +++ b/docker-qgis/entrypoint.py @@ -13,9 +13,8 @@ from libqfieldsync.offline_converter import ExportType, OfflineConverter from libqfieldsync.project import ProjectConfiguration from libqfieldsync.utils.file_utils import get_project_in_folder -from qfieldcloud.qgis.utils import Step, StepOutput, WorkDirPath, Workflow +from qfieldcloud.qgis.utils import Step, StepOutput, WorkDirPath, Workflow, start_app from qgis.core import ( - QgsApplication, QgsCoordinateTransform, QgsOfflineEditing, QgsProject, @@ -169,9 +168,7 @@ def _upload_project_directory( def _call_qfieldsync_packager(project_filename: Path, package_dir: Path) -> None: """Call the function of QFieldSync to package a project for QField""" - argvb = list(map(os.fsencode, [""])) - qgis_app = QgsApplication(argvb, True) - qgis_app.initQgis() + start_app() project = QgsProject.instance() if not project_filename.exists(): @@ -251,8 +248,6 @@ def _call_qfieldsync_packager(project_filename: Path, package_dir: Path) -> None offline_converter.project_configuration.create_base_map = False offline_converter.convert() - qgis_app.exitQgis() - packaged_project_filename = get_project_in_folder(str(package_dir)) if Path(packaged_project_filename).stat().st_size == 0: raise Exception("The packaged QGIS project file is empty.") From 0a602cb9845ca07eec62ad2d8855b2fbdfabdf60 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 17 Feb 2022 19:39:13 +0200 Subject: [PATCH 154/185] Added "packaged_layer_data" step to the package workflow --- docker-qgis/entrypoint.py | 53 +++++++++++++++++++++++++++++++++++++-- 1 file changed, 51 insertions(+), 2 deletions(-) diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py index 439b68eca..c4bfac1c4 100755 --- a/docker-qgis/entrypoint.py +++ b/docker-qgis/entrypoint.py @@ -6,6 +6,7 @@ import os import tempfile from pathlib import Path, PurePath +from typing import Dict, Union import boto3 import qfieldcloud.qgis.apply_deltas @@ -13,7 +14,15 @@ from libqfieldsync.offline_converter import ExportType, OfflineConverter from libqfieldsync.project import ProjectConfiguration from libqfieldsync.utils.file_utils import get_project_in_folder -from qfieldcloud.qgis.utils import Step, StepOutput, WorkDirPath, Workflow, start_app +from qfieldcloud.qgis.utils import ( + Step, 
+ StepOutput, + WorkDirPath, + Workflow, + get_layers_data, + layers_data_to_string, + start_app, +) from qgis.core import ( QgsCoordinateTransform, QgsOfflineEditing, @@ -165,7 +174,7 @@ def _upload_project_directory( bucket.upload_file(str(elem), key, ExtraArgs={"Metadata": metadata}) -def _call_qfieldsync_packager(project_filename: Path, package_dir: Path) -> None: +def _call_qfieldsync_packager(project_filename: Path, package_dir: Path) -> str: """Call the function of QFieldSync to package a project for QField""" start_app() @@ -252,6 +261,23 @@ def _call_qfieldsync_packager(project_filename: Path, package_dir: Path) -> None if Path(packaged_project_filename).stat().st_size == 0: raise Exception("The packaged QGIS project file is empty.") + return packaged_project_filename + + +def _extract_layer_data(project_filename: Union[str, Path]) -> Dict: + start_app() + + project_filename = str(project_filename) + project = QgsProject.instance() + project.read(project_filename) + layers_by_id = get_layers_data(project) + + logging.info( + f"QGIS project layer checks\n{layers_data_to_string(layers_by_id)}", + ) + + return layers_by_id + def cmd_package_project(args): workflow = Workflow( @@ -270,6 +296,16 @@ def cmd_package_project(args): method=_download_project_directory, return_names=["tmp_project_dir"], ), + Step( + id="qgis_layers_data", + name="QGIS Layers Data", + arguments={ + "project_filename": WorkDirPath("files", args.project_file), + }, + method=_extract_layer_data, + return_names=["layers_by_id"], + outputs=["layers_by_id"], + ), Step( id="package_project", name="Package Project", @@ -278,6 +314,19 @@ def cmd_package_project(args): "package_dir": WorkDirPath("export", mkdir=True), }, method=_call_qfieldsync_packager, + return_names=["qfield_project_filename"], + ), + Step( + id="qfield_layer_data", + name="Packaged Layers Data", + arguments={ + "project_filename": StepOutput( + "package_project", "qfield_project_filename" + ), + }, + method=_extract_layer_data, + return_names=["layers_by_id"], + outputs=["layers_by_id"], ), Step( id="upload_packaged_project", From c576e63c21ba052a50e5ee4240a511408be25c24 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 17 Feb 2022 21:20:58 +0200 Subject: [PATCH 155/185] Restore get_layers --- docker-app/qfieldcloud/core/serializers.py | 15 ++++++++++++++- .../qfieldcloud/core/views/package_views.py | 11 +++++++++++ .../qfieldcloud/core/views/qfield_files_views.py | 16 +++++++++++++++- 3 files changed, 40 insertions(+), 2 deletions(-) diff --git a/docker-app/qfieldcloud/core/serializers.py b/docker-app/qfieldcloud/core/serializers.py index 59c298747..9f6968a3a 100644 --- a/docker-app/qfieldcloud/core/serializers.py +++ b/docker-app/qfieldcloud/core/serializers.py @@ -287,7 +287,20 @@ def get_initial(self): return super().get_initial() def get_layers(self, obj): - return {} + if not obj.feedback: + return None + + if obj.status != Job.Status.FINISHED: + return None + + if obj.feedback.get("feedback_version") == "2.0": + return obj.feedback["outputs"]["qgis_layers_data"]["layers_by_id"] + else: + steps = obj.feedback.get("steps", []) + if len(steps) > 2 and steps[1].get("stage", 1) == 2: + return steps[1]["outputs"]["layer_checks"] + + return None def get_status(self, obj): if obj.status == Job.Status.PENDING: diff --git a/docker-app/qfieldcloud/core/views/package_views.py b/docker-app/qfieldcloud/core/views/package_views.py index 85b7ea48f..805023672 100644 --- a/docker-app/qfieldcloud/core/views/package_views.py +++ 
b/docker-app/qfieldcloud/core/views/package_views.py @@ -52,9 +52,20 @@ def get(self, request, project_id): if not files: raise exceptions.InvalidJobError("Empty project package.") + if last_job.feedback.get("feedback_version") == "2.0": + layers = last_job.feedback["outputs"]["qgis_layers_data"]["layers_by_id"] + else: + steps = last_job.feedback.get("steps", []) + layers = ( + steps[1]["outputs"]["layer_checks"] + if len(steps) > 2 and steps[1].get("stage", 1) == 2 + else None + ) + return Response( { "files": files, + "layers": layers, "status": last_job.status, "package_id": last_job.pk, "packaged_at": last_job.project.data_last_packaged_at, diff --git a/docker-app/qfieldcloud/core/views/qfield_files_views.py b/docker-app/qfieldcloud/core/views/qfield_files_views.py index 34aac09c6..bd60611a3 100644 --- a/docker-app/qfieldcloud/core/views/qfield_files_views.py +++ b/docker-app/qfieldcloud/core/views/qfield_files_views.py @@ -151,10 +151,24 @@ def get(self, request, projectid): } ) + if export_job.feedback.get("feedback_version") == "2.0": + layers = export_job.feedback["outputs"]["qgis_layers_data"]["layers_by_id"] + + for data in layers.values(): + data["valid"] = data["is_valid"] + data["status"] = data["error_code"] + else: + steps = export_job.feedback.get("steps", []) + layers = ( + steps[1]["outputs"]["layer_checks"] + if len(steps) > 2 and steps[1].get("stage", 1) == 2 + else None + ) + return Response( { "files": files, - "layers": {}, + "layers": layers, "exported_at": export_job.updated_at, "export_id": export_job.pk, } From 63c9a34f70f614b2be90b7619513b310952cc22f Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 18 Feb 2022 10:23:56 +0200 Subject: [PATCH 156/185] Fix tests: renamed keys, missing ping and QGIS cleanup (.gpkg-wal) --- .../qfieldcloud/core/tests/test_packages.py | 4 +-- docker-qgis/Dockerfile | 1 + docker-qgis/entrypoint.py | 2 ++ docker-qgis/utils.py | 30 +++++++++++-------- 4 files changed, 22 insertions(+), 15 deletions(-) diff --git a/docker-app/qfieldcloud/core/tests/test_packages.py b/docker-app/qfieldcloud/core/tests/test_packages.py index ec408268e..31814a6d7 100644 --- a/docker-app/qfieldcloud/core/tests/test_packages.py +++ b/docker-app/qfieldcloud/core/tests/test_packages.py @@ -183,9 +183,9 @@ def upload_files_and_check_package( layer_data = package_payload["layers"][layer_id] if layer_id in invalid_layers: - self.assertFalse(layer_data["valid"], layer_id) + self.assertFalse(layer_data["is_valid"], layer_id) else: - self.assertTrue(layer_data["valid"], layer_id) + self.assertTrue(layer_data["is_valid"], layer_id) return elif payload["status"] == Job.Status.FAILED: diff --git a/docker-qgis/Dockerfile b/docker-qgis/Dockerfile index 25eca7d22..104a914f0 100644 --- a/docker-qgis/Dockerfile +++ b/docker-qgis/Dockerfile @@ -4,6 +4,7 @@ RUN apt-get update && \ DEBIAN_FRONTEND=noninteractive apt-get install -y \ python3-pip \ xvfb \ + iputils-ping \ && apt-get clean COPY ./requirements.txt /tmp/ diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py index c4bfac1c4..bd764c235 100755 --- a/docker-qgis/entrypoint.py +++ b/docker-qgis/entrypoint.py @@ -22,6 +22,7 @@ get_layers_data, layers_data_to_string, start_app, + stop_app, ) from qgis.core import ( QgsCoordinateTransform, @@ -126,6 +127,7 @@ def _upload_project_directory( project_id: str, local_dir: Path, should_delete: bool = False ) -> None: """Upload the files in the local_dir to the storage""" + stop_app() bucket = _get_s3_bucket() # either "files" or "package" diff --git 
a/docker-qgis/utils.py b/docker-qgis/utils.py
index ef6058174..2954a8477 100644
--- a/docker-qgis/utils.py
+++ b/docker-qgis/utils.py
@@ -142,8 +142,9 @@ def stop_app():
     """
     global QGISAPP

-    QGISAPP.exitQgis()
-    del QGISAPP
+    if QGISAPP is not None:
+        QGISAPP.exitQgis()
+        del QGISAPP


 class WorkflowValidationException(Exception):
@@ -284,17 +285,20 @@ def is_localhost(hostname: str, port: int = None) -> bool:
     """returns True if the hostname points to the localhost, otherwise False."""
     if port is None:
         port = 22  # no port specified, lets just use the ssh port
-    hostname = socket.getfqdn(hostname)
-    if hostname in ("localhost", "0.0.0.0"):
-        return True
-    localhost = socket.gethostname()
-    localaddrs = socket.getaddrinfo(localhost, port)
-    targetaddrs = socket.getaddrinfo(hostname, port)
-    for (_family, _socktype, _proto, _canonname, sockaddr) in localaddrs:
-        for (_rfamily, _rsocktype, _rproto, _rcanonname, rsockaddr) in targetaddrs:
-            if rsockaddr[0] == sockaddr[0]:
-                return True
-    return False
+    try:
+        hostname = socket.getfqdn(hostname)
+        if hostname in ("localhost", "0.0.0.0"):
+            return True
+        localhost = socket.gethostname()
+        localaddrs = socket.getaddrinfo(localhost, port)
+        targetaddrs = socket.getaddrinfo(hostname, port)
+        for (_family, _socktype, _proto, _canonname, sockaddr) in localaddrs:
+            for (_rfamily, _rsocktype, _rproto, _rcanonname, rsockaddr) in targetaddrs:
+                if rsockaddr[0] == sockaddr[0]:
+                    return True
+        return False
+    except Exception:
+        return False


 def has_ping(hostname: str) -> bool:

From a7ec38868354ce18f1e5f88265128215188629f2 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Fri, 18 Feb 2022 10:43:22 +0200
Subject: [PATCH 157/185] Extra feedback on why a job has failed

---
 docker-app/qfieldcloud/core/tests/test_packages.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/docker-app/qfieldcloud/core/tests/test_packages.py b/docker-app/qfieldcloud/core/tests/test_packages.py
index 31814a6d7..509cbe0db 100644
--- a/docker-app/qfieldcloud/core/tests/test_packages.py
+++ b/docker-app/qfieldcloud/core/tests/test_packages.py
@@ -1,3 +1,4 @@
+import json
 import logging
 import os
 import tempfile
@@ -189,6 +190,12 @@ def upload_files_and_check_package(

                 return
             elif payload["status"] == Job.Status.FAILED:
+                print(
+                    "Job feedback:",
+                    json.dumps(
+                        Job.objects.get(id=job_id).feedback, sort_keys=True, indent=2
+                    ),
+                )
                 self.fail("Worker failed with error")

         self.fail("Worker didn't finish")

From a4d16017174e9b8b9e78edcd7f41961f6f690065 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Tue, 22 Feb 2022 22:51:50 +0200
Subject: [PATCH 158/185] More verbose logs

---
 docker-app/qfieldcloud/core/rest_utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker-app/qfieldcloud/core/rest_utils.py b/docker-app/qfieldcloud/core/rest_utils.py
index 3c1b1d8d7..d6aceb9a7 100644
--- a/docker-app/qfieldcloud/core/rest_utils.py
+++ b/docker-app/qfieldcloud/core/rest_utils.py
@@ -47,7 +47,7 @@ def exception_handler(exc, context):
         "detail": exc.detail,
     }

-    logging.info(exc)
+    logging.exception(exc)

     return Response(
         body,

From 4484f0b3c16ce33040b286a52dd39477d2ce2ff4 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Tue, 22 Feb 2022 22:52:17 +0200
Subject: [PATCH 159/185] Safely assume there are online vector layers if we
 don't know for sure

---
 docker-app/qfieldcloud/core/models.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git 
a/docker-app/qfieldcloud/core/models.py
+++ b/docker-app/qfieldcloud/core/models.py
@@ -949,8 +949,9 @@ def users(self):

     @property
     def has_online_vector_data(self) -> bool:
+        # it's safer to assume there is an online vector layer
         if not self.project_details:
-            return False
+            return True

         layers_by_id = self.project_details.get("layers_by_id", {})

         has_online_vector_layers = False

         for layer_data in layers_by_id.values():

From 96fecc0a2175abe7e0b95b4c15b339db98d79759 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 23 Feb 2022 10:29:08 +0200
Subject: [PATCH 160/185] Configure the worker timeout using an environment
 variable

---
 .env.example                         | 3 +++
 docker-app/qfieldcloud/settings.py   | 2 ++
 docker-app/worker_wrapper/wrapper.py | 3 ++-
 docker-compose.yml                   | 1 +
 4 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/.env.example b/.env.example
index 014fb9d04..b3c946538 100644
--- a/.env.example
+++ b/.env.example
@@ -74,6 +74,9 @@ COMPOSE_PROJECT_NAME=qfieldcloud
 QFIELDCLOUD_DEFAULT_NETWORK=qfieldcloud_default
 QFIELDCLOUD_ADMIN_URI=admin/

+# Timeout in seconds to wait for a job container to finish, otherwise terminate it.
+QFIELDCLOUD_WORKER_TIMEOUT_S=60
+
 GUNICORN_TIMEOUT_S=300
 GUNICORN_MAX_REQUESTS=300
 GUNICORN_WORKERS=3

diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py
index a85bbfb29..f6bc9dcbf 100644
--- a/docker-app/qfieldcloud/settings.py
+++ b/docker-app/qfieldcloud/settings.py
@@ -310,3 +310,5 @@

 QFIELDCLOUD_TOKEN_SERIALIZER = "qfieldcloud.core.serializers.TokenSerializer"
 QFIELDCLOUD_USER_SERIALIZER = "qfieldcloud.core.serializers.CompleteUserSerializer"
+
+WORKER_TIMEOUT_S = int(os.environ.get("QFIELDCLOUD_WORKER_TIMEOUT_S", 60))

diff --git a/docker-app/worker_wrapper/wrapper.py b/docker-app/worker_wrapper/wrapper.py
index 51cb156ce..f881d08d0 100644
--- a/docker-app/worker_wrapper/wrapper.py
+++ b/docker-app/worker_wrapper/wrapper.py
@@ -11,6 +11,7 @@
 import docker
 import qfieldcloud.core.utils2.storage
 import requests
+from django.conf import settings
 from django.db import transaction
 from django.forms.models import model_to_dict
 from django.utils import timezone
@@ -40,7 +41,7 @@ class QgisException(Exception):


 class JobRun:
-    container_timeout_secs = 10 * 60
+    container_timeout_secs = settings.WORKER_TIMEOUT_S
     job_class = Job
     command = []

diff --git a/docker-compose.yml b/docker-compose.yml
index 9c9a7601b..172a5c6e7 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -66,6 +66,7 @@ services:
       QFIELDCLOUD_ADMIN_URI: ${QFIELDCLOUD_ADMIN_URI}
       WEB_HTTP_PORT: ${WEB_HTTP_PORT}
       WEB_HTTPS_PORT: ${WEB_HTTPS_PORT}
+      QFIELDCLOUD_WORKER_TIMEOUT_S: ${QFIELDCLOUD_WORKER_TIMEOUT_S}
       TRANSFORMATION_GRIDS_VOLUME_NAME: ${COMPOSE_PROJECT_NAME}_transformation_grids
     depends_on:
       - redis

From 49c4157c4a1d15d887914a6d16a784cee9a2a985 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 23 Feb 2022 10:31:27 +0200
Subject: [PATCH 161/185] Add cron to set jobs of terminated workers to a final
 status

---
 docker-app/qfieldcloud/core/cron.py | 38 +++++++++++++++++++++++++++++
 docker-app/qfieldcloud/settings.py  | 1 +
 2 files changed, 39 insertions(+)

diff --git a/docker-app/qfieldcloud/core/cron.py b/docker-app/qfieldcloud/core/cron.py
index 9d63ec854..3148c52a4 100644
--- a/docker-app/qfieldcloud/core/cron.py
+++ b/docker-app/qfieldcloud/core/cron.py
@@ -1,12 +1,20 @@
 import logging
+import os

+import docker
+from django.utils import timezone
 from django_cron import CronJobBase, Schedule
 from invitations.utils import get_invitation_model

+from ..core.models import Job
 from .invitations_utils import 
send_invitation

 logger = logging.getLogger(__name__)

+QGIS_CONTAINER_NAME = os.environ.get("QGIS_CONTAINER_NAME", None)
+
+assert QGIS_CONTAINER_NAME
+

 class DeleteExpiredInvitationsJob(CronJobBase):
     schedule = Schedule(run_every_mins=60)
@@ -35,3 +43,33 @@ def do(self):
             logger.info(
                 f'Resend {len(invitation_emails)} previously failed invitation(s) to: {", ".join(invitation_emails)}'
             )
+
+
+class SetTerminatedWorkersToFinalStatusJob(CronJobBase):
+    # arbitrary number 3 here, it just feels like a good number since the configured timeout is 10 mins
+    schedule = Schedule(run_every_mins=3)
+    code = "qfieldcloud.set_terminated_workers_to_final_status"
+
+    def do(self):
+
+        client = docker.from_env()
+        qgis_containers = client.containers.list(
+            sparse=True, filters={"ancestor": QGIS_CONTAINER_NAME}
+        )
+        qgis_container_ids = [c.id for c in qgis_containers]
+
+        jobs = Job.objects.filter(
+            status__in=[Job.Status.QUEUED, Job.Status.STARTED],
+        ).exclude(container_id__in=qgis_container_ids)
+
+        jobs.update(
+            status=Job.Status.FAILED,
+            finished_at=timezone.now(),
+            feedback={
+                "error_stack": "",
+                "error": "Worker unexpectedly terminated.",
+                "error_origin": "worker_wrapper",
+                "container_exit_code": -2,
+            },
+            output="Job unexpectedly terminated.",
+        )

diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py
index f6bc9dcbf..b627773ba 100644
--- a/docker-app/qfieldcloud/settings.py
+++ b/docker-app/qfieldcloud/settings.py
@@ -99,6 +99,7 @@
     "qfieldcloud.notifs.cron.SendNotificationsJob",
     # "qfieldcloud.core.cron.DeleteExpiredInvitationsJob",
     "qfieldcloud.core.cron.ResendFailedInvitationsJob",
+    "qfieldcloud.core.cron.SetTerminatedWorkersToFinalStatusJob",
 ]

 ROOT_URLCONF = "qfieldcloud.urls"

From 67122643764c46666dd057131a1ef9956ca3dff4 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 23 Feb 2022 13:38:33 +0200
Subject: [PATCH 162/185] Send logs to Sentry

---
 docker-app/qfieldcloud/core/cron.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/docker-app/qfieldcloud/core/cron.py b/docker-app/qfieldcloud/core/cron.py
index 3148c52a4..d5690753d 100644
--- a/docker-app/qfieldcloud/core/cron.py
+++ b/docker-app/qfieldcloud/core/cron.py
@@ -5,6 +5,7 @@
 from django.utils import timezone
 from django_cron import CronJobBase, Schedule
 from invitations.utils import get_invitation_model
+from sentry_sdk import capture_message

 from ..core.models import Job
 from .invitations_utils import send_invitation
@@ -62,12 +63,17 @@ def do(self):
             status__in=[Job.Status.QUEUED, Job.Status.STARTED],
         ).exclude(container_id__in=qgis_container_ids)

+        for job in jobs:
+            capture_message(
+                f'Job "{job.id}" had status "{job.status}", but the worker container no longer exists. Job unexpectedly terminated.'
+            )
+
         jobs.update(
             status=Job.Status.FAILED,
             finished_at=timezone.now(),
             feedback={
                 "error_stack": "",
-                "error": "Worker unexpectedly terminated.",
+                "error": "Job unexpectedly terminated.",
                 "error_origin": "worker_wrapper",
                 "container_exit_code": -2,
             },
             output="Job unexpectedly terminated.",
         )

From ec502067dc2d7d6799d4cca0dad49aa130e38261 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 23 Feb 2022 15:44:51 +0200
Subject: [PATCH 163/185] Make the expired jobs cleanup work when the worker
 containers are running on other hosts

---
 docker-app/qfieldcloud/core/cron.py | 15 ++++++---------
 1 file changed, 6 insertions(+), 9 deletions(-)

diff --git a/docker-app/qfieldcloud/core/cron.py b/docker-app/qfieldcloud/core/cron.py
index d5690753d..26e598c55 100644
--- a/docker-app/qfieldcloud/core/cron.py
+++ b/docker-app/qfieldcloud/core/cron.py
@@ -1,7 +1,8 @@
 import logging
 import os
+from datetime import timedelta

-import docker
+from django.conf import settings
 from django.utils import timezone
 from django_cron import CronJobBase, Schedule
 from invitations.utils import get_invitation_model
@@ -52,16 +53,12 @@ class SetTerminatedWorkersToFinalStatusJob(CronJobBase):
     code = "qfieldcloud.set_terminated_workers_to_final_status"

     def do(self):
-
-        client = docker.from_env()
-        qgis_containers = client.containers.list(
-            sparse=True, filters={"ancestor": QGIS_CONTAINER_NAME}
-        )
-        qgis_container_ids = [c.id for c in qgis_containers]
-
         jobs = Job.objects.filter(
             status__in=[Job.Status.QUEUED, Job.Status.STARTED],
+            # add extra seconds just to make sure a properly finished job has had time to update the status.
+            started_at__lt=timezone.now()
+            - timedelta(seconds=settings.WORKER_TIMEOUT_S + 10),
+        )

         for job in jobs:
             capture_message(

From 42215b145c45fce06ce2788e8133162244acbd89 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 23 Feb 2022 16:08:30 +0200
Subject: [PATCH 164/185] Added a log message with the container ID for easier
 debugging

---
 docker-app/worker_wrapper/wrapper.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/docker-app/worker_wrapper/wrapper.py b/docker-app/worker_wrapper/wrapper.py
index f881d08d0..25768f3d5 100644
--- a/docker-app/worker_wrapper/wrapper.py
+++ b/docker-app/worker_wrapper/wrapper.py
@@ -227,6 +227,8 @@ def _run_docker(
         detach=True,
     )

+    logger.info(f"Starting worker {container.id} ...")
+
    response = {"StatusCode": TIMEOUT_ERROR_EXIT_CODE}

    try:

From b3a7d9131c618c017207f8aa139f15b22081dbee Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 23 Feb 2022 17:13:12 +0200
Subject: [PATCH 165/185] Remove no longer used minio endpoint

---
 conf/nginx/templates/default.conf.template | 17 -----------------
 1 file changed, 17 deletions(-)

diff --git a/conf/nginx/templates/default.conf.template b/conf/nginx/templates/default.conf.template
index cd5c8be12..e014adc1b 100644
--- a/conf/nginx/templates/default.conf.template
+++ b/conf/nginx/templates/default.conf.template
@@ -54,23 +54,6 @@ server {
         proxy_pass http://django;
     }

-    location /minio/ {
-        proxy_buffering off;
-        proxy_set_header X-Real-IP $remote_addr;
-        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-        proxy_set_header X-Forwarded-Proto $scheme;
-        proxy_set_header Host $http_host;
-
-        proxy_connect_timeout 300;
-        # Default is HTTP/1, keepalive is only enabled in HTTP/1.1
-        proxy_http_version 1.1;
-        proxy_set_header Connection "";
-        chunked_transfer_encoding off;
-
-        set $target http://minio1:9000/;
-        proxy_pass $target;
-    }
-
     location /storage-download/ {
         # Only allow internal redirects
         internal;

From 
4f2c3dd82a773e03c242d0bbbefb40dcaff832d1 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 23 Feb 2022 17:15:36 +0200
Subject: [PATCH 166/185] Bump to latest libqfieldsync

---
 docker-qgis/libqfieldsync | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker-qgis/libqfieldsync b/docker-qgis/libqfieldsync
index e45bf4a94..7389735b8 160000
--- a/docker-qgis/libqfieldsync
+++ b/docker-qgis/libqfieldsync
@@ -1 +1 @@
-Subproject commit e45bf4a94d70fd03cebc5f2d10faaac45d35969b
+Subproject commit 7389735b8f270633c0cc5537eeae41a5950e650b

From f33c60a1b6b655517c0b1d4bc2122561e93d4a8f Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 23 Feb 2022 19:31:12 +0200
Subject: [PATCH 167/185] No need to have an invitation to register

---
 docker-app/qfieldcloud/settings.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker-app/qfieldcloud/settings.py b/docker-app/qfieldcloud/settings.py
index b627773ba..c8f3d0890 100644
--- a/docker-app/qfieldcloud/settings.py
+++ b/docker-app/qfieldcloud/settings.py
@@ -276,7 +276,7 @@
 # Django invitations configurations
 # https://github.com/bee-keeper/django-invitations#additional-configuration
 INVITATIONS_INVITATION_EXPIRY = 365  # integer in days, 0 disables invitations
-INVITATIONS_INVITATION_ONLY = True
+INVITATIONS_INVITATION_ONLY = False
 INVITATIONS_ACCEPT_INVITE_AFTER_SIGNUP = True
 INVITATIONS_GONE_ON_ACCEPT_ERROR = False

From b27d1a52ec8c4470ba75b58a84c91d66d50dd078 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 23 Feb 2022 22:57:30 +0200
Subject: [PATCH 168/185] Make sure stop_app() does not fail when exiting
 Python

---
 docker-qgis/utils.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/docker-qgis/utils.py b/docker-qgis/utils.py
index 2954a8477..1218add25 100644
--- a/docker-qgis/utils.py
+++ b/docker-qgis/utils.py
@@ -129,6 +129,7 @@ def start_app():
         QtCore.qInstallMessageHandler(_qt_message_handler)
         QgsApplication.messageLog().messageReceived.connect(_write_log_message)

+        # make sure the app is closed, otherwise the container exits with a non-zero code
         @atexit.register
         def exitQgis():
             stop_app()
@@ -142,6 +143,10 @@ def stop_app():
     """
     global QGISAPP

+    # note that if this function is called from @atexit.register, the globals are cleaned up
+    if "QGISAPP" not in globals():
+        return
+
     if QGISAPP is not None:
         QGISAPP.exitQgis()
         del QGISAPP

From d482423d44e296822324fb818d1108ad35f6cd63 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 23 Feb 2022 22:58:06 +0200
Subject: [PATCH 169/185] Improve logging when files are uploaded

---
 docker-qgis/entrypoint.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py
index bd764c235..8af7f96fe 100755
--- a/docker-qgis/entrypoint.py
+++ b/docker-qgis/entrypoint.py
@@ -135,9 +135,13 @@ def _upload_project_directory(
     prefix = "/".join(["projects", project_id, subdir])

     if should_delete:
+        logging.info("Deleting older file versions...")
+
         # Remove existing package directory on the storage
         bucket.objects.filter(Prefix=prefix).delete()

+    uploaded_files_count = 0
+
     # Loop recursively in the local package directory
     for elem in Path(local_dir).rglob("*.*"):
         # Don't upload .qgs~ and .qgz~ files
@@ -170,11 +174,17 @@
         # Check if the file is different on the storage
         # TODO switch to etag/md5sum comparison
         if metadata["sha256sum"] != storage_metadata["sha256sum"]:
+            uploaded_files_count += 1
             logging.info(
                 f'Uploading file "{key}", size: {elem.stat().st_size} bytes, md5sum: {md5sum}, 
sha256sum: "{sha256sum}" '
             )

             bucket.upload_file(str(elem), key, ExtraArgs={"Metadata": metadata})

+    if uploaded_files_count == 0:
+        logging.info("No files need to be uploaded.")
+    else:
+        logging.info(f"{uploaded_files_count} file(s) uploaded.")
+

 def _call_qfieldsync_packager(project_filename: Path, package_dir: Path) -> str:
     """Call the function of QFieldSync to package a project for QField"""

From e5af6dd4316ad7187be2771376aa7e63b724f8a0 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Thu, 24 Feb 2022 00:51:48 +0200
Subject: [PATCH 170/185] Fix packaging if layers_by_id is missing

---
 docker-app/qfieldcloud/core/models.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py
index dd106a842..94c0e65be 100644
--- a/docker-app/qfieldcloud/core/models.py
+++ b/docker-app/qfieldcloud/core/models.py
@@ -953,7 +953,12 @@ def has_online_vector_data(self) -> bool:
         if not self.project_details:
             return True

-        layers_by_id = self.project_details.get("layers_by_id", {})
+        layers_by_id = self.project_details.get("layers_by_id")
+
+        # it's safer to assume there is an online vector layer
+        if layers_by_id is None:
+            return True
+
         has_online_vector_layers = False

         for layer_data in layers_by_id.values():

From 3ed20a46be0da5d1d6122d04d861cb43328420d9 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Thu, 24 Feb 2022 02:54:47 +0200
Subject: [PATCH 171/185] Print QGIS version within the worker

---
 docker-qgis/utils.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/docker-qgis/utils.py b/docker-qgis/utils.py
index 1218add25..081c9e286 100644
--- a/docker-qgis/utils.py
+++ b/docker-qgis/utils.py
@@ -112,7 +112,9 @@ def start_app():
     global QGISAPP

     if QGISAPP is None:
-        qgs_stderr_logger.info("Starting QGIS app...")
+        qgs_stderr_logger.info(
+            f"Starting QGIS app version {Qgis.versionInt()} ({Qgis.devVersion()})..."
+        )
         argvb = []

         # Note: QGIS_PREFIX_PATH is evaluated in QgsApplication -
@@ -148,6 +150,7 @@ def stop_app():
         return

     if QGISAPP is not None:
+        qgs_stderr_logger.info("Stopping QGIS app...")
         QGISAPP.exitQgis()
         del QGISAPP

From 677a160f165e4de851eab24ff075c329e0434eb2 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Thu, 24 Feb 2022 02:55:08 +0200
Subject: [PATCH 172/185] Better QGIS_CUSTOM_CONFIG_PATH dirname

---
 docker-qgis/utils.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/docker-qgis/utils.py b/docker-qgis/utils.py
index 081c9e286..57c54da03 100644
--- a/docker-qgis/utils.py
+++ b/docker-qgis/utils.py
@@ -123,9 +123,7 @@ def start_app():
         QGISAPP = QgsApplication(argvb, gui_flag)

         QtCore.qInstallMessageHandler(_qt_message_handler)
-        os.environ["QGIS_CUSTOM_CONFIG_PATH"] = tempfile.mkdtemp(
-            "", "QGIS-PythonTestConfigPath"
-        )
+        os.environ["QGIS_CUSTOM_CONFIG_PATH"] = tempfile.mkdtemp("", "QGIS_CONFIG")
         QGISAPP.initQgis()

         QtCore.qInstallMessageHandler(_qt_message_handler)

From 412dced11569ec02dd995f1f7466f1168c11a56a Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Thu, 24 Feb 2022 02:55:26 +0200
Subject: [PATCH 173/185] Remove double quoting around md5sum

---
 docker-qgis/entrypoint.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/docker-qgis/entrypoint.py b/docker-qgis/entrypoint.py
index 8af7f96fe..24ed8d671 100755
--- a/docker-qgis/entrypoint.py
+++ b/docker-qgis/entrypoint.py
@@ -114,8 +114,9 @@ def _download_project_directory(project_id: str, download_dir: Path = None) -> P
         absolute_filename = download_dir.joinpath(relative_filename)
         absolute_filename.parent.mkdir(parents=True, exist_ok=True)

+        # NOTE the E_TAG is already surrounded by double quotes
         logging.info(
-            f'Downloading file "{obj.key}", size: {obj.size} bytes, md5sum: "{obj.e_tag}" '
+            f'Downloading file "{obj.key}", size: {obj.size} bytes, md5sum: {obj.e_tag} '
         )

         bucket.download_file(obj.key, str(absolute_filename))

From 80a3a4bb67d8fe43bb0958aadfcb7eca03face71 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Thu, 24 Feb 2022 02:59:05 +0200
Subject: [PATCH 174/185] Set XDG_RUNTIME_DIR to prevent failures

While neither /run/user/0 nor /var/run/user/0 really exist, this prevents
certain types of failure, such as:

- core dumped, as it happens when it is set to a custom dir within the
  tmpdir with 700 perms
- error from the dev server: `WARNING:QGIS_STDERR:QStandardPaths: wrong
  ownership on runtime directory /tmp/runtime-root, -2 instead of 0`,
  which causes the online data providers to fail miserably

---
 docker-qgis/Dockerfile | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/docker-qgis/Dockerfile b/docker-qgis/Dockerfile
index 104a914f0..dbeb24f4b 100644
--- a/docker-qgis/Dockerfile
+++ b/docker-qgis/Dockerfile
@@ -28,4 +28,6 @@ ENV LIBC_FATAL_STDERR_=1
 ENV LANG=C.UTF-8
 ENV PYTHONPATH="/usr/src/app/lib:${PYTHONPATH}"

+ENV XDG_RUNTIME_DIR="/run/user/0"
+
 ENTRYPOINT ["/bin/sh", "-c", "/usr/bin/xvfb-run -a \"$@\"", ""]

From ecffe8dd15e61878a806f77d8b0a433163183ec1 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Fri, 25 Feb 2022 01:34:23 +0200
Subject: [PATCH 175/185] No need to check for authentication since the files
 should be public anyway, otherwise 403 from S3

---
 docker-app/qfieldcloud/core/views/files_views.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/docker-app/qfieldcloud/core/views/files_views.py b/docker-app/qfieldcloud/core/views/files_views.py
index 43ab762d2..efe22498e 100644
--- a/docker-app/qfieldcloud/core/views/files_views.py
+++ 
b/docker-app/qfieldcloud/core/views/files_views.py @@ -221,9 +221,7 @@ def get(self, request, projectid, filename): class PublicFilesView(views.APIView): parser_classes = [MultiPartParser] - permission_classes = [ - permissions.IsAuthenticated, - ] + permission_classes = [] def get(self, request, filename): return utils2.storage.file_response(request, filename) From acdbb4364dd6ce1aeee0f361ae2094cf07e76f58 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Sat, 26 Feb 2022 04:11:14 +0200 Subject: [PATCH 176/185] Make sure the files can be served with X-Accel-Redirect on prod --- docker-app/qfieldcloud/core/utils2/storage.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/docker-app/qfieldcloud/core/utils2/storage.py b/docker-app/qfieldcloud/core/utils2/storage.py index 05f37211f..aeeb28d45 100644 --- a/docker-app/qfieldcloud/core/utils2/storage.py +++ b/docker-app/qfieldcloud/core/utils2/storage.py @@ -1,6 +1,7 @@ from __future__ import annotations import logging +import os from pathlib import PurePath from typing import IO, List @@ -13,6 +14,9 @@ logger = logging.getLogger(__name__) +QFIELDCLOUD_HOST = os.environ.get("QFIELDCLOUD_HOST", None) +WEB_HTTPS_PORT = os.environ.get("WEB_HTTPS_PORT", None) + def staticfile_prefix(project: "Project", filename: str) -> str: # noqa: F821 """Returns the staticfile dir where the file belongs to or empty string if it does not. @@ -47,9 +51,10 @@ def file_response( extra_params["VersionId"] = version # check if we are in NGINX proxy - if request.META.get("HTTP_HOST", "").split(":")[-1] == request.META.get( - "WEB_HTTPS_PORT" - ): + http_host = request.META.get("HTTP_HOST", "") + https_port = http_host.split(":")[-1] if ":" in http_host else "443" + + if https_port == WEB_HTTPS_PORT and not settings.IN_TEST_SUITE: if presigned: if as_attachment: extra_params["ResponseContentType"] = "application/force-download" From c5e29730f857c0cc23e4ace61cda19fe69006e2c Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 25 Feb 2022 01:39:09 +0200 Subject: [PATCH 177/185] Return absolute avatar URL --- docker-app/qfieldcloud/core/serializers.py | 24 ++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/docker-app/qfieldcloud/core/serializers.py b/docker-app/qfieldcloud/core/serializers.py index 9f6968a3a..f9aaa3ca8 100644 --- a/docker-app/qfieldcloud/core/serializers.py +++ b/docker-app/qfieldcloud/core/serializers.py @@ -1,4 +1,8 @@ +import os +from typing import Optional + from django.contrib.auth import get_user_model +from django.contrib.sites.models import Site from qfieldcloud.authentication.models import AuthToken from qfieldcloud.core import exceptions from qfieldcloud.core.models import ( @@ -19,6 +23,14 @@ User = get_user_model() +def get_avatar_url(user: User) -> Optional[str]: + if hasattr(user, "useraccount") and user.useraccount.avatar_url: + site = Site.objects.get_current() + port = os.environ.get("WEB_HTTP_PORT") + return f"http://{site.domain}:{port}{user.useraccount.avatar_url}" + return None + + class UserSerializer: class Meta: model = User @@ -80,7 +92,7 @@ class CompleteUserSerializer(serializers.ModelSerializer): avatar_url = serializers.SerializerMethodField() def get_avatar_url(self, obj): - return obj.useraccount.avatar_url if hasattr(obj, "useraccount") else None + return get_avatar_url(obj) class Meta: model = User @@ -101,7 +113,7 @@ class PublicInfoUserSerializer(serializers.ModelSerializer): username_display = serializers.SerializerMethodField() def get_avatar_url(self, obj): 
- return obj.useraccount.avatar_url if hasattr(obj, "useraccount") else None + return get_avatar_url(obj) def get_username_display(self, obj): if obj.user_type == obj.TYPE_TEAM: @@ -139,7 +151,7 @@ def get_members(self, obj): ] def get_avatar_url(self, obj): - return obj.useraccount.avatar_url if hasattr(obj, "useraccount") else None + return get_avatar_url(obj) class Meta: model = Organization @@ -189,11 +201,7 @@ def get_email(self, obj): return obj.user.email def get_avatar_url(self, obj): - return ( - obj.user.useraccount.avatar_url - if hasattr(obj.user, "useraccount") - else None - ) + return get_avatar_url(obj.user) class Meta: model = AuthToken From f4e075eb3cfe3f15f91bf497320004a3c13144bf Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 28 Feb 2022 21:49:21 +0200 Subject: [PATCH 178/185] Block requests by IP, only allow requests with proper host header --- conf/nginx/templates/default.conf.template | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/conf/nginx/templates/default.conf.template b/conf/nginx/templates/default.conf.template index e014adc1b..4a13908c7 100644 --- a/conf/nginx/templates/default.conf.template +++ b/conf/nginx/templates/default.conf.template @@ -16,6 +16,11 @@ server { root /var/www/certbot; } + # prevent access by IP + if ($http_host != "${QFIELDCLOUD_HOST}") { + return 444; + } + # use $WEB_HTTPS_PORT in the configuration, since it might be different from 443 (e.g. localhost) location / { return 302 https://$host:${WEB_HTTPS_PORT}$request_uri; @@ -35,8 +40,14 @@ server { # path for static files (only needed for serving local staticfiles) root /var/www/html/; + # upgrade to https error_page 497 https://$host:${WEB_HTTPS_PORT}$request_uri; + # prevent access by IP + if ($http_host != "${QFIELDCLOUD_HOST}") { + return 444; + } + # checks for static file, if not found proxy to app location / { try_files $uri @proxy_to_app; From 9c9f753f99e974a5726b870e720955fa34dd46b5 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 28 Feb 2022 21:49:39 +0200 Subject: [PATCH 179/185] Do not save redis state, nobody needs persistent redis as of now --- docker-redis/redis.conf | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker-redis/redis.conf b/docker-redis/redis.conf index 7a5b9e44c..6669be89e 100644 --- a/docker-redis/redis.conf +++ b/docker-redis/redis.conf @@ -307,6 +307,7 @@ always-show-logo yes save 900 1 save 300 10 save 60 10000 +save "" # By default Redis will stop accepting writes if RDB snapshots are enabled # (at least one save point) and the latest background save failed. @@ -321,7 +322,7 @@ save 60 10000 # and persistence, you may want to disable this feature so that Redis will # continue to work as usual even if there are problems with disk, # permissions, and so forth. -stop-writes-on-bgsave-error yes +stop-writes-on-bgsave-error no # Compress string objects using LZF when dump .rdb databases? # For default that's set to 'yes' as it's almost always a win. 
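
A quick way to double-check the effect of the redis.conf change above is to read back the effective configuration at runtime. The following is a minimal sketch, not part of the patch series, assuming the redis-py package is installed and the Redis instance from docker-compose is reachable; the host and port are placeholders to adapt to the actual setup:

    import redis

    # decode_responses=True makes CONFIG GET return plain strings instead of bytes
    r = redis.Redis(host="localhost", port=6379, decode_responses=True)

    # with `save ""` in redis.conf, no RDB save points should remain configured
    assert r.config_get("save")["save"] == "", "RDB snapshotting is still enabled"

    # writes should keep working even if a background save were to fail
    assert r.config_get("stop-writes-on-bgsave-error")["stop-writes-on-bgsave-error"] == "no"

    print("Redis persistence is disabled as expected")
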
From 15d24702e7835cfe7cde37aa12db5b5b33e32133 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Mon, 28 Feb 2022 21:54:13 +0200
Subject: [PATCH 180/185] Fix first_name, last_name, etc. fields in token
 serializer

---
 docker-app/qfieldcloud/core/serializers.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/docker-app/qfieldcloud/core/serializers.py b/docker-app/qfieldcloud/core/serializers.py
index f9aaa3ca8..1731d68aa 100644
--- a/docker-app/qfieldcloud/core/serializers.py
+++ b/docker-app/qfieldcloud/core/serializers.py
@@ -187,12 +187,12 @@ class Meta:


 class TokenSerializer(serializers.ModelSerializer):
-    username = serializers.StringRelatedField(source="user")
+    username = serializers.CharField(source="user.username")
     expires_at = serializers.DateTimeField()
-    user_type = serializers.StringRelatedField(source="user")
-    first_name = serializers.StringRelatedField(source="user")
-    last_name = serializers.StringRelatedField(source="user")
-    full_name = serializers.StringRelatedField(source="user")
+    user_type = serializers.CharField(source="user.user_type")
+    first_name = serializers.CharField(source="user.first_name")
+    last_name = serializers.CharField(source="user.last_name")
+    full_name = serializers.CharField(source="user.full_name")
     token = serializers.CharField(source="key")
     email = serializers.SerializerMethodField()
     avatar_url = serializers.SerializerMethodField()

From efe8fe28838529a3b411479101308f9d8fb5da20 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Mon, 28 Feb 2022 21:55:46 +0200
Subject: [PATCH 181/185] Fix full_name containing a stray whitespace character
 if either first or last name is not set

---
 docker-app/qfieldcloud/core/models.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker-app/qfieldcloud/core/models.py b/docker-app/qfieldcloud/core/models.py
index 94c0e65be..d959130a7 100644
--- a/docker-app/qfieldcloud/core/models.py
+++ b/docker-app/qfieldcloud/core/models.py
@@ -342,7 +342,7 @@ def is_team(self):

     @property
     def full_name(self) -> str:
-        return f"{self.first_name} {self.last_name}"
+        return f"{self.first_name} {self.last_name}".strip()

     @property
     def username_with_full_name(self) -> str:

From bf611c1b0f950ce4ce88618eb0ba149dc677f23e Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Tue, 1 Mar 2022 01:05:43 +0200
Subject: [PATCH 182/185] Added tests for the auth app

---
 .../qfieldcloud/authentication/models.py       |  11 +-
 .../authentication/tests/__init__.py           |   0
 .../tests/test_authentication.py               | 204 ++++++++++++++++++
 .../qfieldcloud/authentication/views.py        |   2 +-
 4 files changed, 213 insertions(+), 4 deletions(-)
 create mode 100644 docker-app/qfieldcloud/authentication/tests/__init__.py
 create mode 100644 docker-app/qfieldcloud/authentication/tests/test_authentication.py

diff --git a/docker-app/qfieldcloud/authentication/models.py b/docker-app/qfieldcloud/authentication/models.py
index 71ebd31d7..8fceb6980 100644
--- a/docker-app/qfieldcloud/authentication/models.py
+++ b/docker-app/qfieldcloud/authentication/models.py
@@ -40,17 +40,22 @@ def guess_client_type(user_agent: str) -> ClientType:
         if user_agent.startswith("cli|"):
             return AuthToken.ClientType.CLI

-        if re.search(r" QGIS/[34]\d{4}$", user_agent):
+        if re.search(r" QGIS/[34]\d{4}(\/.*)?$", user_agent):
             return AuthToken.ClientType.QFIELDSYNC

         if re.search(
-            r"Mozilla/5.0 .+ AppleWebKit/\d+.\d+ (KHTML, like Gecko)", user_agent
+            r"Mozilla\/5.0 .+(AppleWebKit\/\d+.\d+ \(KHTML, like Gecko\)|Firefox\/[\d\.]+)",
+            user_agent,
         ):
             return AuthToken.ClientType.BROWSER

         return AuthToken.ClientType.UNKNOWN

-    single_token_clients = [ClientType.QFIELD, ClientType.QFIELDSYNC]
+    single_token_clients = [
+        ClientType.QFIELD,
+        ClientType.QFIELDSYNC,
+        ClientType.UNKNOWN,
+    ]

     user = models.ForeignKey(
         get_user_model(),

diff --git a/docker-app/qfieldcloud/authentication/tests/__init__.py b/docker-app/qfieldcloud/authentication/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb

diff --git a/docker-app/qfieldcloud/authentication/tests/test_authentication.py b/docker-app/qfieldcloud/authentication/tests/test_authentication.py
new file mode 100644
index 000000000..a916133c5
--- /dev/null
+++ b/docker-app/qfieldcloud/authentication/tests/test_authentication.py
@@ -0,0 +1,204 @@
+import logging
+
+from django.utils.timezone import datetime, now
+from qfieldcloud.authentication.models import AuthToken
+from qfieldcloud.core.models import User
+from rest_framework.test import APITransactionTestCase
+
+logging.disable(logging.CRITICAL)
+
+
+class QfcTestCase(APITransactionTestCase):
+    def setUp(self):
+        # Create a user
+        self.user1 = User.objects.create_user(username="user1", password="abc123")
+
+    def assertTokenMatch(self, token, payload):
+        expires_at = payload.pop("expires_at")
+        avatar_url = payload.pop("avatar_url")
+        self.assertDictEqual(
+            payload,
+            {
+                "token": token.key,
+                # "expires_at": tokens[0].expires_at.isoformat(),
+                "username": token.user.username,
+                "email": "",
+                "first_name": "",
+                "full_name": "",
+                "last_name": "",
+                "user_type": "1",
+            },
+        )
+        self.assertTrue(datetime.fromisoformat(expires_at) == token.expires_at)
+        self.assertTrue(datetime.fromisoformat(expires_at) > now())
+        self.assertTrue(avatar_url is None or avatar_url.startswith("http"))
+        self.assertTrue(
+            avatar_url is None
+            or avatar_url.endswith(
+                f"/api/v1/files/public/users/{token.user.username}/avatar.svg"
+            )
+        )
+
+    def login(self, username, password, user_agent=""):
+        response = self.client.post(
+            "/api/v1/auth/login/",
+            {
+                "username": username,
+                "password": password,
+            },
+            HTTP_USER_AGENT=user_agent,
+        )
+
+        self.assertEqual(response.status_code, 200)
+
+        return response
+
+    def test_login_logout(self):
+        response = self.login("user1", "abc123")
+        tokens = self.user1.auth_tokens.order_by("-created_at").all()
+
+        self.assertEquals(len(tokens), 1)
+        self.assertTokenMatch(tokens[0], response.json())
+        self.assertGreater(tokens[0].expires_at, now())
+
+        # set auth token
+        self.client.credentials(HTTP_AUTHORIZATION="Token " + tokens[0].key)
+
+        # logout
+        response = self.client.post("/api/v1/auth/logout/")
+        tokens = self.user1.auth_tokens.order_by("-created_at").all()
+
+        self.assertEqual(response.status_code, 200)
+
+        self.assertEquals(len(tokens), 1)
+        self.assertLess(tokens[0].expires_at, now())
+
+    def test_multiple_logins(self):
+        # first single active token login
+        response = self.login("user1", "abc123", "Mozilla/5.0 QGIS/32203")
+        tokens = self.user1.auth_tokens.order_by("-created_at").all()
+
+        self.assertEquals(len(tokens), 1)
+        self.assertTokenMatch(tokens[0], response.json())
+
+        # second single active token login
+        response = self.login("user1", "abc123", "Mozilla/5.0 QGIS/32203")
+        tokens = self.user1.auth_tokens.order_by("-created_at").all()
+
+        self.assertEquals(len(tokens), 2)
+        self.assertTokenMatch(tokens[0], response.json())
+        self.assertNotEqual(tokens[0], tokens[1])
+        self.assertGreater(tokens[0].expires_at, now())
+        self.assertLess(tokens[1].expires_at, now())
+
+        # first multi-token client login
+        response = self.login("user1", 
"abc123", "sdk|py|dev python-requests|2.26.0") + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertEquals(len(tokens), 3) + self.assertTokenMatch(tokens[0], response.json()) + + # second single active token login + response = self.login("user1", "abc123", "sdk|py|dev python-requests|2.26.0") + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertEquals(len(tokens), 4) + self.assertTokenMatch(tokens[0], response.json()) + self.assertNotEqual(tokens[0], tokens[1]) + self.assertGreater(tokens[0].expires_at, now()) + self.assertGreater(tokens[1].expires_at, now()) + + def test_client_type(self): + # QFIELDSYNC login + response = self.login("user1", "abc123", "Mozilla/5.0 QGIS/32203") + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertTokenMatch(tokens[0], response.json()) + self.assertEqual(tokens[0].client_type, AuthToken.ClientType.QFIELDSYNC) + + response = self.login( + "user1", "abc123", "Mozilla/5.0 QGIS/32400/Ubuntu 20.04.4 LTS" + ) + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertTokenMatch(tokens[0], response.json()) + self.assertEqual(tokens[0].client_type, AuthToken.ClientType.QFIELDSYNC) + + # SDK login + response = self.login("user1", "abc123", "sdk|py|dev python-requests|2.26.0") + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertTokenMatch(tokens[0], response.json()) + self.assertEqual(tokens[0].client_type, AuthToken.ClientType.SDK) + + # BROWSER login + response = self.login( + "user1", + "abc123", + "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:97.0) Gecko/20100101 Firefox/97.0", + ) + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertTokenMatch(tokens[0], response.json()) + self.assertEqual(tokens[0].client_type, AuthToken.ClientType.BROWSER) + + response = self.login( + "user1", + "abc123", + "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.105 Safari/537.36", + ) + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertTokenMatch(tokens[0], response.json()) + self.assertEqual(tokens[0].client_type, AuthToken.ClientType.BROWSER) + + # QFIELD login + response = self.login( + "user1", + "abc123", + "qfield|1.0.0|local - dev|3515ce8cba0f0e0abb92e06bf30a00531810656f| QGIS/31900", + ) + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertTokenMatch(tokens[0], response.json()) + self.assertEqual(tokens[0].client_type, AuthToken.ClientType.QFIELD) + + # UNKNOWN login + response = self.login("user1", "abc123", "Слава Україні!") + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertTokenMatch(tokens[0], response.json()) + self.assertEqual(tokens[0].client_type, AuthToken.ClientType.UNKNOWN) + + def test_last_used_at(self): + response = self.login("user1", "abc123") + + tokens = self.user1.auth_tokens.order_by("-created_at").all() + + self.assertEquals(len(tokens), 1) + self.assertTokenMatch(tokens[0], response.json()) + self.assertIsNone(tokens[0].last_used_at) + + # set auth token + self.client.credentials(HTTP_AUTHORIZATION="Token " + tokens[0].key) + + # first token usage + response = self.client.get(f"/api/v1/users/{self.user1.username}/") + + self.assertEqual(response.status_code, 200) + + tokens = self.user1.auth_tokens.order_by("-created_at").all() + first_used_at = tokens[0].last_used_at + + self.assertEquals(len(tokens), 1) + + # second token usage + response = 
self.client.get(f"/api/v1/users/{self.user1.username}/")
+
+        self.assertEqual(response.status_code, 200)
+
+        tokens = self.user1.auth_tokens.order_by("-created_at").all()
+        second_used_at = tokens[0].last_used_at
+
+        self.assertEquals(len(tokens), 1)
+        self.assertLess(first_used_at, second_used_at)

diff --git a/docker-app/qfieldcloud/authentication/views.py b/docker-app/qfieldcloud/authentication/views.py
index 8993490bc..cc5baeff8 100644
--- a/docker-app/qfieldcloud/authentication/views.py
+++ b/docker-app/qfieldcloud/authentication/views.py
@@ -88,7 +88,7 @@ def post(self, request, *args, **kwargs):
     def logout(self, request):
         try:
             now = timezone.now()
-            request.user.auth_token.filter(expired_at__gt=now).update(expired_at=now)
+            request.user.auth_tokens.filter(expires_at__gt=now).update(expires_at=now)
         except (AttributeError, ObjectDoesNotExist):
             pass

From 3b3a39a25e53f91a09865fdf2f58ada22f9ca1a5 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Tue, 1 Mar 2022 05:37:45 +0200
Subject: [PATCH 183/185] Yet another fix for the avatars

---
 docker-app/qfieldcloud/core/serializers.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/docker-app/qfieldcloud/core/serializers.py b/docker-app/qfieldcloud/core/serializers.py
index 1731d68aa..83788a925 100644
--- a/docker-app/qfieldcloud/core/serializers.py
+++ b/docker-app/qfieldcloud/core/serializers.py
@@ -26,8 +26,9 @@
 def get_avatar_url(user: User) -> Optional[str]:
     if hasattr(user, "useraccount") and user.useraccount.avatar_url:
         site = Site.objects.get_current()
-        port = os.environ.get("WEB_HTTP_PORT")
-        return f"http://{site.domain}:{port}{user.useraccount.avatar_url}"
+        port = os.environ.get("WEB_HTTPS_PORT")
+        port = f":{port}" if port != "443" else ""
+        return f"https://{site.domain}{port}{user.useraccount.avatar_url}"
     return None

From 903c660069807cf4638366011f7b3d220c6a0713 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Tue, 1 Mar 2022 06:50:26 +0200
Subject: [PATCH 184/185] Clean up apply delta logs a bit

---
 docker-qgis/apply_deltas.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/docker-qgis/apply_deltas.py b/docker-qgis/apply_deltas.py
index 4742542f9..aecdd2ff3 100755
--- a/docker-qgis/apply_deltas.py
+++ b/docker-qgis/apply_deltas.py
@@ -193,15 +193,12 @@ def delta_apply(
     start_app()
     project = QgsProject.instance()

-    logging.info(project_filename)
-    logging.info(delta_filename)
+    logging.info(f'Loading project file "{project_filename}"...')
     project.read(str(project_filename))

-    logging.info(project.mapLayers())
+    logging.info(f'Loading delta file "{delta_filename}"...')
     delta_file = delta_file_file_loader({"delta_file": delta_filename})  # type: ignore

-    logging.info(delta_file)
-
     if not delta_file:
         raise Exception("Missing delta file")

From 6fba262dfa5252a7746013c289cb032bd406fd37 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Tue, 1 Mar 2022 06:52:08 +0200
Subject: [PATCH 185/185] Deny annoying bot

---
 conf/nginx/templates/default.conf.template | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/conf/nginx/templates/default.conf.template b/conf/nginx/templates/default.conf.template
index 4a13908c7..a96f9c83f 100644
--- a/conf/nginx/templates/default.conf.template
+++ b/conf/nginx/templates/default.conf.template
@@ -48,6 +48,9 @@ server {
         return 444;
     }

+    # deny annoying bot
+    deny 34.215.13.216;
+
     # checks for static file, if not found proxy to app
     location / {
         try_files $uri @proxy_to_app;
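
A minimal smoke test, not part of the patch series, for the host-header blocking introduced in PATCH 178 and extended above: with `return 444` nginx closes the connection without sending any response, so a client using the wrong Host header sees a dropped connection rather than an HTTP error. The sketch below assumes the stack is running locally and the requests package is available; the URL, port and Host values are placeholders to adapt to the actual .env configuration:

    import requests

    NGINX_URL = "https://127.0.0.1:443"  # placeholder: the exposed WEB_HTTPS_PORT endpoint
    EXPECTED_HOST = "localhost"          # placeholder: must match ${QFIELDCLOUD_HOST}

    # a request with the expected Host header should be served normally
    ok = requests.get(NGINX_URL, headers={"Host": EXPECTED_HOST}, verify=False)
    print("expected host:", ok.status_code)

    try:
        # any other Host value (e.g. access by raw IP) hits the `return 444`
        # branch, which makes nginx drop the connection without a response
        requests.get(NGINX_URL, headers={"Host": "203.0.113.7"}, verify=False)
        print("unexpected: request with wrong Host was answered")
    except requests.exceptions.ConnectionError:
        print("wrong host: connection closed as expected")
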