diff --git a/.circleci/config.yml b/.circleci/config.yml
deleted file mode 100644
index 43b67a443..000000000
--- a/.circleci/config.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-version: 2.1
-
-orbs:
- redhat-openshift: circleci/redhat-openshift@0.2.0
-
-jobs:
- build-in-Openshift:
- executor: redhat-openshift/default
- steps:
- - checkout
- - redhat-openshift/login-and-update-kubeconfig:
- insecure-skip-tls-verify: true
- openshift-platform-version: 3.x
- server-address: $OC_SERVER_ADDRESS
- token: $OC_TOKEN
- - run:
- name: test oc connection to path finder
- command: oc projects
- - run:
- name: build frontend
- command: |
- oc start-build envoy --wait=true -n tbiwaq-tools
- oc start-build frontend --wait=true -n tbiwaq-tools
- oc start-build python-backend --wait=true -n tbiwaq-tools
-workflows:
- version: 2
- build-deploy:
- jobs:
- - build-in-Openshift
\ No newline at end of file
diff --git a/.github/workflows/build-new-python-image.yaml b/.github/workflows/build-new-python-image.yaml
new file mode 100644
index 000000000..d2220811f
--- /dev/null
+++ b/.github/workflows/build-new-python-image.yaml
@@ -0,0 +1,37 @@
+## For each release, the workflow name, branches, PR_NUMBER and RELEASE_NAME values need to be adjusted accordingly
+## Also change the version number in .pipeline/lib/config.js
+name: new-python-image
+
+on:
+ push:
+ branches: [ new-python39-image-1.42.0 ]
+ workflow_dispatch:
+ workflow_call:
+
+env:
+ ## The number of the tracking pull request that merges the release branch to main
+ PR_NUMBER: 1102
+ RELEASE_NAME: new-python39-image-1.42.0
+
+jobs:
+
+ ## This is the CI job
+ build:
+
+ name: Build ZEVA on Openshift
+ runs-on: ubuntu-latest
+ timeout-minutes: 60
+
+ steps:
+
+ ## checks the repository out to /home/runner/work/zeva/zeva
+ - name: Check out repository
+ uses: actions/checkout@v2
+
+ # enable this once zeva has updated the python packages
+ - name: Run django tests
+ uses: kuanfandevops/django-test-action@zeva-django-test
+ with:
+ settings-dir-path: "backend/zeva"
+ requirements-file: "backend/requirements.txt"
+ managepy-dir: backend
diff --git a/.github/workflows/build-release.yaml b/.github/workflows/build-release.yaml
index e27eee2f7..65ed875ce 100644
--- a/.github/workflows/build-release.yaml
+++ b/.github/workflows/build-release.yaml
@@ -1,17 +1,17 @@
## For each release, the value of workflow name, branches, PR_NUMBER and RELEASE_NAME need to be adjusted accordingly
## Also change the .pipelin/lib/config.js version number
-name: CI/CD ZEVA release-1.41.0
+name: ZEVA v1.42.0
on:
push:
- branches: [ release-1.41.0 ]
+ branches: [ release-1.42.0 ]
workflow_dispatch:
workflow_call:
env:
## The pull request number of the Tracking pull request to merge the release branch to main
- PR_NUMBER: 1072
- RELEASE_NAME: release-1.41.0
+ PR_NUMBER: 1091
+ RELEASE_NAME: release-1.42.0
jobs:
@@ -28,6 +28,14 @@ jobs:
- name: Check out repository
uses: actions/checkout@v2
+ # enable this once zeva has updated the python packages
+ #- name: Run django tests
+ # uses: kuanfandevops/django-test-action@zeva-django-test
+ # with:
+ # settings-dir-path: "backend/zeva"
+ # requirements-file: "backend/requirements.txt"
+ # managepy-dir: backend
+
## Log in to Openshift with a token of service account
- name: Log in to Openshift
uses: redhat-actions/oc-login@v1
@@ -44,12 +52,39 @@ jobs:
npm install
npm run build -- --pr=${{ env.PR_NUMBER }} --env=build
+ deploy-on-dev:
+
+ name: Deploy ZEVA on Dev Environment
+ runs-on: ubuntu-latest
+ timeout-minutes: 60
+ needs: build
+
+ steps:
+
+ - name: Check out repository
+ uses: actions/checkout@v2
+
+ - name: Log in to Openshift
+ uses: redhat-actions/oc-login@v1
+ with:
+ openshift_server_url: ${{ secrets.OPENSHIFT_SERVER }}
+ openshift_token: ${{ secrets.OPENSHIFT_TOKEN }}
+ insecure_skip_tls_verify: true
+ namespace: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools
+
+ #- name: Run BCDK deployment on ZEVA Dev environment
+ # run: |
+ # cd .pipeline
+ # echo "Deploying ZEVA ${{ env.RELEASE_NAME }} on Dev"
+ # npm install
+ # npm run deploy -- --pr=${{ env.PR_NUMBER }} --env=dev
+
deploy-on-test:
name: Deploy ZEVA on Test Environment
runs-on: ubuntu-latest
timeout-minutes: 60
- needs: build
+ needs: deploy-on-dev
steps:
@@ -112,4 +147,4 @@ jobs:
cd .pipeline
echo "Deploying ZEVA ${{ env.RELEASE_NAME }} on Prod"
npm install
- npm run deploy -- --pr=${{ env.PR_NUMBER }} --env=prod
\ No newline at end of file
+ npm run deploy -- --pr=${{ env.PR_NUMBER }} --env=prod
diff --git a/.jenkins/.pipeline/lib/config.js b/.jenkins/.pipeline/lib/config.js
index 15dfcbb36..4bee555ce 100644
--- a/.jenkins/.pipeline/lib/config.js
+++ b/.jenkins/.pipeline/lib/config.js
@@ -1,7 +1,7 @@
'use strict';
const options= require('@bcgov/pipeline-cli').Util.parseArguments()
const changeId = options.pr //aka pull-request
-const version = '1.0.0'
+const version = '1.42.0'
const name = 'jenkins'
const ocpName = 'apps.silver.devops'
diff --git a/.pipeline/lib/config.js b/.pipeline/lib/config.js
index 9340aa0b5..b0febdc47 100644
--- a/.pipeline/lib/config.js
+++ b/.pipeline/lib/config.js
@@ -1,7 +1,7 @@
'use strict';
const options= require('@bcgov/pipeline-cli').Util.parseArguments()
const changeId = options.pr //aka pull-request
-const version = '1.41.0'
+const version = '1.42.0'
const name = 'zeva'
const ocpName = 'apps.silver.devops'
diff --git a/.pipeline/lib/deploy.js b/.pipeline/lib/deploy.js
index 6ec09f087..29e4368cf 100755
--- a/.pipeline/lib/deploy.js
+++ b/.pipeline/lib/deploy.js
@@ -148,6 +148,7 @@ module.exports = settings => {
}))
//deploy schemaspy
+ /*
if(phase === 'dev') {
objects = objects.concat(oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/templates/schemaspy/schemaspy-dc.yaml`, {
'param': {
@@ -161,7 +162,7 @@ module.exports = settings => {
'OCP_NAME': phases[phase].ocpName
}
}))
- }
+ }*/
/**
//deploy rabbitmq, use docker image directly
diff --git a/backend/.s2i/bin/assemble b/backend/.s2i/bin/assemble
old mode 100644
new mode 100755
index 0767891df..1b547cbc4
--- a/backend/.s2i/bin/assemble
+++ b/backend/.s2i/bin/assemble
@@ -8,58 +8,104 @@ function should_collectstatic() {
is_django_installed && [[ -z "$DISABLE_COLLECTSTATIC" ]]
}
-# Install pipenv to the separate virtualenv to isolate it
+function virtualenv_bin() {
+ # New versions of Python (>3.6) should use venv module
+ # from stdlib instead of virtualenv package
+ python3.9 -m venv $1
+}
+
+# Install pipenv or micropipenv to the separate virtualenv to isolate it
# from system Python packages and packages in the main
# virtualenv. Executable is simlinked into ~/.local/bin
# to be accessible. This approach is inspired by pipsi
# (pip script installer).
-function install_pipenv() {
- echo "---> Installing pipenv packaging tool ..."
- VENV_DIR=$HOME/.local/venvs/pipenv
- virtualenv $VENV_DIR
- $VENV_DIR/bin/pip --isolated install -U pipenv
+function install_tool() {
+ echo "---> Installing $1 packaging tool ..."
+ VENV_DIR=$HOME/.local/venvs/$1
+ virtualenv_bin "$VENV_DIR"
+ # First, try to install the tool without --isolated, which means that if you
+ # have your own PyPI mirror, it will be taken from there. If this try fails, try it
+ # again with --isolated, which ignores external pip settings (env vars, config file)
+ # and installs the tool from PyPI (needs internet connection).
+ # $1$2 combines the package name with [extras] or a version specifier, if $2 is defined
+ if ! $VENV_DIR/bin/pip install -U $1$2; then
+ echo "WARNING: Installation of $1 failed, trying again from official PyPI with pip --isolated install"
+ $VENV_DIR/bin/pip install --isolated -U $1$2 # combines the package name with [extras] or a version specifier, if $2 is defined
+ fi
mkdir -p $HOME/.local/bin
- ln -s $VENV_DIR/bin/pipenv $HOME/.local/bin/pipenv
+ ln -s $VENV_DIR/bin/$1 $HOME/.local/bin/$1
}
set -e
+# First of all, check that we don't have a disallowed combination of ENVs
+if [[ ! -z "$ENABLE_PIPENV" && ! -z "$ENABLE_MICROPIPENV" ]]; then
+ echo "ERROR: Pipenv and micropipenv cannot be enabled at the same time!"
+ # podman/buildah does not relay this exit code, but hopefully it will be fixed
+ # https://github.com/containers/buildah/issues/2305
+ exit 3
+fi
+
shopt -s dotglob
echo "---> Installing application source ..."
-mv /tmp/src/* ./
+mv /tmp/src/* "$HOME"
-if [[ ! -z "$UPGRADE_PIP_TO_LATEST" || ! -z "$ENABLE_PIPENV" ]]; then
- echo "---> Upgrading pip to latest version ..."
- pip install -U pip setuptools wheel
+# set permissions for any installed artifacts
+fix-permissions /opt/app-root -P
+
+
+if [[ ! -z "$UPGRADE_PIP_TO_LATEST" ]]; then
+ echo "---> Upgrading pip, setuptools and wheel to latest version ..."
+ if ! pip install -U pip setuptools wheel; then
+ echo "WARNING: Installation of the latest pip, setuptools and wheel failed, trying again from official PyPI with pip --isolated install"
+ pip install --isolated -U pip setuptools wheel
+ fi
fi
if [[ ! -z "$ENABLE_PIPENV" ]]; then
- install_pipenv
+ if [[ ! -z "$PIN_PIPENV_VERSION" ]]; then
+ # Add == as a prefix to pipenv version, if defined
+ PIN_PIPENV_VERSION="==$PIN_PIPENV_VERSION"
+ fi
+ install_tool "pipenv" "$PIN_PIPENV_VERSION"
echo "---> Installing dependencies via pipenv ..."
if [[ -f Pipfile ]]; then
pipenv install --deploy
elif [[ -f requirements.txt ]]; then
pipenv install -r requirements.txt
fi
- pipenv check
+ # pipenv check
+elif [[ ! -z "$ENABLE_MICROPIPENV" ]]; then
+ install_tool "micropipenv" "[toml]"
+ echo "---> Installing dependencies via micropipenv ..."
+ # micropipenv detects Pipfile.lock and requirements.txt in this order
+ micropipenv install --deploy
elif [[ -f requirements.txt ]]; then
- echo "---> Installing dependencies ..."
- pip install -i https://$ARTIFACTORY_USER:$ARTIFACTORY_PASSWORD@artifacts.developer.gov.bc.ca/artifactory/api/pypi/pypi-remote/simple --upgrade pip
- pip install -i https://$ARTIFACTORY_USER:$ARTIFACTORY_PASSWORD@artifacts.developer.gov.bc.ca/artifactory/api/pypi/pypi-remote/simple -r requirements.txt
-elif [[ -f setup.py ]]; then
- echo "---> Installing application ..."
- python setup.py develop
+ if [[ -z "${ARTIFACTORY_USER}" ]]; then
+ echo "---> Installing dependencies from external repo ..."
+ pip install -r requirements.txt
+ else
+ echo "---> Installing dependencies from artifactory ..."
+ pip install -i https://$ARTIFACTORY_USER:$ARTIFACTORY_PASSWORD@artifacts.developer.gov.bc.ca/artifactory/api/pypi/pypi-remote/simple -r requirements.txt
+ fi
fi
-
+if [[ -f setup.py && -z "$DISABLE_SETUP_PY_PROCESSING" ]]; then
+ echo "---> Installing application ..."
+ pip install .
+fi
if should_collectstatic; then
(
echo "---> Collecting Django static files ..."
- APP_HOME=${APP_HOME:-.}
- # Look for 'manage.py' in the directory specified by APP_HOME, or the current directory
- manage_file=$APP_HOME/manage.py
+ APP_HOME=$(readlink -f "${APP_HOME:-.}")
+ # Change the working directory to APP_HOME
+ PYTHONPATH="$(pwd)${PYTHONPATH:+:$PYTHONPATH}"
+ cd "$APP_HOME"
+
+ # Look for 'manage.py' in the current directory
+ manage_file=./manage.py
if [[ ! -f "$manage_file" ]]; then
echo "WARNING: seems that you're using Django, but we could not find a 'manage.py' file."
@@ -75,20 +121,8 @@ if should_collectstatic; then
fi
python $manage_file collectstatic --noinput
-
)
fi
-echo "---> current folder is "
-pwd
-
-# Run unit tests in build stage with the code below
-# echo "--> running Django unit tests"
-
-# python $manage_file test
-
# set permissions for any installed artifacts
-fix-permissions /opt/app-root
-echo "---> current folder2 is "
-pwd
-ls -lrt
+fix-permissions /opt/app-root -P
\ No newline at end of file
diff --git a/backend/Dockerfile b/backend/Dockerfile
index a937e1ddd..1343bdc97 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.6
+FROM python:3.9
ENV PYTHONUNBUFFERED=1
diff --git a/backend/api/models/sales_submission_content.py b/backend/api/models/sales_submission_content.py
index 00d8ec528..398eae4da 100644
--- a/backend/api/models/sales_submission_content.py
+++ b/backend/api/models/sales_submission_content.py
@@ -84,7 +84,10 @@ def vehicle(self):
@property
def icbc_verification(self):
- q = 'select * from icbc_registration_data where vin=\'{}\' limit 1'.format(self.xls_vin)
+ q = 'select * from icbc_registration_data join icbc_upload_date on \
+ icbc_upload_date.id = icbc_upload_date_id where \
+ vin=\'{}\' and upload_date < \'{}\' limit 1'.format(
+ self.xls_vin, self.update_timestamp)
registration = IcbcRegistrationData.objects.raw(q)
if registration:
return registration[0]
@@ -113,6 +116,8 @@ def is_already_awarded(self):
submission_id=self.submission_id
).filter(
vin=self.xls_vin
+ ).exclude(
+ create_timestamp__gte=self.update_timestamp
).first()
if has_been_awarded:
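Note on the icbc_verification change above: the query still builds its SQL with str.format() on self.xls_vin and self.update_timestamp. A minimal sketch of the same lookup with bound parameters, using only the table and column names shown in the hunk; Manager.raw() accepts a params list, so the values never become part of the SQL text:

```python
# Sketch only: the icbc_verification lookup with bound parameters instead of
# str.format(); table and column names are taken from the hunk above.
from api.models.icbc_registration_data import IcbcRegistrationData

def icbc_verification_lookup(xls_vin, update_timestamp):
    query = (
        "SELECT * FROM icbc_registration_data "
        "JOIN icbc_upload_date ON icbc_upload_date.id = icbc_upload_date_id "
        "WHERE vin = %s AND upload_date < %s LIMIT 1"
    )
    # raw() binds the %s placeholders, so the VIN and timestamp are never
    # interpolated into the SQL string itself.
    rows = list(IcbcRegistrationData.objects.raw(query, [xls_vin, update_timestamp]))
    return rows[0] if rows else None
```

The property itself could pass self.xls_vin and self.update_timestamp the same way.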
diff --git a/backend/api/serializers/sales_submission_comment.py b/backend/api/serializers/sales_submission_comment.py
index cd53f9529..727740659 100644
--- a/backend/api/serializers/sales_submission_comment.py
+++ b/backend/api/serializers/sales_submission_comment.py
@@ -22,10 +22,15 @@ def get_create_user(self, obj):
serializer = MemberSerializer(user, read_only=True)
return serializer.data
+ def update(self, instance, validated_data):
+ instance.comment = validated_data.get("comment")
+ instance.save()
+ return instance
+
class Meta:
model = SalesSubmissionComment
fields = (
- 'id', 'comment', 'create_timestamp', 'create_user','to_govt'
+ 'id', 'comment', 'create_timestamp', 'create_user', 'to_govt', 'update_timestamp'
)
read_only_fields = (
'id',
diff --git a/backend/api/serializers/vehicle.py b/backend/api/serializers/vehicle.py
index c2b4aa774..b258f1af0 100644
--- a/backend/api/serializers/vehicle.py
+++ b/backend/api/serializers/vehicle.py
@@ -5,6 +5,7 @@
from api.models.model_year import ModelYear
from api.models.credit_class import CreditClass
+from api.models.organization import Organization
from api.models.vehicle import Vehicle
from api.models.vehicle_attachment import VehicleAttachment
from api.models.vehicle_change_history import VehicleChangeHistory
@@ -150,6 +151,45 @@ class Meta:
fields = ('create_timestamp', 'create_user', 'validation_status')
+class VehicleListSerializer(
+ ModelSerializer, EnumSupportSerializerMixin
+):
+ organization = SerializerMethodField()
+ validation_status = EnumField(VehicleDefinitionStatuses, read_only=True)
+ credit_value = SerializerMethodField()
+ credit_class = SerializerMethodField()
+ model_year = SerializerMethodField()
+ vehicle_zev_type = SerializerMethodField()
+
+ def get_organization(self, obj):
+ organization = Organization.objects.get(id=obj.organization_id)
+ name = organization.name
+ short_name = organization.short_name
+ return {'name': name, 'short_name': short_name}
+
+ def get_credit_value(self, instance):
+ return instance.get_credit_value()
+
+ def get_credit_class(self, instance):
+ return instance.get_credit_class()
+
+ def get_model_year(self, obj):
+ model_year = ModelYear.objects.get(id=obj.model_year_id)
+ return model_year.name
+
+ def get_vehicle_zev_type(self, obj):
+ zev_type = ZevType.objects.filter(id=obj.vehicle_zev_type_id).first()
+ return zev_type.vehicle_zev_code
+
+ class Meta:
+ model = Vehicle
+ fields = ('id', 'organization', 'validation_status',
+ 'credit_value', 'credit_class',
+ 'model_year', 'model_name', 'make',
+ 'range', 'vehicle_zev_type', 'is_active'
+ )
+
+
class VehicleSerializer(
ModelSerializer, EnumSupportSerializerMixin
):
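Each SerializerMethodField in the new VehicleListSerializer issues its own query per row (Organization.objects.get, ModelYear.objects.get, a ZevType filter). A hedged sketch of a list queryset that prefetches those relations so the methods can read them off the instance; the actual queryset for the list action is not shown in this diff, and the related-field names are assumptions based on the *_id attributes used above:

```python
# Sketch only: pull the related rows in the same query as the vehicles.
# "organization", "model_year" and "vehicle_zev_type" are assumed field names.
from api.models.vehicle import Vehicle

def vehicle_list_queryset():
    return Vehicle.objects.select_related(
        "organization", "model_year", "vehicle_zev_type"
    )
```

With that in place, get_model_year(obj) could return obj.model_year.name directly instead of issuing a second lookup per row.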
diff --git a/backend/api/services/send_email.py b/backend/api/services/send_email.py
index a2a3976b7..e7dfd192a 100644
--- a/backend/api/services/send_email.py
+++ b/backend/api/services/send_email.py
@@ -271,8 +271,10 @@ def notifications_zev_model(request: object, validation_status: str):
def subscribed_users(notifications: list, request: object, request_type: str, email_type: str):
user_email = None
try:
- subscribed_users = NotificationSubscription.objects.values_list('user_profile_id', flat=True).filter(notification__id__in=notifications)
-
+ subscribed_users = NotificationSubscription.objects.values_list('user_profile_id', flat=True).filter(
+ notification__id__in=notifications).filter(
+ user_profile__is_active=True
+ )
if subscribed_users:
govt_org = Organization.objects.filter(is_government=True).first()
if request_type == 'credit_transfer':
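The two chained .filter() calls above behave the same as a single call, since neither lookup crosses a multi-valued relation; a sketch of the equivalent single-filter form of the subscribed_users query:

```python
# Sketch only: equivalent to the chained .filter() calls in subscribed_users above.
subscribed_users = NotificationSubscription.objects.values_list(
    "user_profile_id", flat=True
).filter(
    notification__id__in=notifications,
    user_profile__is_active=True,
)
```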
diff --git a/backend/api/tests/test_credit_requests.py b/backend/api/tests/test_credit_requests.py
index dd73154ac..cde6d57a0 100644
--- a/backend/api/tests/test_credit_requests.py
+++ b/backend/api/tests/test_credit_requests.py
@@ -7,6 +7,7 @@
from ..models.sales_submission import SalesSubmission
from ..models.vehicle import Vehicle
from ..models.vin_statuses import VINStatuses
+from ..models.sales_submission_statuses import SalesSubmissionStatuses
class TestSales(BaseTestCase):
@@ -45,7 +46,7 @@ def test_validate_validation_status(self):
sub = SalesSubmission.objects.create(
organization=self.users['RTAN_BCEID'].organization,
submission_sequence=1,
- validation_status='DRAFT'
+ validation_status=SalesSubmissionStatuses.NEW
)
request = {
@@ -55,17 +56,15 @@ def test_validate_validation_status(self):
# try changing from status NEW to VALIDATED, this should fail
# ie it should throw a Validation Error
self.assertRaises(
- ValidationError, sub.validate_validation_status(
- 'VALIDATED', request
- )
+ ValidationError, sub.validate_validation_status, SalesSubmissionStatuses.VALIDATED, request
)
- sub.validation_status = 'RECOMMEND_APPROVAL'
+ sub.validation_status = SalesSubmissionStatuses.RECOMMEND_APPROVAL
sub.save()
# try changing from status RECOMMEND_APPROVAL to DELETED, this should
# fail
# ie it should throw a Validation Error
self.assertRaises(
- ValidationError, sub.validate_validation_status('DELETED', request)
+ ValidationError, sub.validate_validation_status, SalesSubmissionStatuses.DELETED, request
)
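The fix above passes the callable and its arguments to assertRaises instead of invoking it inline (which raised before the assertion could run). Inside the same test, the context-manager form reads equivalently:

```python
# Sketch only: the same assertion written with assertRaises as a context
# manager, using the sub and request objects already defined in the test.
with self.assertRaises(ValidationError):
    sub.validate_validation_status(SalesSubmissionStatuses.VALIDATED, request)
```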
diff --git a/backend/api/tests/test_model_year_report_status.py b/backend/api/tests/test_model_year_report_status.py
deleted file mode 100644
index f79249796..000000000
--- a/backend/api/tests/test_model_year_report_status.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# from django.utils.datetime_safe import datetime
-# from rest_framework.serializers import ValidationError
-
-# from .base_test_case import BaseTestCase
-# from ..models.model_year_report import ModelYearReport
-# from ..models.supplemental_report import SupplementalReport
-# from ..models.model_year_report_statuses import ModelYearReportStatuses
-# from ..models.organization import Organization
-# from ..models.model_year import ModelYear
-
-
-# class TestModelYearReports(BaseTestCase):
-# def setUp(self):
-# super().setUp()
-
-# org1 = self.users['EMHILLIE_BCEID'].organization
-# gov = self.users['RTAN'].organization
-
-# model_year_report = ModelYearReport.objects.create(
-# organization=org1,
-# create_user='EMHILLIE_BCEID',
-# validation_status=ModelYearReportStatuses.ASSESSED,
-# organization_name=Organization.objects.get('BMW Canada Inc.'),
-# supplier_class='M',
-# model_year=ModelYear.objects.get('2021'),
-# credit_reduction_selection='A'
-# )
-# supplementary_report = SupplementalReport.objects.create(
-# create_user='EMHILLIE_BCEID',
-# validation_status=ModelYearReportStatuses.DRAFT,
-# )
-# reassessment_report = SupplementalReport.objects.create(
-# create_user='RTAN',
-# validation_status=ModelYearReportStatuses.DRAFT,
-# )
-
-# def test_status(self):
-# response = self.clients['EMHILLIE_BCEID'].get("/api/compliance/reports")
-# self.assertEqual(response.status_code, 200)
-# result = response.data
-# print('(((((((((())))))))))')
-# print(result)
-# print('(((((((((())))))))))')
-# # self.assertEqual(len(result), 1)
diff --git a/backend/api/tests/test_model_year_reports.py b/backend/api/tests/test_model_year_reports.py
new file mode 100644
index 000000000..95d5b030a
--- /dev/null
+++ b/backend/api/tests/test_model_year_reports.py
@@ -0,0 +1,120 @@
+import json
+from django.utils.datetime_safe import datetime
+from rest_framework.serializers import ValidationError
+
+from .base_test_case import BaseTestCase
+from ..models.model_year_report import ModelYearReport
+from ..models.supplemental_report import SupplementalReport
+from ..models.model_year_report_statuses import ModelYearReportStatuses
+from ..models.model_year_report_assessment import ModelYearReportAssessment
+from ..models.model_year_report_assessment_descriptions import ModelYearReportAssessmentDescriptions
+from ..models.model_year_report_ldv_sales import ModelYearReportLDVSales
+from ..models.organization import Organization
+from ..models.model_year import ModelYear
+
+
+class TestModelYearReports(BaseTestCase):
+ def setUp(self):
+ super().setUp()
+
+ org1 = self.users['EMHILLIE_BCEID'].organization
+ gov = self.users['RTAN'].organization
+
+ model_year_report = ModelYearReport.objects.create(
+ organization=org1,
+ create_user='EMHILLIE_BCEID',
+ validation_status=ModelYearReportStatuses.ASSESSED,
+ organization_name=Organization.objects.get(name='BMW Canada Inc.'),
+ supplier_class='M',
+ model_year=ModelYear.objects.get(effective_date='2021-01-01'),
+ credit_reduction_selection='A'
+ )
+ supplementary_report = SupplementalReport.objects.create(
+ model_year_report=model_year_report,
+ create_user='EMHILLIE_BCEID',
+ status=ModelYearReportStatuses.DRAFT,
+ )
+ model_year_report_assessment_description = ModelYearReportAssessmentDescriptions.objects.create(
+ description='test',
+ display_order=1
+ )
+ model_year_report_assessment = ModelYearReportAssessment.objects.create(
+ model_year_report=model_year_report,
+ model_year_report_assessment_description=model_year_report_assessment_description,
+ penalty=20.00
+ )
+ reassessment_report = SupplementalReport.objects.create(
+ model_year_report=model_year_report,
+ supplemental_id=supplementary_report.id,
+ create_user='RTAN',
+ status=ModelYearReportStatuses.DRAFT,
+ )
+
+ def test_status(self):
+ response = self.clients['EMHILLIE_BCEID'].get("/api/compliance/reports")
+ self.assertEqual(response.status_code, 200)
+ result = response.data
+ self.assertEqual(len(result), 1)
+
+
+ def test_assessment_patch_response(self):
+ makes = ["TESLATRUCK", "TESLA", "TEST"]
+ sales = {"2020":25}
+ data = json.dumps({"makes":makes, "sales":sales})
+ response = self.clients['RTAN'].patch("/api/compliance/reports/1/assessment_patch", data=data, content_type='application/json')
+ self.assertEqual(response.status_code, 200)
+ response = self.clients['RTAN'].patch("/api/compliance/reports/999/assessment_patch", data=data, content_type='application/json')
+ self.assertEqual(response.status_code, 404)
+
+
+ def test_assessment_patch_logic(self):
+ makes = ["TESLATRUCK", "TESLA", "TEST"]
+ sales = {"2020":25}
+ model_year = ModelYear.objects.filter(id=2).first()
+ model_year_report = ModelYearReport.objects.filter(id=1).first()
+ data = json.dumps({"makes":makes, "sales":sales})
+
+ modelYearReportLDVSales1 = ModelYearReportLDVSales.objects.create(
+ model_year=model_year,
+ ldv_sales=10,
+ model_year_report=model_year_report
+ )
+
+ response = self.clients['RTAN'].patch("/api/compliance/reports/1/assessment_patch", data=data, content_type='application/json')
+
+ sales_records = ModelYearReportLDVSales.objects.filter(
+ model_year_id=model_year.id,
+ model_year_report=model_year_report)
+
+ # Check that second record is created
+ self.assertEqual(sales_records.count(), 2)
+
+ data = json.dumps({"makes":makes, "sales":{"2020":10}})
+ response = self.clients['RTAN'].patch("/api/compliance/reports/1/assessment_patch", data=data, content_type='application/json')
+
+ sales_records = ModelYearReportLDVSales.objects.filter(
+ model_year_id=model_year.id,
+ model_year_report=model_year_report)
+
+ # Check for proper deletion of first record
+ self.assertEqual(sales_records.count(), 1)
+
+ modelYearReportLDVSales2 = ModelYearReportLDVSales.objects.create(
+ model_year=model_year,
+ ldv_sales=10,
+ from_gov=True,
+ model_year_report=model_year_report
+ )
+
+ data = json.dumps({"makes":makes, "sales":{"2020":50}})
+ response = self.clients['RTAN'].patch("/api/compliance/reports/1/assessment_patch", data=data, content_type='application/json')
+
+ sales_records = ModelYearReportLDVSales.objects.filter(
+ model_year_id=model_year.id,
+ model_year_report=model_year_report)
+
+ sales_record = ModelYearReportLDVSales.objects.filter(id=3).first()
+
+ # check that second record is updated, and no new record created
+ self.assertEqual(sales_records.count(), 2)
+ self.assertEqual(sales_record.ldv_sales, 50)
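
The final assertion above fetches the updated record by the hardcoded primary key 3. A hedged alternative that looks the record up by the attributes the test itself created, so it does not depend on auto-increment ordering:

```python
# Sketch only: locate the government-supplied sales record by its natural
# attributes instead of the hardcoded id=3 used above.
sales_record = ModelYearReportLDVSales.objects.filter(
    model_year=model_year,
    model_year_report=model_year_report,
    from_gov=True,
).first()
self.assertEqual(sales_record.ldv_sales, 50)
```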
diff --git a/backend/api/viewsets/credit_request.py b/backend/api/viewsets/credit_request.py
index b734bacd5..9a18ee2dc 100644
--- a/backend/api/viewsets/credit_request.py
+++ b/backend/api/viewsets/credit_request.py
@@ -8,10 +8,13 @@
from django.db.models import Subquery, Count, Q
from django.db.models.expressions import RawSQL
from django.http import HttpResponse, HttpResponseForbidden
+from api.models.sales_submission_comment import SalesSubmissionComment
+from api.serializers.sales_submission_comment import SalesSubmissionCommentSerializer
from rest_framework import mixins, viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
+from rest_framework import status
from api.models.icbc_registration_data import IcbcRegistrationData
from api.models.record_of_sale import RecordOfSale
@@ -478,3 +481,28 @@ def download_details(self, request, pk):
)
)
return response
+
+ @action(detail=True, methods=["PATCH"])
+ def update_comment(self, request, pk):
+ comment_text = request.data.get("comment")
+ username = request.user.username
+ comment = SalesSubmissionComment.objects.get(
+ id=pk
+ )
+ if username == comment.create_user:
+ serializer = SalesSubmissionCommentSerializer(comment, data={'comment': comment_text}, partial=True)
+ serializer.is_valid(raise_exception=True)
+ serializer.save()
+ return Response(serializer.data)
+ return Response(status=status.HTTP_403_FORBIDDEN)
+
+ @action(detail=True, methods=["PATCH"])
+ def delete_comment(self, request, pk):
+ username = request.user.username
+ comment = SalesSubmissionComment.objects.get(
+ id=pk
+ )
+ if username == comment.create_user:
+ comment.delete()
+ return Response(status=status.HTTP_200_OK)
+ return Response(status=status.HTTP_403_FORBIDDEN)
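
Both new actions treat pk as the SalesSubmissionComment id even though they hang off the credit-request viewset. A hedged sketch of exercising update_comment with the test clients used elsewhere in this diff; the /api/credit-requests/ prefix is an assumption, since the router registration is not shown here:

```python
# Sketch only: calling the new update_comment action from a BaseTestCase
# subclass. The URL prefix is an assumption; pk is the comment id.
import json

def test_update_own_comment(self):
    data = json.dumps({"comment": "updated text"})
    response = self.clients["RTAN"].patch(
        "/api/credit-requests/1/update_comment",
        data=data,
        content_type="application/json",
    )
    # 200 if RTAN authored comment 1, 403 otherwise (see update_comment above)
    self.assertIn(response.status_code, [200, 403])
```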
diff --git a/backend/api/viewsets/model_year_report.py b/backend/api/viewsets/model_year_report.py
index a73ef3cd4..9d256e634 100644
--- a/backend/api/viewsets/model_year_report.py
+++ b/backend/api/viewsets/model_year_report.py
@@ -1,7 +1,7 @@
import uuid
from django.db.models import Q
from django.shortcuts import get_object_or_404
-from django.http import HttpResponse
+from django.http import HttpResponse, HttpResponseForbidden
from rest_framework.response import Response
from rest_framework import mixins, viewsets
from rest_framework.decorators import action
@@ -562,6 +562,32 @@ def comment_save(self, request, pk):
return Response(serializer.data)
+
+ @action(detail=True, methods=['patch'])
+ def comment_patch(self, request, pk):
+ # only government users can edit comments
+ if not request.user.is_government:
+ return HttpResponseForbidden()
+
+ id = request.data.get('id')
+ comment = request.data.get('comment')
+
+ modelYearReportAssessmentComment = get_object_or_404(ModelYearReportAssessmentComment, pk=id)
+
+ # only the original commenter can edit a comment
+ if request.user.username != modelYearReportAssessmentComment.create_user:
+ return HttpResponseForbidden()
+
+ modelYearReportAssessmentComment.comment = comment
+ modelYearReportAssessmentComment.save()
+
+ report = get_object_or_404(ModelYearReport, pk=pk)
+
+ serializer = ModelYearReportSerializer(report, context={'request': request})
+
+ return Response(serializer.data)
+
+
@action(detail=True, methods=['get'])
def assessment(self, request, pk):
report = get_object_or_404(ModelYearReport, pk=pk)
diff --git a/backend/api/viewsets/vehicle.py b/backend/api/viewsets/vehicle.py
index 1b8a3ae1a..b46942a3d 100644
--- a/backend/api/viewsets/vehicle.py
+++ b/backend/api/viewsets/vehicle.py
@@ -15,7 +15,8 @@
from api.serializers.vehicle import ModelYearSerializer, \
VehicleZevTypeSerializer, VehicleClassSerializer, \
VehicleSaveSerializer, VehicleSerializer, \
- VehicleStatusChangeSerializer, VehicleIsActiveChangeSerializer
+ VehicleStatusChangeSerializer, VehicleIsActiveChangeSerializer, \
+ VehicleListSerializer
from api.services.minio import minio_put_object
from auditable.views import AuditableMixin
from api.models.vehicle import VehicleDefinitionStatuses
@@ -30,6 +31,7 @@ class VehicleViewSet(
serializer_classes = {
'default': VehicleSerializer,
+ 'list': VehicleListSerializer,
'state_change': VehicleStatusChangeSerializer,
'is_active_change': VehicleIsActiveChangeSerializer,
'create': VehicleSaveSerializer,
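The new 'list' entry only takes effect if the viewset resolves serializer_classes per action. That lookup is not part of this diff (it likely lives on the viewset or AuditableMixin); a sketch of the conventional shape it takes:

```python
# Sketch only: the usual per-action resolution of serializer_classes. The real
# implementation in this repo is not shown in the diff above.
def get_serializer_class(self):
    if self.action in self.serializer_classes:
        return self.serializer_classes[self.action]
    return self.serializer_classes["default"]
```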
diff --git a/backend/gunicorn.cfg b/backend/gunicorn.cfg
deleted file mode 100644
index 32cbf5339..000000000
--- a/backend/gunicorn.cfg
+++ /dev/null
@@ -1,49 +0,0 @@
-# Gunicorn configuration file.
-
-# Worker processes
-#
-# workers - The number of worker processes that this server
-# should keep alive for handling requests.
-#
-# A positive integer generally in the 2-4 x $(NUM_CORES)
-# range. You'll want to vary this a bit to find the best
-# for your particular application's work load.
-#
-# worker_class - The type of workers to use. The default
-# sync class should handle most 'normal' types of work
-# loads. You'll want to read
-# http://docs.gunicorn.org/en/latest/design.html#choosing-a-worker-type
-# for information on when you might want to choose one
-# of the other worker classes.
-#
-# A string referring to a Python path to a subclass of
-# gunicorn.workers.base.Worker. The default provided values
-# can be seen at
-# http://docs.gunicorn.org/en/latest/settings.html#worker-class
-#
-# worker_connections - For the eventlet and gevent worker classes
-# this limits the maximum number of simultaneous clients that
-# a single process can handle.
-#
-# A positive integer generally set to around 1000.
-#
-# timeout - If a worker does not notify the master process in this
-# number of seconds it is killed and a new worker is spawned
-# to replace it.
-#
-# Generally set to thirty seconds. Only set this noticeably
-# higher if you're sure of the repercussions for sync workers.
-# For the non sync workers it just means that the worker
-# process is still communicating and is not tied to the length
-# of time required to handle a single request.
-#
-# keepalive - The number of seconds to wait for the next request
-# on a Keep-Alive HTTP connection.
-#
-# A positive integer. Generally set in the 1-5 seconds range.
-#
-
-workers = 8
-timeout = 1800
-graceful_timeout = 1800
-keepalive = 5
diff --git a/backend/gunicorn.cfg.py b/backend/gunicorn.cfg.py
new file mode 100644
index 000000000..ce8d7e77b
--- /dev/null
+++ b/backend/gunicorn.cfg.py
@@ -0,0 +1,2 @@
+bind = "0.0.0.0:8080"
+workers = 2
\ No newline at end of file
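The new gunicorn.cfg.py keeps only bind and workers, dropping the long worker timeouts and keepalive that the deleted gunicorn.cfg carried. If those settings are still wanted for the new image (a deployment decision this diff does not state), the file could carry them over:

```python
# Sketch only: gunicorn.cfg.py with the settings from the deleted gunicorn.cfg
# carried over; whether the 1800s timeouts are still appropriate is an assumption.
bind = "0.0.0.0:8080"
workers = 2
timeout = 1800
graceful_timeout = 1800
keepalive = 5
```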
diff --git a/backend/requirements.txt b/backend/requirements.txt
index 14be5a8ae..85ee9c349 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -12,7 +12,7 @@ cryptography==3.4.7
Django==3.1.12
django-celery-beat==1.5.0
django-cors-headers==3.2.1
-django-enumfields==2.0.0
+django-enumfields==2.1.1
django-filter==2.4.0
django-timezone-field==4.0
djangorestframework==3.12.4
diff --git a/frontend/.prettierignore b/frontend/.prettierignore
new file mode 100644
index 000000000..10f9d5722
--- /dev/null
+++ b/frontend/.prettierignore
@@ -0,0 +1,6 @@
+# Ignore artifacts:
+build
+coverage
+
+# Ignore all HTML files:
+*.html
\ No newline at end of file
diff --git a/frontend/.s2i/bin/assemble b/frontend/.s2i/bin/assemble
old mode 100644
new mode 100755
index 33bcf8c8a..19faefa24
--- a/frontend/.s2i/bin/assemble
+++ b/frontend/.s2i/bin/assemble
@@ -1,36 +1,129 @@
#!/bin/bash
# Prevent running assemble in builders different than official STI image.
-# The official nodejs:0.10-onbuild already run npm install and use different
+# The official nodejs:8-onbuild image already runs npm install and uses a different
# application folder.
-# if /user/src/app directory exists, quit this script with status 0
[ -d "/usr/src/app" ] && exit 0
-# from "help set", it says "-e Exit immediately if a command exits with a non-zero status."
set -e
-# there are options which modify the behavior of bash, they can be set or unset using shopt
-# -s means If optnames are specified, set those options. If no optnames are specified, list all options that are currently set.
-# -u can Unset optnames.
-shopt -s dotglob
+# FIXME: Linking of global modules is disabled for now as it causes npm failures
+# under RHEL7
+# Global modules good to have
+# npmgl=$(grep "^\s*[^#\s]" ../etc/npm_global_module_list | sort -u)
+# Available global modules; only match top-level npm packages
+#global_modules=$(npm ls -g 2> /dev/null | perl -ne 'print "$1\n" if /^\S+\s(\S+)\@[\d\.-]+/' | sort -u)
+# List all modules in common
+#module_list=$(/usr/bin/comm -12 <(echo "${global_modules}") | tr '\n' ' ')
+# Link the modules
+#npm link $module_list
+
+safeLogging () {
+ if [[ $1 =~ http[s]?://.*@.*$ ]]; then
+ echo $1 | sed 's/^.*@/redacted@/'
+ else
+ echo $1
+ fi
+}
+shopt -s dotglob
+if [ -d /tmp/artifacts ] && [ "$(ls /tmp/artifacts/ 2>/dev/null)" ]; then
+ echo "---> Restoring previous build artifacts ..."
+ mv -T --verbose /tmp/artifacts/node_modules "${HOME}/node_modules"
+fi
-# tfrs/frontend/* were copied at /tmp/src, copy /tmp/src/* into /opt/app-root/src
echo "---> Installing application source ..."
-cp -r /tmp/src/* ./ && rm -rf /tmp/src/*
+mv /tmp/src/* ./
+
+# Fix source directory permissions
+fix-permissions ./
+
+if [ ! -z $HTTP_PROXY ]; then
+ echo "---> Setting npm http proxy to" $(safeLogging $HTTP_PROXY)
+ npm config set proxy $HTTP_PROXY
+fi
+
+if [ ! -z $http_proxy ]; then
+ echo "---> Setting npm http proxy to" $(safeLogging $http_proxy)
+ npm config set proxy $http_proxy
+fi
+
+if [ ! -z $HTTPS_PROXY ]; then
+ echo "---> Setting npm https proxy to" $(safeLogging $HTTPS_PROXY)
+ npm config set https-proxy $HTTPS_PROXY
+fi
+
+if [ ! -z $https_proxy ]; then
+ echo "---> Setting npm https proxy to" $(safeLogging $https_proxy)
+ npm config set https-proxy $https_proxy
+fi
+
+# Change the npm registry mirror if provided
+if [ -n "$NPM_MIRROR" ]; then
+ npm config set registry $NPM_MIRROR
+fi
+
+# Set the DEV_MODE to false by default.
+if [ -z "$DEV_MODE" ]; then
+ export DEV_MODE=false
+fi
+
+# If NODE_ENV is not set by the user, then NODE_ENV is determined by whether
+# the container is run in development mode.
+if [ -z "$NODE_ENV" ]; then
+ if [ "$DEV_MODE" == true ]; then
+ export NODE_ENV=development
+ else
+ export NODE_ENV=production
+ fi
+fi
+
+if [ "$NODE_ENV" != "production" ]; then
+
+ echo "---> Building your Node application from source"
+ npm install
+
+else
+
+ echo "---> Have to set DEV_MODE and NODE_ENV to empty otherwise the deployment can not be started"
+ echo "---> It'll have error like can not resolve source-map-loader..."
+ export DEV_MODE=""
+ export NODE_ENV=""
+
+ if [[ -z "${ARTIFACTORY_USER}" ]]; then
+ echo "---> Installing all dependencies from external repo"
+ else
+ echo "---> Installing all dependencies from Artifactory"
+ npm config set registry https://artifacts.developer.gov.bc.ca/artifactory/api/npm/npm-remote/
+ curl -u $ARTIFACTORY_USER:$ARTIFACTORY_PASSWORD https://artifacts.developer.gov.bc.ca/artifactory/api/npm/auth >> ~/.npmrc
+ fi
+
+ echo "---> Installing all dependencies"
+ NODE_ENV=development npm install
-echo "---> Building your Node application from source"
+ # do not fail when there is no build script
+ echo "---> Building in production mode"
+ npm run build --if-present
-# pull node packages from artifactory
-npm cache clean --force
-npm config set registry https://artifacts.developer.gov.bc.ca/artifactory/api/npm/npm-remote/
-curl -u $ARTIFACTORY_USER:$ARTIFACTORY_PASSWORD https://artifacts.developer.gov.bc.ca/artifactory/api/npm/auth >> ~/.npmrc
+ echo "---> Pruning the development dependencies"
+ npm prune
-# -d means --loglevel info
-npm install -d
+ NPM_TMP=$(npm config get tmp)
+ if ! mountpoint $NPM_TMP; then
+ echo "---> Cleaning the $NPM_TMP/npm-*"
+ rm -rf $NPM_TMP/npm-*
+ fi
-# run webpack
-npm run dist
+ # Clear npm's cache and tmp directories only if they are not docker volumes
+ NPM_CACHE=$(npm config get cache)
+ if ! mountpoint $NPM_CACHE; then
+ echo "---> Cleaning the npm cache $NPM_CACHE"
+ # As of npm@5, even 'npm cache clean --force' does not fully remove the cache directory
+ # instead of $NPM_CACHE* use $NPM_CACHE/*.
+ # We do not want to delete .npmrc file.
+ rm -rf "${NPM_CACHE:?}/"
+ fi
+fi
# Fix source directory permissions
fix-permissions ./
\ No newline at end of file
diff --git a/frontend/.s2i/bin/run b/frontend/.s2i/bin/run
deleted file mode 100644
index b1b930ce3..000000000
--- a/frontend/.s2i/bin/run
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-npm run production
\ No newline at end of file
diff --git a/frontend/Dockerfile-Openshift b/frontend/Dockerfile-Openshift
index 59a2319a8..9aa32a355 100644
--- a/frontend/Dockerfile-Openshift
+++ b/frontend/Dockerfile-Openshift
@@ -11,4 +11,4 @@ CMD npm run start
chmod +x /usr/local/bin/caddy2
# COPY ./Caddyfile /etc/caddy/Caddyfile
# RUN chmod +x ./Dockerfile-Openshift-entrypoint.sh
-# CMD ["./Dockerfile-Openshift-entrypoint.sh"]
+# CMD ["./Dockerfile-Openshift-entrypoint.sh"]
\ No newline at end of file
diff --git a/frontend/Dockerfile-Openshift-entrypoint.sh b/frontend/Dockerfile-Openshift-entrypoint.sh
deleted file mode 100644
index 113f61a5a..000000000
--- a/frontend/Dockerfile-Openshift-entrypoint.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-nohup caddy2 run --config ./Caddyfile &
-npm run start
diff --git a/frontend/package.json b/frontend/package.json
index 313b5cb17..2f96bcbf7 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -1,6 +1,6 @@
{
"name": "zeva-frontend",
- "version": "1.41.0",
+ "version": "1.42.0",
"private": true,
"dependencies": {
"@fortawesome/fontawesome-free": "^5.13.0",
diff --git a/frontend/src/app/components/EditComment.js b/frontend/src/app/components/EditComment.js
new file mode 100644
index 000000000..41063bbdc
--- /dev/null
+++ b/frontend/src/app/components/EditComment.js
@@ -0,0 +1,62 @@
+import React, { useState, useEffect } from 'react';
+import ReactQuill from 'react-quill';
+
+const EditComment = ({
+ commentId,
+ comment,
+ handleSave,
+ handleCancel,
+ handleDelete
+}) => {
+ const [value, setValue] = useState();
+
+ useEffect(() => {
+ setValue(comment);
+ }, [comment]);
+
+ const handleChange = (editedComment) => {
+ if (editedComment) {
+ setValue(editedComment);
+ }
+ };
+
+ return (
+ <>
+