Skip to content

Commit

Permalink
Merge pull request #4 from City-of-Helsinki/chore/add_workflows
Browse files Browse the repository at this point in the history
Add test and build workflows
  • Loading branch information
laurigates authored Sep 19, 2023
2 parents 5f3e538 + 68c7ce9 commit b3806d9
Show file tree
Hide file tree
Showing 3 changed files with 179 additions and 6 deletions.
86 changes: 86 additions & 0 deletions .github/workflows/build-persister.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
# Workflow: build, push and sign the persister container image.
# Based on GitHub's docker-publish starter workflow. Pushes to ghcr.io
# on main/tag pushes; on pull requests it only builds (no push, no sign).
name: Build and push persister image

on:
  schedule:
    - cron: '33 3 * * *'
  push:
    branches: [ "main" ]
    # Publish semver tags as releases.
    tags: [ 'v*.*.*' ]
  pull_request:
    branches: [ "main" ]

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build:

    runs-on: ubuntu-latest

    permissions:
      contents: read
      packages: write
      # This is used to complete the identity challenge
      # with sigstore/fulcio when running outside of PRs.
      id-token: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      # Install the cosign tool except on PR
      # https://github.com/sigstore/cosign-installer
      - name: Install cosign
        if: github.event_name != 'pull_request'
        uses: sigstore/cosign-installer@f3c664df7af409cb4873aa5068053ba9d61a57b6  # v2.6.0
        with:
          cosign-release: 'v1.13.1'

      # Workaround: https://github.com/docker/build-push-action/issues/461
      - name: Setup Docker buildx
        uses: docker/setup-buildx-action@79abd3f86f79a9d68a23c75a09a9a85889262adf

      # Login against a Docker registry except on PR
      # https://github.com/docker/login-action
      - name: Log into registry ${{ env.REGISTRY }}
        if: github.event_name != 'pull_request'
        uses: docker/login-action@28218f9b04b4f3f62068d7b6ce6ca5b26e35336c
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Extract metadata (tags, labels) for Docker
      # https://github.com/docker/metadata-action
      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      # Build and push Docker image with Buildx (don't push on PR)
      # https://github.com/docker/build-push-action
      # NOTE(review): no `context:` is given, so build-push-action falls back
      # to its default build context — confirm the Dockerfile lives at the
      # repository root.
      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a
        with:
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      # Sign the resulting Docker image digest except on PRs.
      # This will only write to the public Rekor transparency log when the Docker
      # repository is public to avoid leaking data. If you would like to publish
      # transparency data even for private images, pass --force to cosign below.
      # https://github.com/sigstore/cosign
      - name: Sign the published Docker image
        if: ${{ github.event_name != 'pull_request' }}
        env:
          COSIGN_EXPERIMENTAL: "true"
        # This step uses the identity token to provision an ephemeral certificate
        # against the sigstore community Fulcio instance.
        run: echo "${{ steps.meta.outputs.tags }}" | xargs -I {} cosign sign {}@${{ steps.build-and-push.outputs.digest }}
81 changes: 81 additions & 0 deletions .github/workflows/test-persister.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
# Workflow: lint (and eventually test) the persister across supported
# Python versions. The pytest job and its Kafka service container are
# currently commented out pending a working CI Kafka setup.
name: Persister CI

# Controls when the workflow will run
on:
  # Triggers the workflow on push or pull request events but only for the "main" branch
  push:
    branches: [ "main" ]
    tags: [ "v*" ]
  pull_request:
    branches: [ "main" ]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job called "tests"
  tests:
    # The type of runner that the job will run on
    runs-on: ubuntu-latest
    strategy:
      matrix:
        # Versions quoted so YAML does not read 3.10 as the float 3.1.
        python-version: ["3.9", "3.10", "3.11"]
    # services:
    #   # Label used to access the service container
    #   kafka:
    #     # Docker Hub image
    #     image: bitnami/kafka:3.4
    #     env:
    #       ALLOW_PLAINTEXT_LISTENER: "yes"
    #       KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: "true"
    #     # Set health checks to wait until kafka has started
    #     options: >-
    #       --health-cmd "kafka-topics.sh --bootstrap-server kafka:9092 --topic hc --create --if-not-exists && kafka-topics.sh --bootstrap-server kafka:9092 --topic hc --describe"
    #       --health-interval 10s
    #       --health-timeout 5s
    #       --health-retries 5
    #     ports:
    #       # Tests run directly on the runner so we have to map the port
    #       - 9092:9092
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          cache: 'pip'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip setuptools wheel
          pip install ruff pytest requests
          pip install -r requirements.txt
      # NOTE(review): `ruff --format=github` is the legacy CLI form; newer
      # ruff releases use `ruff check --output-format=github`. Pin the ruff
      # version or update the flags if CI starts failing.
      # NOTE(review): `--target-version=py37` is below the tested matrix
      # (3.9-3.11) — presumably intentional for compatibility; confirm.
      - name: Lint with ruff
        run: |
          # stop the build if there are Python syntax errors or undefined names
          ruff --format=github --select=E9,F63,F7,F82 --target-version=py37 .
          # default set of ruff rules with GitHub Annotations
          ruff --format=github --target-version=py37 .
      # NOTE(review): the commented-out env below embeds literal
      # credential-looking values (DEVICE_REGISTRY_TOKEN, INFLUXDB_TOKEN).
      # Even commented out they live in git history — rotate them and move
      # to repository secrets (${{ secrets.* }}) before re-enabling.
      # - name: Test with pytest
      #   env:
      #     # Kafka config
      #     KAFKA_HOST: "localhost"
      #     KAFKA_PORT: 9092
      #     KAFKA_BOOTSTRAP_SERVERS: "localhost:9092"
      #     KAFKA_GROUP_ID: "digita_dev"
      #     KAFKA_PARSED_DATA_TOPIC_NAME: "digita.parseddata"
      #     KAFKA_RAW_DATA_TOPIC_NAME: "digita.rawdata"
      #     # Device registry config
      #     DEVICE_REGISTRY_URL: "http://devreg:8000/api/v1"
      #     DEVICE_REGISTRY_TOKEN: b48455759b691baf3b811ba437ce9e581fc0a37e
      #     INFLUXDB_URL: "https://influx.fvh.io"
      #     INFLUXDB_ORG: "Testing"
      #     INFLUXDB_BUCKET: "DigitaTest"
      #     INFLUXDB_TOKEN: "-DWlK7UjluBsFRgQ6ZTU82zfir89lAdY00hSvm-hhM-z2cmA-TwRekLj4C_KxU4jAU23H3vTkgGIQjqs68xHWg=="
      #     # Debug settings
      #     LOG_LEVEL: "DEBUG"
      #     DEBUG: 1
      #     UVICORN_LOG_LEVEL: "DEBUG"
      #   run: |
      #     pytest
18 changes: 12 additions & 6 deletions kafka2influxdb.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import json
import logging
import os
from pprint import pformat
Expand All @@ -13,7 +12,8 @@
"""
Consume parsed data from Kafka and save it to InfluxDB V2 database.
Parsed data is expected to be in the following "well-known" "JSON timeseries"-like format:
Parsed data is expected to be in the following "well-known" "JSON
timeseries"-like format:
parsed_data = {
"header": {
Expand Down Expand Up @@ -46,7 +46,10 @@
"device": {
"device_id": "B81758FFFE031234",
# FIXME: this is incorrect format for device_metadata now
"device_metadata": {"name": "Elsys ERS CO2 A81758FFFE035729", "parser_module": "fvhiot.parsers.elsys"},
"device_metadata": {
"name": "Elsys ERS CO2 A81758FFFE035729",
"parser_module": "fvhiot.parsers.elsys"
},
"device_state": {"state data": "is here"},
},
"meta": {
Expand Down Expand Up @@ -77,14 +80,16 @@


def parsed_data_to_influxdb_format(
measurement_name, device_id, data: dict, extra_fields: dict = None, extra_tags: dict = None
measurement_name, device_id, data: dict, extra_fields: dict = None,
extra_tags: dict = None
) -> list:
"""
Convert parsed data to InfluxDB datapoints format, see example above.
:param measurement_name: name of the measurement, e.g. "elsys"
:param device_id: device ID, e.g. "B81758FFFE031234"
:param data: parsed data in "well-known" "JSON timeseries"-like format
:param extra_fields: extra fields to add to each datapoint, e.g. {"rssi": rssi_value}
:param extra_fields: extra fields to add to each datapoint, e.g. {"rssi":
rssi_value}
:param extra_tags: extra tags to add to each datapoint, e.g. {"dev-type": "sensor"}
"""
influxdb_points = []
Expand Down Expand Up @@ -129,7 +134,8 @@ def main():
logging.debug(pformat(data, width=120))
measurement_name = data["device"]["parser_module"].split(".")[-1]
device_id = data["device"]["device_id"]
influxdb_datapoints = parsed_data_to_influxdb_format(measurement_name, device_id, data)
influxdb_datapoints = parsed_data_to_influxdb_format(measurement_name,
device_id, data)
with client.write_api(write_options=SYNCHRONOUS) as write_api:
write_api.write(bucket, org, influxdb_datapoints)
logging.info(f"Saved {len(influxdb_datapoints)} datapoints to InfluxDB")
Expand Down

0 comments on commit b3806d9

Please sign in to comment.