Fix container image build
- Add pre-commit hook config
  - Formatter auto-formatted some of the code without functional changes
- Add gitignore
- Update requirements files
- Update to FVHIoT-python v0.4.1
- Fix missing dependencies to build aiokafka
laurigates committed Dec 20, 2023
1 parent b1fc459 commit dc4e4c5
Showing 15 changed files with 594 additions and 92 deletions (diffs for six of them are reproduced below).
161 changes: 161 additions & 0 deletions .gitignore
@@ -0,0 +1,161 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
venv*/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
.idea/
30 changes: 30 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,30 @@
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v2.3.0
    hooks:
      - id: check-yaml
      - id: end-of-file-fixer
      - id: trailing-whitespace
  - repo: https://github.com/psf/black
    rev: 23.12.0
    hooks:
      - id: black
  - repo: https://github.com/charliermarsh/ruff-pre-commit
    rev: 'v0.1.8'
    hooks:
      - id: ruff
  - repo: https://github.com/jazzband/pip-tools
    rev: 7.3.0
    hooks:
      - id: pip-compile
        name: pip-compile requirements.txt
        args: [--strip-extras, --output-file=requirements.txt]
        files: ^(pyproject\.toml|requirements\.txt)$
      - id: pip-compile
        name: pip-compile requirements-test.txt
        args: [--extra=test, --strip-extras, --output-file=requirements-test.txt]
        files: ^(pyproject\.toml|requirements-test\.txt)$
      - id: pip-compile
        name: pip-compile requirements-dev.txt
        args: [--extra=dev, --strip-extras, --output-file=requirements-dev.txt]
        files: ^(pyproject\.toml|requirements-dev\.txt)$
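
With this config in place, check-yaml, end-of-file-fixer, trailing-whitespace, black, and ruff run against each commit, and the three pip-compile hooks re-pin requirements.txt, requirements-test.txt, and requirements-dev.txt from pyproject.toml (the test and dev variants from their matching extras). The hooks only fire once they are registered locally; a typical setup, assuming pre-commit is available in the development environment:

pip install pre-commit
pre-commit install          # registers the git hook
pre-commit run --all-files  # one-off pass over the whole repository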
7 changes: 7 additions & 0 deletions Dockerfile
@@ -8,6 +8,13 @@ ENV PYTHONUNBUFFERED 1
 RUN addgroup -S app && adduser -S app -G app
 WORKDIR /home/app
 
+# Install requirements to build aiokafka
+RUN apk add --no-cache \
+    gcc \
+    python3-dev \
+    libc-dev \
+    zlib-dev
+
 # Copy and install requirements only first to cache the dependency layer
 COPY --chown=app:app requirements.txt .
 RUN pip install --no-cache-dir --no-compile --upgrade -r requirements.txt
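
A note on the apk packages above: Alpine images are musl-based, so pip often cannot use prebuilt manylinux wheels and compiles aiokafka's C extension from source, which is what gcc, python3-dev, libc-dev, and zlib-dev provide for. A quick smoke test of the fix, with the image tag purely illustrative:

docker build -t http-to-kafka .
docker run --rm http-to-kafka python -c "import aiokafka; print(aiokafka.__version__)"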
63 changes: 47 additions & 16 deletions app.py
@@ -21,8 +21,12 @@
 from endpoints import AsyncRequestHandler as RequestHandler
 
 # TODO: for testing, add better defaults (or remove completely to make sure it is set in env)
-ENDPOINT_CONFIG_URL = os.getenv("ENDPOINT_CONFIG_URL", "http://127.0.0.1:8000/api/v1/hosts/localhost/")
-DEVICE_REGISTRY_TOKEN = os.getenv("DEVICE_REGISTRY_TOKEN", "abcdef1234567890abcdef1234567890abcdef12")
+ENDPOINT_CONFIG_URL = os.getenv(
+    "ENDPOINT_CONFIG_URL", "http://127.0.0.1:8000/api/v1/hosts/localhost/"
+)
+DEVICE_REGISTRY_TOKEN = os.getenv(
+    "DEVICE_REGISTRY_TOKEN", "abcdef1234567890abcdef1234567890abcdef12"
+)
 
 device_registry_request_headers = {
     "Authorization": f"Token {DEVICE_REGISTRY_TOKEN}",
@@ -44,22 +44,34 @@ async def get_endpoints_from_device_registry(fail_on_error: bool) -> dict:
     # Create request to ENDPOINTS_URL and get data using httpx
     async with httpx.AsyncClient() as client:
         try:
-            response = await client.get(ENDPOINT_CONFIG_URL, headers=device_registry_request_headers)
+            response = await client.get(
+                ENDPOINT_CONFIG_URL, headers=device_registry_request_headers
+            )
             if response.status_code == 200:
                 data = response.json()
-                logging.info(f"Got {len(data['endpoints'])} endpoints from device registry {ENDPOINT_CONFIG_URL}")
+                logging.info(
+                    f"Got {len(data['endpoints'])} endpoints from device registry {ENDPOINT_CONFIG_URL}"
+                )
             else:
-                logging.error(f"Failed to get endpoints from device registry {ENDPOINT_CONFIG_URL}")
+                logging.error(
+                    f"Failed to get endpoints from device registry {ENDPOINT_CONFIG_URL}"
+                )
                 return endpoints
         except Exception as e:
-            logging.error(f"Failed to get endpoints from device registry {ENDPOINT_CONFIG_URL}: {e}")
+            logging.error(
+                f"Failed to get endpoints from device registry {ENDPOINT_CONFIG_URL}: {e}"
+            )
             if fail_on_error:
                 raise e
     for endpoint in data["endpoints"]:
         # Import requesthandler module. It must exist in python path.
         try:
-            request_handler_module = importlib.import_module(endpoint["http_request_handler"])
-            request_handler_function: RequestHandler = request_handler_module.RequestHandler()
+            request_handler_module = importlib.import_module(
+                endpoint["http_request_handler"]
+            )
+            request_handler_function: RequestHandler = (
+                request_handler_module.RequestHandler()
+            )
             endpoint["request_handler"] = request_handler_function
             logging.info(f"Imported {endpoint['http_request_handler']}")
         except ImportError as e:
@@ -97,18 +113,22 @@ async def trigger_error(_request: Request) -> Response:
 
 
 async def api_v2(request: Request, endpoint: dict) -> Response:
-    request_data = await extract_data_from_starlette_request(request)  # data validation done here
+    request_data = await extract_data_from_starlette_request(
+        request
+    )  # data validation done here
     # TODO : remove
     # DONE
     # logging.error(request_data)
     if request_data.get("extra"):
         logging.warning(f"RequestModel contains extra values: {request_data['extra']}")
     if request_data["request"].get("extra"):
-        logging.warning(f"RequestData contains extra values: {request_data['request']['extra']}")
+        logging.warning(
+            f"RequestData contains extra values: {request_data['request']['extra']}"
+        )
     path = request_data["path"]
-    (auth_ok, device_id, topic_name, response_message, status_code) = await endpoint["request_handler"].process_request(
-        request_data, endpoint
-    )
+    (auth_ok, device_id, topic_name, response_message, status_code) = await endpoint[
+        "request_handler"
+    ].process_request(request_data, endpoint)
     response_message = str(response_message)
     print("REMOVE ME", auth_ok, device_id, topic_name, response_message, status_code)
     # add extracted device id to request data before pushing to kafka raw data topic
Expand All @@ -130,7 +150,10 @@ async def api_v2(request: Request, endpoint: dict) -> Response:
f'Failed to send "{path}" data to {topic_name}, producer was not initialised even we had a topic name'
)
# Endpoint process has failed and no data was sent to Kafka. This is a fatal error.
response_message, status_code = "Internal server error, see logs for details", 500
response_message, status_code = (
"Internal server error, see logs for details",
500,
)
else:
logging.info("No action: topic_name is not defined")

@@ -165,7 +188,11 @@ async def startup():
     except Exception as e:
         logging.error(f"Failed to create KafkaProducer: {e}")
         app.producer = None
-    logging.info("Ready to go, listening to endpoints: {}".format(", ".join(app.endpoints.keys())))
+    logging.info(
+        "Ready to go, listening to endpoints: {}".format(
+            ", ".join(app.endpoints.keys())
+        )
+    )
 
 
 async def shutdown():
@@ -184,7 +211,11 @@ async def shutdown():
     APIRoute("/readiness", endpoint=readiness, methods=["GET", "HEAD"]),
     APIRoute("/healthz", endpoint=healthz, methods=["GET", "HEAD"]),
     APIRoute("/debug-sentry", endpoint=trigger_error, methods=["GET", "HEAD"]),
-    APIRoute("/{full_path:path}", endpoint=catch_all, methods=["HEAD", "GET", "POST", "PUT", "PATCH", "DELETE"]),
+    APIRoute(
+        "/{full_path:path}",
+        endpoint=catch_all,
+        methods=["HEAD", "GET", "POST", "PUT", "PATCH", "DELETE"],
+    ),
 ]
 
 
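
For context on the dynamic import reformatted above: each endpoint's http_request_handler value names a module that must be importable and expose a RequestHandler class, and api_v2 unpacks the result of its process_request coroutine as (auth_ok, device_id, topic_name, response_message, status_code). A minimal sketch of a compatible handler module (the module name, the subclassing of AsyncRequestHandler, and the endpoint fields are assumptions based only on the call sites visible in this diff):

# endpoints/example_device.py -- hypothetical module; the device registry would
# reference it as "http_request_handler": "endpoints.example_device"
from endpoints import AsyncRequestHandler


class RequestHandler(AsyncRequestHandler):
    async def process_request(self, request_data: dict, endpoint: dict) -> tuple:
        # Pull a device id from the query string (field name illustrative).
        device_id = request_data["request"]["get"].get("device_id")
        if device_id is None:
            # (auth_ok, device_id, topic_name, response_message, status_code)
            return False, None, None, "Missing device_id", 400
        # The topic field name is likewise an assumption about the endpoint config.
        return True, device_id, endpoint.get("kafka_raw_data_topic"), "Data received", 202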
12 changes: 6 additions & 6 deletions azure-build-main.yml (trailing-whitespace cleanup: each removed/added pair below differs only in trailing spaces)
@@ -1,4 +1,4 @@
-# Continuous integration (CI) triggers cause a pipeline to run whenever you push 
+# Continuous integration (CI) triggers cause a pipeline to run whenever you push
 # an update to the specified branches or you push specified tags.
 trigger:
   batch: true
@@ -9,15 +9,15 @@ trigger:
     exclude:
       - README.md
 
-# Pull request (PR) triggers cause a pipeline to run whenever a pull request is 
-# opened with one of the specified target branches, or when updates are made to 
+# Pull request (PR) triggers cause a pipeline to run whenever a pull request is
+# opened with one of the specified target branches, or when updates are made to
 # such a pull request.
 #
-# GitHub creates a new ref when a pull request is created. The ref points to a 
-# merge commit, which is the merged code between the source and target branches 
+# GitHub creates a new ref when a pull request is created. The ref points to a
+# merge commit, which is the merged code between the source and target branches
 # of the pull request.
 #
-# Opt out of pull request validation 
+# Opt out of pull request validation
 pr: none
 
 # By default, use self-hosted agents
19 changes: 15 additions & 4 deletions endpoints/__init__.py
@@ -28,12 +28,17 @@ def log_match(header: str, ip: str, allowed_network):
     r_ip = request_data["request"]["headers"].get("x-real-ip")
     if (
         r_ip
-        and ipaddress.ip_address(request_data["request"]["headers"].get("x-real-ip", "")) in allowed_network
+        and ipaddress.ip_address(
+            request_data["request"]["headers"].get("x-real-ip", "")
+        )
+        in allowed_network
     ):
         log_match("x-real-ip", r_ip, allowed_network)
         return True
 
-    forwarded_for_ips = request_data["request"]["headers"].get("x-forwarded-for", "").split(",")
+    forwarded_for_ips = (
+        request_data["request"]["headers"].get("x-forwarded-for", "").split(",")
+    )
     for r_ip in forwarded_for_ips:
         r_ip = r_ip.strip()
         if a_ip:
@@ -73,14 +78,20 @@ async def validate(
         api_key = request_data["request"]["headers"].get("x-api-key")
         if api_key is None or api_key != endpoint_data["auth_token"]:
             logging.warning("Missing or invalid authentication token (x-api-key)")
-            return False, "Missing or invalid authentication token, see logs for error", 401
+            return (
+                False,
+                "Missing or invalid authentication token, see logs for error",
+                401,
+            )
         logging.info("Authentication token validated")
         if request_data["request"]["get"].get("test") == "true":
             logging.info("Test ok")
             return False, "Test OK", 400
         allowed_ip_addresses = endpoint_data.get("allowed_ip_addresses", "")
         if allowed_ip_addresses == "":
-            logging.warning("Set 'allowed_ip_addresses' in endpoint settings to restrict requests unknown sources")
+            logging.warning(
+                "Set 'allowed_ip_addresses' in endpoint settings to restrict requests unknown sources"
+            )
         else:
             if is_ip_address_allowed(request_data, allowed_ip_addresses) is False:
                 return False, "IP address not allowed", 403
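
The allowlist check above relies entirely on the standard-library ipaddress module: each candidate address from x-real-ip or x-forwarded-for is parsed with ip_address and tested for membership in an ip_network. A standalone sketch of the same pattern, with the network and header values invented for illustration:

import ipaddress

allowed_network = ipaddress.ip_network("192.0.2.0/24")  # TEST-NET-1, illustrative
forwarded_for = "203.0.113.7, 192.0.2.42"  # example x-forwarded-for header value

# Walk the comma-separated hops; accept if any hop is inside the allowed network.
for raw_ip in forwarded_for.split(","):
    candidate = ipaddress.ip_address(raw_ip.strip())
    if candidate in allowed_network:
        print(f"{candidate} allowed by {allowed_network}")
        break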
