diff --git a/.deprecated_files b/.deprecated_files
index ca3684e0..af2a4cf8 100644
--- a/.deprecated_files
+++ b/.deprecated_files
@@ -21,3 +21,4 @@ setup.cfg
.pylintrc
.flake8
+.editorconfig
diff --git a/.devcontainer/.dev_config.yaml b/.devcontainer/.dev_config.yaml
index eb79728d..fda99703 100644
--- a/.devcontainer/.dev_config.yaml
+++ b/.devcontainer/.dev_config.yaml
@@ -302,7 +302,7 @@ loader_token_hashes:
log_level: trace
service_name: metldata
-service_instance_id: 001
+service_instance_id: "001"
kafka_servers: ["kafka:9092"]
primary_artifact_name: embedded_public
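Note on the YAML change above: quoting `"001"` is substantive, not cosmetic. YAML resolves a bare `001` as the integer 1 (a leading zero marks octal notation), while `service_instance_id` is declared as a string. A minimal sketch of the difference, assuming PyYAML:

```python
# Minimal sketch: how YAML resolves the unquoted vs. quoted scalar.
import yaml

print(yaml.safe_load("service_instance_id: 001"))    # {'service_instance_id': 1}
print(yaml.safe_load('service_instance_id: "001"'))  # {'service_instance_id': '001'}
```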
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index eda6e100..89ff58f8 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -2,7 +2,7 @@
// https://github.com/microsoft/vscode-dev-containers/tree/v0.177.0/containers/python-3-postgres
// Update the VARIANT arg in docker-compose.yml to pick a Python version: 3, 3.8, 3.7, 3.6
{
- "name": "metldata",
+ "name": "${localWorkspaceFolderBasename}",
"dockerComposeFile": "docker-compose.yml",
"service": "app",
"workspaceFolder": "/workspace",
@@ -10,6 +10,7 @@
"vscode": {
// Set *default* container specific settings.json values on container create.
"settings": {
+ "files.eol": "\n",
"terminal.integrated.profiles.linux": {
"bash": {
"path": "/bin/bash"
@@ -17,19 +18,11 @@
},
"python.pythonPath": "/usr/local/bin/python",
"python.languageServer": "Pylance",
- "python.linting.enabled": true,
- "python.linting.pylintEnabled": true,
- "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8",
- "python.formatting.blackPath": "/usr/local/py-utils/bin/black",
- "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf",
- "python.formatting.provider": "black",
"python.analysis.typeCheckingMode": "basic",
- "python.linting.banditPath": "/usr/local/py-utils/bin/bandit",
- "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy",
- "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle",
- "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
- "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint",
"python.testing.pytestPath": "/usr/local/py-utils/bin/pytest",
+ "python.testing.pytestArgs": [
+ "--profile"
+ ],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true,
"editor.formatOnSave": true,
@@ -37,6 +30,7 @@
"editor.rulers": [
88
],
+ "editor.defaultFormatter": "ms-python.black-formatter",
"licenser.license": "Custom",
"licenser.customHeaderFile": "/workspace/.devcontainer/license_header.txt"
},
@@ -45,7 +39,6 @@
"mikestead.dotenv",
"ms-azuretools.vscode-docker",
"ms-python.python",
- "ms-python.isort",
"ms-python.vscode-pylance",
"ms-toolsai.jupyter",
"njpwerner.autodocstring",
@@ -58,7 +51,9 @@
"yzhang.markdown-all-in-one",
"visualstudioexptteam.vscodeintellicode",
"ymotongpoo.licenser",
- "editorconfig.editorconfig"
+ "charliermarsh.ruff",
+ "ms-python.black-formatter",
+ "ms-python.mypy-type-checker"
]
}
},
@@ -68,12 +63,13 @@
"postCreateCommand": "dev_install",
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "vscode",
+ "containerEnv": {
+ // for testcontainers to connect to the docker host:
+ "TC_HOST": "host.docker.internal",
+ "DOCKER_HOST": "unix:///var/run/docker.sock"
+ },
"features": {
- "ghcr.io/devcontainers/features/docker-in-docker:2": {
- "version": "latest",
- "enableNonRootDocker": "true",
- "moby": true,
- "azureDnsAutoDetection": false
- }
+ // details can be found here: https://github.com/devcontainers/features/tree/main/src/docker-outside-of-docker
+ "ghcr.io/devcontainers/features/docker-outside-of-docker:1": {}
}
}
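The feature swap above replaces a nested Docker daemon (docker-in-docker) with the host's daemon mounted into the devcontainer (docker-outside-of-docker). `DOCKER_HOST` points Docker clients at the mounted socket, and `TC_HOST` tells testcontainers where ports of sibling containers are reachable, since they are published on the host rather than inside the devcontainer. A rough sketch of the effect, assuming testcontainers honors the two variables set in the `containerEnv` block:

```python
# Rough sketch (assumption: testcontainers reads the TC_HOST/DOCKER_HOST
# values set in the containerEnv block above).
from testcontainers.mongodb import MongoDbContainer

with MongoDbContainer("mongo:6.0") as mongo:
    # The container starts on the *host* daemon; its published port is
    # therefore reached via host.docker.internal (TC_HOST), not localhost.
    print(mongo.get_connection_url())
```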
diff --git a/.editorconfig b/.editorconfig
deleted file mode 100644
index 3fa06b52..00000000
--- a/.editorconfig
+++ /dev/null
@@ -1,14 +0,0 @@
-# Top-most EditorConfig file.
-root = true
-
-# For all files ...
-[*]
-
-# Unix-style LF newlines.
-end_of_line = lf
-
-# Force a newline at the end of the file.
-insert_final_newline = true
-
-# Use UTF-8.
-charset = utf-8
diff --git a/.gitignore b/.gitignore
index a71cfd2a..d9ba6145 100644
--- a/.gitignore
+++ b/.gitignore
@@ -133,6 +133,12 @@ dmypy.json
# ignore VS Code settings:
.vscode/
+# key stores
+*.key
+*.rnd
+.keystore
+.ssl/
+
# desktop settings and thumbnails
.DS_Store
desktop.ini
diff --git a/.mandatory_files b/.mandatory_files
index c0b51fb1..ba9f6c31 100644
--- a/.mandatory_files
+++ b/.mandatory_files
@@ -6,9 +6,7 @@
# may differ from that of the template repository.
.devcontainer/dev_launcher
-.devcontainer/devcontainer.json
.devcontainer/docker-compose.yml
-.devcontainer/Dockerfile
tests/__init__.py
tests/fixtures/__init__.py
diff --git a/.readme_template.md b/.readme_template.md
index dd8b1844..10e3e612 100644
--- a/.readme_template.md
+++ b/.readme_template.md
@@ -1,5 +1,5 @@
-[![tests](https://github.com/ghga-de/$name/actions/workflows/unit_and_int_tests.yaml/badge.svg)](https://github.com/ghga-de/$name/actions/workflows/unit_and_int_tests.yaml)
+[![tests](https://github.com/ghga-de/$name/actions/workflows/tests.yaml/badge.svg)](https://github.com/ghga-de/$name/actions/workflows/tests.yaml)
[![Coverage Status](https://coveralls.io/repos/github/ghga-de/$name/badge.svg?branch=main)](https://coveralls.io/github/ghga-de/$name?branch=main)
# $title
@@ -11,6 +11,7 @@ $summary
$description
## Installation
+
We recommend using the provided Docker container.
A pre-built version is available at [docker hub](https://hub.docker.com/repository/docker/ghga/$name):
@@ -42,6 +43,7 @@ $shortname --help
```
## Configuration
+
### Parameters
The service requires the following configuration parameters:
@@ -77,19 +79,20 @@ $openapi_doc
$design_description
## Development
+
For setting up the development environment, we rely on the
-[devcontainer feature](https://code.visualstudio.com/docs/remote/containers) of vscode
+[devcontainer feature](https://code.visualstudio.com/docs/remote/containers) of VS Code
in combination with Docker Compose.
-To use it, you have to have Docker Compose as well as vscode with its "Remote - Containers"
+To use it, you need Docker Compose as well as VS Code with its "Remote - Containers"
extension (`ms-vscode-remote.remote-containers`) installed.
-Then open this repository in vscode and run the command
-`Remote-Containers: Reopen in Container` from the vscode "Command Palette".
+Then open this repository in VS Code and run the command
+`Remote-Containers: Reopen in Container` from the VS Code "Command Palette".
This will give you a full-fledged, pre-configured development environment including:
- infrastructural dependencies of the service (databases, etc.)
-- all relevant vscode extensions pre-installed
-- pre-configured linting and auto-formating
+- all relevant VS Code extensions pre-installed
+- pre-configured linting and auto-formatting
- a pre-configured debugger
- automatic license-header insertion
@@ -101,9 +104,11 @@ if you update dependencies in the [`./pyproject.toml`](./pyproject.toml) or the
[`./requirements-dev.txt`](./requirements-dev.txt), please run it again.
## License
+
This repository is free to use and modify according to the
[Apache 2.0 License](./LICENSE).
-## Readme Generation
-This readme is autogenerate, please see [`readme_generation.md`](./readme_generation.md)
+## README Generation
+
+This README file is auto-generated; please see [`readme_generation.md`](./readme_generation.md)
for details.
diff --git a/.static_files b/.static_files
index e3c8f767..5f035a96 100644
--- a/.static_files
+++ b/.static_files
@@ -11,6 +11,7 @@
.devcontainer/dev_install
.devcontainer/license_header.txt
.devcontainer/Dockerfile
+.devcontainer/devcontainer.json
scripts/script_utils/__init__.py
scripts/script_utils/cli.py
@@ -39,7 +40,6 @@ scripts/README.md
example_data/README.md
.coveragerc
-.editorconfig
.gitattributes
.gitignore
.mypy.ini
diff --git a/README.md b/README.md
index 430cab48..bd21b2f2 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
-[![tests](https://github.com/ghga-de/metldata/actions/workflows/unit_and_int_tests.yaml/badge.svg)](https://github.com/ghga-de/metldata/actions/workflows/unit_and_int_tests.yaml)
+[![tests](https://github.com/ghga-de/metldata/actions/workflows/tests.yaml/badge.svg)](https://github.com/ghga-de/metldata/actions/workflows/tests.yaml)
[![Coverage Status](https://coveralls.io/repos/github/ghga-de/metldata/badge.svg?branch=main)](https://coveralls.io/github/ghga-de/metldata?branch=main)
# Metldata
@@ -58,6 +58,7 @@ accessions are not violated, however, data might become unavailable.
## Installation
+
We recommend using the provided Docker container.
A pre-built version is available at [docker hub](https://hub.docker.com/repository/docker/ghga/metldata):
@@ -89,41 +90,146 @@ metldata --help
```
## Configuration
+
### Parameters
The service requires the following configuration parameters:
-- **`artifact_infos`** *(array)*: Information for artifacts to be queryable via the Artifacts REST API.
+- **`db_connection_str`** *(string, format: password)*: MongoDB connection string. Might include credentials. For more information see: https://naiveskill.com/mongodb-connection-string/.
- - **Items**: Refer to *[#/definitions/ArtifactInfo](#definitions/ArtifactInfo)*.
-- **`db_connection_str`** *(string, format: password)*: MongoDB connection string. Might include credentials. For more information see: https://naiveskill.com/mongodb-connection-string/.
+ Examples:
+
+ ```json
+ "mongodb://localhost:27017"
+ ```
+
- **`db_name`** *(string)*: Name of the database located on the MongoDB server.
+
+ Examples:
+
+ ```json
+ "my-database"
+ ```
+
+
- **`service_name`** *(string)*: Default: `"metldata"`.
- **`service_instance_id`** *(string)*: A string that uniquely identifies this instance across all instances of this service. A globally unique Kafka client ID will be created by concatenating the service_name and the service_instance_id.
+
+ Examples:
+
+ ```json
+ "germany-bw-instance-001"
+ ```
+
+
- **`kafka_servers`** *(array)*: A list of connection strings to connect to Kafka bootstrap servers.
- **Items** *(string)*
+
+ Examples:
+
+ ```json
+ [
+ "localhost:9092"
+ ]
+ ```
+
+
+- **`kafka_security_protocol`** *(string)*: Protocol used to communicate with brokers. Valid values are: PLAINTEXT, SSL. Must be one of: `["PLAINTEXT", "SSL"]`. Default: `"PLAINTEXT"`.
+
+- **`kafka_ssl_cafile`** *(string)*: Certificate Authority file path containing certificates used to sign broker certificates. If a CA is not specified, the default system CA will be used if found by OpenSSL. Default: `""`.
+
+- **`kafka_ssl_certfile`** *(string)*: Optional filename of client certificate, as well as any CA certificates needed to establish the certificate's authenticity. Default: `""`.
+
+- **`kafka_ssl_keyfile`** *(string)*: Optional filename containing the client private key. Default: `""`.
+
+- **`kafka_ssl_password`** *(string)*: Optional password to be used for the client private key. Default: `""`.
+
- **`primary_artifact_name`** *(string)*: Name of the artifact from which the information for outgoing change events is derived.
+
+ Examples:
+
+ ```json
+ "embedded_public"
+ ```
+
+
- **`primary_dataset_name`** *(string)*: Name of the resource class corresponding to the embedded_dataset slot.
+
+ Examples:
+
+ ```json
+ "EmbeddedDataset"
+ ```
+
+
- **`resource_change_event_topic`** *(string)*: Name of the topic used for events informing other services about resource changes, i.e. deletion or insertion.
+
+ Examples:
+
+ ```json
+ "searchable_resources"
+ ```
+
+
- **`resource_deletion_event_type`** *(string)*: Type used for events indicating the deletion of a previously existing resource.
+
+ Examples:
+
+ ```json
+ "searchable_resource_deleted"
+ ```
+
+
- **`resource_upsertion_type`** *(string)*: Type used for events indicating the upsert of a resource.
+
+ Examples:
+
+ ```json
+ "searchable_resource_upserted"
+ ```
+
+
- **`dataset_change_event_topic`** *(string)*: Name of the topic announcing, among other things, the list of files included in a new dataset.
+
+ Examples:
+
+ ```json
+ "metadata_datasets"
+ ```
+
+
- **`dataset_deletion_type`** *(string)*: Type used for events announcing the deletion of a previously existing dataset.
+
+ Examples:
+
+ ```json
+ "dataset_deleted"
+ ```
+
+
- **`dataset_upsertion_type`** *(string)*: Type used for events announcing a new dataset overview.
+
+ Examples:
+
+ ```json
+ "dataset_created"
+ ```
+
+
- **`host`** *(string)*: IP of the host. Default: `"127.0.0.1"`.
- **`port`** *(integer)*: Port to expose the server on the specified host. Default: `8080`.
@@ -140,60 +246,91 @@ The service requires the following configuration parameters:
- **`docs_url`** *(string)*: Path to host the swagger documentation. This is relative to the specified host and port. Default: `"/docs"`.
-- **`cors_allowed_origins`** *(array)*: A list of origins that should be permitted to make cross-origin requests. By default, cross-origin requests are not allowed. You can use ['*'] to allow any origin.
+- **`cors_allowed_origins`**: A list of origins that should be permitted to make cross-origin requests. By default, cross-origin requests are not allowed. You can use ['*'] to allow any origin. Default: `null`.
- - **Items** *(string)*
+ - **Any of**
-- **`cors_allow_credentials`** *(boolean)*: Indicate that cookies should be supported for cross-origin requests. Defaults to False. Also, cors_allowed_origins cannot be set to ['*'] for credentials to be allowed. The origins must be explicitly specified.
+ - *array*
-- **`cors_allowed_methods`** *(array)*: A list of HTTP methods that should be allowed for cross-origin requests. Defaults to ['GET']. You can use ['*'] to allow all standard methods.
+ - **Items** *(string)*
- - **Items** *(string)*
+ - *null*
-- **`cors_allowed_headers`** *(array)*: A list of HTTP request headers that should be supported for cross-origin requests. Defaults to []. You can use ['*'] to allow all headers. The Accept, Accept-Language, Content-Language and Content-Type headers are always allowed for CORS requests.
- - **Items** *(string)*
+ Examples:
-- **`loader_token_hashes`** *(array)*: Hashes of tokens used to authenticate for loading artifact.
+ ```json
+ [
+ "https://example.org",
+ "https://www.example.org"
+ ]
+ ```
- - **Items** *(string)*
-## Definitions
+- **`cors_allow_credentials`**: Indicate that cookies should be supported for cross-origin requests. Defaults to False. Also, cors_allowed_origins cannot be set to ['*'] for credentials to be allowed. The origins must be explicitly specified. Default: `null`.
+
+ - **Any of**
+
+ - *boolean*
+
+ - *null*
+
+ Examples:
-- **`AnchorPoint`** *(object)*: A model for describing an anchor point for the specified target class.
+ ```json
+  true
+ ```
- - **`target_class`** *(string, required)*: The name of the class to be targeted.
- - **`identifier_slot`** *(string, required)*: The name of the slot in the target class that is used as identifier.
+- **`cors_allowed_methods`**: A list of HTTP methods that should be allowed for cross-origin requests. Defaults to ['GET']. You can use ['*'] to allow all standard methods. Default: `null`.
- - **`root_slot`** *(string, required)*: The name of the slot in the root class used to link to the target class.
+ - **Any of**
-- **`ArtifactResourceClass`** *(object)*: Model to describe a resource class of an artifact.
+ - *array*
- - **`name`** *(string, required)*: The name of the metadata class.
+ - **Items** *(string)*
- - **`description`** *(string)*: A description of the metadata class.
+ - *null*
- - **`anchor_point`**: The anchor point for this metadata class.
- - **All of**
+ Examples:
- - : Refer to *[#/definitions/AnchorPoint](#definitions/AnchorPoint)*.
+ ```json
+ [
+ "*"
+ ]
+ ```
- - **`json_schema`** *(object, required)*: The JSON schema for this metadata class.
-- **`ArtifactInfo`** *(object)*: Model to describe general information on an artifact.
-Please note, it does not contain actual artifact instances derived from specific
-metadata.
+- **`cors_allowed_headers`**: A list of HTTP request headers that should be supported for cross-origin requests. Defaults to []. You can use ['*'] to allow all headers. The Accept, Accept-Language, Content-Language and Content-Type headers are always allowed for CORS requests. Default: `null`.
- - **`name`** *(string, required)*: The name of the artifact.
+ - **Any of**
- - **`description`** *(string, required)*: A description of the artifact.
+ - *array*
- - **`resource_classes`** *(object, required)*: A dictionary of resource classes for this artifact. The keys are the names of the classes. The values are the corresponding class models. Can contain additional properties.
+ - **Items** *(string)*
- - **Additional Properties**: Refer to *[#/definitions/ArtifactResourceClass](#definitions/ArtifactResourceClass)*.
+ - *null*
+
+
+ Examples:
+
+ ```json
+ []
+ ```
+
+
+- **`artifact_infos`** *(array)*: Information for artifacts to be queryable via the Artifacts REST API.
+
+ - **Items**: Refer to *[#/$defs/ArtifactInfo](#$defs/ArtifactInfo)*.
+
+- **`loader_token_hashes`** *(array)*: Hashes of tokens used to authenticate for loading artifacts.
+
+ - **Items** *(string)*
### Usage:
@@ -383,19 +520,20 @@ that is applied to the source events as explained above.
## Development
+
For setting up the development environment, we rely on the
-[devcontainer feature](https://code.visualstudio.com/docs/remote/containers) of vscode
+[devcontainer feature](https://code.visualstudio.com/docs/remote/containers) of VS Code
in combination with Docker Compose.
-To use it, you have to have Docker Compose as well as vscode with its "Remote - Containers"
+To use it, you need Docker Compose as well as VS Code with its "Remote - Containers"
extension (`ms-vscode-remote.remote-containers`) installed.
-Then open this repository in vscode and run the command
-`Remote-Containers: Reopen in Container` from the vscode "Command Palette".
+Then open this repository in VS Code and run the command
+`Remote-Containers: Reopen in Container` from the VS Code "Command Palette".
This will give you a full-fledged, pre-configured development environment including:
- infrastructural dependencies of the service (databases, etc.)
-- all relevant vscode extensions pre-installed
-- pre-configured linting and auto-formating
+- all relevant VS Code extensions pre-installed
+- pre-configured linting and auto-formatting
- a pre-configured debugger
- automatic license-header insertion
@@ -407,9 +545,11 @@ if you update dependencies in the [`./pyproject.toml`](./pyproject.toml) or the
[`./requirements-dev.txt`](./requirements-dev.txt), please run it again.
## License
+
This repository is free to use and modify according to the
[Apache 2.0 License](./LICENSE).
-## Readme Generation
-This readme is autogenerate, please see [`readme_generation.md`](./readme_generation.md)
+## README Generation
+
+This README file is auto-generated; please see [`readme_generation.md`](./readme_generation.md)
for details.
diff --git a/config_schema.json b/config_schema.json
index e3f55882..216d19cb 100644
--- a/config_schema.json
+++ b/config_schema.json
@@ -1,167 +1,266 @@
{
- "title": "ModSettings",
- "description": "Modifies the orginal Settings class provided by the user",
- "type": "object",
- "properties": {
- "artifact_infos": {
- "title": "Artifact Infos",
- "description": "Information for artifacts to be queryable via the Artifacts REST API.",
- "env_names": [
- "metldata_artifact_infos"
+ "$defs": {
+ "AnchorPoint": {
+ "description": "A model for describing an anchor point for the specified target class.",
+ "properties": {
+ "target_class": {
+ "description": "The name of the class to be targeted.",
+ "title": "Target Class",
+ "type": "string"
+ },
+ "identifier_slot": {
+ "description": "The name of the slot in the target class that is used as identifier.",
+ "title": "Identifier Slot",
+ "type": "string"
+ },
+ "root_slot": {
+ "description": "The name of the slot in the root class used to link to the target class.",
+ "title": "Root Slot",
+ "type": "string"
+ }
+ },
+ "required": [
+ "target_class",
+ "identifier_slot",
+ "root_slot"
],
- "type": "array",
- "items": {
- "$ref": "#/definitions/ArtifactInfo"
- }
+ "title": "AnchorPoint",
+ "type": "object"
},
+ "ArtifactInfo": {
+ "description": "Model to describe general information on an artifact.\nPlease note, it does not contain actual artifact instances derived from specific\nmetadata.",
+ "properties": {
+ "name": {
+ "description": "The name of the artifact.",
+ "title": "Name",
+ "type": "string"
+ },
+ "description": {
+ "description": "A description of the artifact.",
+ "title": "Description",
+ "type": "string"
+ },
+ "resource_classes": {
+ "additionalProperties": {
+ "$ref": "#/$defs/ArtifactResourceClass"
+ },
+ "description": "A dictionary of resource classes for this artifact. The keys are the names of the classes. The values are the corresponding class models.",
+ "title": "Resource Classes",
+ "type": "object"
+ }
+ },
+ "required": [
+ "name",
+ "description",
+ "resource_classes"
+ ],
+ "title": "ArtifactInfo",
+ "type": "object"
+ },
+ "ArtifactResourceClass": {
+ "description": "Model to describe a resource class of an artifact.",
+ "properties": {
+ "name": {
+ "description": "The name of the metadata class.",
+ "title": "Name",
+ "type": "string"
+ },
+ "description": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "A description of the metadata class.",
+ "title": "Description"
+ },
+ "anchor_point": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/AnchorPoint"
+ }
+ ],
+ "description": "The anchor point for this metadata class."
+ },
+ "json_schema": {
+ "description": "The JSON schema for this metadata class.",
+ "title": "Json Schema",
+ "type": "object"
+ }
+ },
+ "required": [
+ "name",
+ "anchor_point",
+ "json_schema"
+ ],
+ "title": "ArtifactResourceClass",
+ "type": "object"
+ }
+ },
+ "additionalProperties": false,
+ "description": "Modifies the orginal Settings class provided by the user",
+ "properties": {
"db_connection_str": {
- "title": "Db Connection Str",
"description": "MongoDB connection string. Might include credentials. For more information see: https://naiveskill.com/mongodb-connection-string/",
- "example": "mongodb://localhost:27017",
- "env_names": [
- "metldata_db_connection_str"
+ "examples": [
+ "mongodb://localhost:27017"
],
+ "format": "password",
+ "title": "Db Connection Str",
"type": "string",
- "writeOnly": true,
- "format": "password"
+ "writeOnly": true
},
"db_name": {
- "title": "Db Name",
"description": "Name of the database located on the MongoDB server.",
- "example": "my-database",
- "env_names": [
- "metldata_db_name"
+ "examples": [
+ "my-database"
],
+ "title": "Db Name",
"type": "string"
},
"service_name": {
- "title": "Service Name",
"default": "metldata",
- "env_names": [
- "metldata_service_name"
- ],
+ "title": "Service Name",
"type": "string"
},
"service_instance_id": {
- "title": "Service Instance Id",
"description": "A string that uniquely identifies this instance across all instances of this service. A globally unique Kafka client ID will be created by concatenating the service_name and the service_instance_id.",
- "example": "germany-bw-instance-001",
- "env_names": [
- "metldata_service_instance_id"
+ "examples": [
+ "germany-bw-instance-001"
],
+ "title": "Service Instance Id",
"type": "string"
},
"kafka_servers": {
- "title": "Kafka Servers",
"description": "A list of connection strings to connect to Kafka bootstrap servers.",
- "example": [
- "localhost:9092"
- ],
- "env_names": [
- "metldata_kafka_servers"
+ "examples": [
+ [
+ "localhost:9092"
+ ]
],
- "type": "array",
"items": {
"type": "string"
- }
+ },
+ "title": "Kafka Servers",
+ "type": "array"
+ },
+ "kafka_security_protocol": {
+ "default": "PLAINTEXT",
+ "description": "Protocol used to communicate with brokers. Valid values are: PLAINTEXT, SSL.",
+ "enum": [
+ "PLAINTEXT",
+ "SSL"
+ ],
+ "title": "Kafka Security Protocol",
+ "type": "string"
+ },
+ "kafka_ssl_cafile": {
+ "default": "",
+ "description": "Certificate Authority file path containing certificates used to sign broker certificates. If a CA not specified, the default system CA will be used if found by OpenSSL.",
+ "title": "Kafka Ssl Cafile",
+ "type": "string"
+ },
+ "kafka_ssl_certfile": {
+ "default": "",
+ "description": "Optional filename of client certificate, as well as any CA certificates needed to establish the certificate's authenticity.",
+ "title": "Kafka Ssl Certfile",
+ "type": "string"
+ },
+ "kafka_ssl_keyfile": {
+ "default": "",
+ "description": "Optional filename containing the client private key.",
+ "title": "Kafka Ssl Keyfile",
+ "type": "string"
+ },
+ "kafka_ssl_password": {
+ "default": "",
+ "description": "Optional password to be used for the client private key.",
+ "title": "Kafka Ssl Password",
+ "type": "string"
},
"primary_artifact_name": {
- "title": "Primary Artifact Name",
"description": "Name of the artifact from which the information for outgoing change events is derived.",
- "example": "embedded_public",
- "env_names": [
- "metldata_primary_artifact_name"
+ "examples": [
+ "embedded_public"
],
+ "title": "Primary Artifact Name",
"type": "string"
},
"primary_dataset_name": {
- "title": "Primary Dataset Name",
"description": "Name of the resource class corresponding to the embedded_dataset slot.",
- "example": "EmbeddedDataset",
- "env_names": [
- "metldata_primary_dataset_name"
+ "examples": [
+ "EmbeddedDataset"
],
+ "title": "Primary Dataset Name",
"type": "string"
},
"resource_change_event_topic": {
- "title": "Resource Change Event Topic",
"description": "Name of the topic used for events informing other services about resource changes, i.e. deletion or insertion.",
- "example": "searchable_resources",
- "env_names": [
- "metldata_resource_change_event_topic"
+ "examples": [
+ "searchable_resources"
],
+ "title": "Resource Change Event Topic",
"type": "string"
},
"resource_deletion_event_type": {
- "title": "Resource Deletion Event Type",
"description": "Type used for events indicating the deletion of a previously existing resource.",
- "example": "searchable_resource_deleted",
- "env_names": [
- "metldata_resource_deletion_event_type"
+ "examples": [
+ "searchable_resource_deleted"
],
+ "title": "Resource Deletion Event Type",
"type": "string"
},
"resource_upsertion_type": {
- "title": "Resource Upsertion Type",
"description": "Type used for events indicating the upsert of a resource.",
- "example": "searchable_resource_upserted",
- "env_names": [
- "metldata_resource_upsertion_type"
+ "examples": [
+ "searchable_resource_upserted"
],
+ "title": "Resource Upsertion Type",
"type": "string"
},
"dataset_change_event_topic": {
- "title": "Dataset Change Event Topic",
"description": "Name of the topic announcing, among other things, the list of files included in a new dataset.",
- "example": "metadata_datasets",
- "env_names": [
- "metldata_dataset_change_event_topic"
+ "examples": [
+ "metadata_datasets"
],
+ "title": "Dataset Change Event Topic",
"type": "string"
},
"dataset_deletion_type": {
- "title": "Dataset Deletion Type",
"description": "Type used for events announcing a new dataset overview.",
- "example": "dataset_deleted",
- "env_names": [
- "metldata_dataset_deletion_type"
+ "examples": [
+ "dataset_deleted"
],
+ "title": "Dataset Deletion Type",
"type": "string"
},
"dataset_upsertion_type": {
- "title": "Dataset Upsertion Type",
"description": "Type used for events announcing a new dataset overview.",
- "example": "dataset_created",
- "env_names": [
- "metldata_dataset_upsertion_type"
+ "examples": [
+ "dataset_created"
],
+ "title": "Dataset Upsertion Type",
"type": "string"
},
"host": {
- "title": "Host",
- "description": "IP of the host.",
"default": "127.0.0.1",
- "env_names": [
- "metldata_host"
- ],
+ "description": "IP of the host.",
+ "title": "Host",
"type": "string"
},
"port": {
- "title": "Port",
- "description": "Port to expose the server on the specified host",
"default": 8080,
- "env_names": [
- "metldata_port"
- ],
+ "description": "Port to expose the server on the specified host",
+ "title": "Port",
"type": "integer"
},
"log_level": {
- "title": "Log Level",
- "description": "Controls the verbosity of the log.",
"default": "info",
- "env_names": [
- "metldata_log_level"
- ],
+ "description": "Controls the verbosity of the log.",
"enum": [
"critical",
"error",
@@ -170,120 +269,138 @@
"debug",
"trace"
],
+ "title": "Log Level",
"type": "string"
},
"auto_reload": {
- "title": "Auto Reload",
- "description": "A development feature. Set to `True` to automatically reload the server upon code changes",
"default": false,
- "env_names": [
- "metldata_auto_reload"
- ],
+ "description": "A development feature. Set to `True` to automatically reload the server upon code changes",
+ "title": "Auto Reload",
"type": "boolean"
},
"workers": {
- "title": "Workers",
- "description": "Number of workers processes to run.",
"default": 1,
- "env_names": [
- "metldata_workers"
- ],
+ "description": "Number of workers processes to run.",
+ "title": "Workers",
"type": "integer"
},
"api_root_path": {
- "title": "Api Root Path",
- "description": "Root path at which the API is reachable. This is relative to the specified host and port.",
"default": "/",
- "env_names": [
- "metldata_api_root_path"
- ],
+ "description": "Root path at which the API is reachable. This is relative to the specified host and port.",
+ "title": "Api Root Path",
"type": "string"
},
"openapi_url": {
- "title": "Openapi Url",
- "description": "Path to get the openapi specification in JSON format. This is relative to the specified host and port.",
"default": "/openapi.json",
- "env_names": [
- "metldata_openapi_url"
- ],
+ "description": "Path to get the openapi specification in JSON format. This is relative to the specified host and port.",
+ "title": "Openapi Url",
"type": "string"
},
"docs_url": {
- "title": "Docs Url",
- "description": "Path to host the swagger documentation. This is relative to the specified host and port.",
"default": "/docs",
- "env_names": [
- "metldata_docs_url"
- ],
+ "description": "Path to host the swagger documentation. This is relative to the specified host and port.",
+ "title": "Docs Url",
"type": "string"
},
"cors_allowed_origins": {
- "title": "Cors Allowed Origins",
- "description": "A list of origins that should be permitted to make cross-origin requests. By default, cross-origin requests are not allowed. You can use ['*'] to allow any origin.",
- "example": [
- "https://example.org",
- "https://www.example.org"
+ "anyOf": [
+ {
+ "items": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ {
+ "type": "null"
+ }
],
- "env_names": [
- "metldata_cors_allowed_origins"
+ "default": null,
+ "description": "A list of origins that should be permitted to make cross-origin requests. By default, cross-origin requests are not allowed. You can use ['*'] to allow any origin.",
+ "examples": [
+ [
+ "https://example.org",
+ "https://www.example.org"
+ ]
],
- "type": "array",
- "items": {
- "type": "string"
- }
+ "title": "Cors Allowed Origins"
},
"cors_allow_credentials": {
- "title": "Cors Allow Credentials",
- "description": "Indicate that cookies should be supported for cross-origin requests. Defaults to False. Also, cors_allowed_origins cannot be set to ['*'] for credentials to be allowed. The origins must be explicitly specified.",
- "example": [
- "https://example.org",
- "https://www.example.org"
+ "anyOf": [
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "null"
+ }
],
- "env_names": [
- "metldata_cors_allow_credentials"
+ "default": null,
+ "description": "Indicate that cookies should be supported for cross-origin requests. Defaults to False. Also, cors_allowed_origins cannot be set to ['*'] for credentials to be allowed. The origins must be explicitly specified.",
+ "examples": [
+        true
],
- "type": "boolean"
+ "title": "Cors Allow Credentials"
},
"cors_allowed_methods": {
- "title": "Cors Allowed Methods",
- "description": "A list of HTTP methods that should be allowed for cross-origin requests. Defaults to ['GET']. You can use ['*'] to allow all standard methods.",
- "example": [
- "*"
+ "anyOf": [
+ {
+ "items": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ {
+ "type": "null"
+ }
],
- "env_names": [
- "metldata_cors_allowed_methods"
+ "default": null,
+ "description": "A list of HTTP methods that should be allowed for cross-origin requests. Defaults to ['GET']. You can use ['*'] to allow all standard methods.",
+ "examples": [
+ [
+ "*"
+ ]
],
- "type": "array",
- "items": {
- "type": "string"
- }
+ "title": "Cors Allowed Methods"
},
"cors_allowed_headers": {
- "title": "Cors Allowed Headers",
+ "anyOf": [
+ {
+ "items": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "A list of HTTP request headers that should be supported for cross-origin requests. Defaults to []. You can use ['*'] to allow all headers. The Accept, Accept-Language, Content-Language and Content-Type headers are always allowed for CORS requests.",
- "example": [],
- "env_names": [
- "metldata_cors_allowed_headers"
+ "examples": [
+ []
],
- "type": "array",
+ "title": "Cors Allowed Headers"
+ },
+ "artifact_infos": {
+ "description": "Information for artifacts to be queryable via the Artifacts REST API.",
"items": {
- "type": "string"
- }
+ "$ref": "#/$defs/ArtifactInfo"
+ },
+ "title": "Artifact Infos",
+ "type": "array"
},
"loader_token_hashes": {
- "title": "Loader Token Hashes",
"description": "Hashes of tokens used to authenticate for loading artifact.",
- "env_names": [
- "metldata_loader_token_hashes"
- ],
- "type": "array",
"items": {
"type": "string"
- }
+ },
+ "title": "Loader Token Hashes",
+ "type": "array"
}
},
"required": [
- "artifact_infos",
"db_connection_str",
"db_name",
"service_instance_id",
@@ -296,102 +413,9 @@
"dataset_change_event_topic",
"dataset_deletion_type",
"dataset_upsertion_type",
+ "artifact_infos",
"loader_token_hashes"
],
- "additionalProperties": false,
- "definitions": {
- "AnchorPoint": {
- "title": "AnchorPoint",
- "description": "A model for describing an anchor point for the specified target class.",
- "type": "object",
- "properties": {
- "target_class": {
- "title": "Target Class",
- "description": "The name of the class to be targeted.",
- "type": "string"
- },
- "identifier_slot": {
- "title": "Identifier Slot",
- "description": "The name of the slot in the target class that is used as identifier.",
- "type": "string"
- },
- "root_slot": {
- "title": "Root Slot",
- "description": "The name of the slot in the root class used to link to the target class.",
- "type": "string"
- }
- },
- "required": [
- "target_class",
- "identifier_slot",
- "root_slot"
- ]
- },
- "ArtifactResourceClass": {
- "title": "ArtifactResourceClass",
- "description": "Model to describe a resource class of an artifact.",
- "type": "object",
- "properties": {
- "name": {
- "title": "Name",
- "description": "The name of the metadata class.",
- "type": "string"
- },
- "description": {
- "title": "Description",
- "description": "A description of the metadata class.",
- "type": "string"
- },
- "anchor_point": {
- "title": "Anchor Point",
- "description": "The anchor point for this metadata class.",
- "allOf": [
- {
- "$ref": "#/definitions/AnchorPoint"
- }
- ]
- },
- "json_schema": {
- "title": "Json Schema",
- "description": "The JSON schema for this metadata class.",
- "type": "object"
- }
- },
- "required": [
- "name",
- "anchor_point",
- "json_schema"
- ]
- },
- "ArtifactInfo": {
- "title": "ArtifactInfo",
- "description": "Model to describe general information on an artifact.\nPlease note, it does not contain actual artifact instances derived from specific\nmetadata.",
- "type": "object",
- "properties": {
- "name": {
- "title": "Name",
- "description": "The name of the artifact.",
- "type": "string"
- },
- "description": {
- "title": "Description",
- "description": "A description of the artifact.",
- "type": "string"
- },
- "resource_classes": {
- "title": "Resource Classes",
- "description": "A dictionary of resource classes for this artifact. The keys are the names of the classes. The values are the corresponding class models.",
- "type": "object",
- "additionalProperties": {
- "$ref": "#/definitions/ArtifactResourceClass"
- }
- }
- },
- "required": [
- "name",
- "description",
- "resource_classes"
- ]
- }
- }
+ "title": "ModSettings",
+ "type": "object"
}
\ No newline at end of file
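Most of the churn in `config_schema.json` is mechanical fallout of the pydantic v1 → v2 migration pinned further down: v2 emits `$defs` instead of `definitions` and `examples` arrays instead of `example`, renders optional fields as `anyOf` with a `null` branch, and drops the v1-specific `env_names` arrays (environment-variable handling now lives in `pydantic-settings`). A stand-in model illustrating the new output:

```python
# Illustrative stand-in only - not metldata's real settings class.
from pydantic import Field
from pydantic_settings import BaseSettings


class DemoSettings(BaseSettings):
    service_instance_id: str = Field(
        ...,
        description="A string that uniquely identifies this instance.",
        examples=["germany-bw-instance-001"],
    )


# pydantic v2 emits "$defs"/"examples" where v1 emitted "definitions"/"example".
print(DemoSettings.model_json_schema())
```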
diff --git a/example_config.yaml b/example_config.yaml
index 3bb158e5..2aa32e0d 100644
--- a/example_config.yaml
+++ b/example_config.yaml
@@ -204,8 +204,13 @@ db_connection_str: '**********'
db_name: dev_db
docs_url: /docs
host: 127.0.0.1
+kafka_security_protocol: PLAINTEXT
kafka_servers:
- kafka:9092
+kafka_ssl_cafile: ''
+kafka_ssl_certfile: ''
+kafka_ssl_keyfile: ''
+kafka_ssl_password: ''
loader_token_hashes:
- 09e5724dab34b50fe2db5ebae7ac5eac4ef2904d105f8f2b3d5a4cb2c7f43da4
log_level: trace
@@ -216,6 +221,6 @@ primary_dataset_name: EmbeddedDataset
resource_change_event_topic: searchable_resources
resource_deletion_event_type: searchable_resource_deleted
resource_upsertion_type: searchable_resource_upserted
-service_instance_id: '1'
+service_instance_id: '001'
service_name: metldata
workers: 1
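The new `kafka_security_protocol` and `kafka_ssl_*` keys arrive with hexkit 1.0; under the default `PLAINTEXT` protocol the empty-string SSL values are ignored. When switching to `SSL`, they would typically feed an SSL context along these lines (hypothetical paths, shown only to clarify what each key refers to):

```python
# Sketch with hypothetical paths - only relevant when
# kafka_security_protocol is set to "SSL".
from aiokafka.helpers import create_ssl_context

ssl_context = create_ssl_context(
    cafile="/etc/ssl/kafka/ca.pem",        # kafka_ssl_cafile
    certfile="/etc/ssl/kafka/client.pem",  # kafka_ssl_certfile
    keyfile="/etc/ssl/kafka/client.key",   # kafka_ssl_keyfile
    password="changeit",                   # kafka_ssl_password
)
```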
diff --git a/pyproject.toml b/pyproject.toml
index 65503bd2..e16ef130 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -21,10 +21,10 @@ classifiers = [
"Intended Audience :: Developers",
]
dependencies = [
- "hexkit[akafka,mongodb]>=0.10.2",
- "ghga-service-commons[api,auth]>=0.5.0",
- "ghga-event-schemas>=0.13.4",
- "typer>=0.7.0",
+ "hexkit[akafka,mongodb]~=1.0.0",
+ "ghga-service-commons[api,auth]~=1.1.0",
+ "ghga-event-schemas~=1.0.0",
+ "typer~=0.9.0",
"linkml==1.6.1",
"linkml-runtime==1.6.0",
"linkml-validator==0.4.5",
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 596f35b7..97c91940 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile with Python 3.9
# by the following command:
#
-# pip-compile --generate-hashes --output-file=/workspace/requirements-dev.txt /tmp/tmpu62427re/pyproject.toml /workspace/requirements-dev.in
+# pip-compile --generate-hashes --output-file=/workspace/requirements-dev.txt /tmp/tmph8bxvet7/pyproject.toml /workspace/requirements-dev.in
#
aiokafka==0.8.0 \
--hash=sha256:021e9f0027ca63c6c04daccfdd0e985f7a56d51bd0d43f482f674a58fada52f5 \
@@ -37,16 +37,21 @@ aiokafka==0.8.0 \
--hash=sha256:f0a216a27f05b050d5a5308fb3444014fa6bca5f0cd63468eaa169c5f19ea1dd \
--hash=sha256:f3f96301337fa7f7242f46651619b8e9e8fa8f23902dc11416fe764436d662d3
# via hexkit
+annotated-types==0.6.0 \
+ --hash=sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43 \
+ --hash=sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d
+ # via pydantic
antlr4-python3-runtime==4.9.3 \
--hash=sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b
# via
# linkml
# pyjsg
# pyshexc
-anyio==4.0.0 \
- --hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f \
- --hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a
+anyio==3.7.1 \
+ --hash=sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780 \
+ --hash=sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5
# via
+ # fastapi
# httpcore
# starlette
# watchfiles
@@ -326,7 +331,9 @@ coverage[toml]==7.3.2 \
--hash=sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738 \
--hash=sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a \
--hash=sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4
- # via pytest-cov
+ # via
+ # coverage
+ # pytest-cov
cryptography==41.0.4 \
--hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \
--hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \
@@ -447,7 +454,6 @@ dnspython==2.3.0 \
--hash=sha256:89141536394f909066cabd112e3e1a37e4e654db00a25308b0f130bc3152eb46
# via
# email-validator
- # ghga-event-schemas
# pymongo
docker==6.1.3 \
--hash=sha256:aa6d17830045ba5ef0168d5eaa34d37beeb113948c413affe1d5991fc11f9a20 \
@@ -467,9 +473,9 @@ exceptiongroup==1.1.3 \
# via
# anyio
# pytest
-fastapi==0.96.1 \
- --hash=sha256:22d773ce95f14f04f8f37a0c8998fc163e67af83b65510d2879de6cbaaa10215 \
- --hash=sha256:5c1d243030e63089ccfc0aec69c2da6d619943917727e8e82ee502358d5119bf
+fastapi==0.103.2 \
+ --hash=sha256:3270de872f0fe9ec809d4bd3d4d890c6d5cc7b9611d721d6438f9dacc8c4ef2e \
+ --hash=sha256:75a11f6bfb8fc4d2bec0bd710c2d5f2829659c0e8c0afd5560fdda6ce25ec653
# via ghga-service-commons
filelock==3.12.4 \
--hash=sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4 \
@@ -479,14 +485,16 @@ fqdn==1.5.1 \
--hash=sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f \
--hash=sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014
# via jsonschema
-ghga-event-schemas==0.13.5 \
- --hash=sha256:1ce129c7c969fe2b263483d55c500a3d7f75b6c97e0acb6f49ddd84eca1e1974 \
- --hash=sha256:c5fb0abab7d06512b2470bb84247473528051d109200ba69a87b6701e3384d6c
- # via metldata (pyproject.toml)
-ghga-service-commons[api,auth]==0.7.1 \
- --hash=sha256:70a6bc39d3ab8309524dfbcd4c29e5f37de118c41107fb42054834e5dcf8decc \
- --hash=sha256:888a789cbea86bcd7433e0b848c137928eea87bd9194de3504a3c25a36ac983c
+ghga-event-schemas==1.0.0 \
+ --hash=sha256:34e3dcfc97025dcf9c05de73f3d40cf3c1cb361ba74f4cb2ef621de16bc158c2 \
+ --hash=sha256:9f006151bb19064eb135313967e8ca6c07382ecfe0a273884e96236ffd01f3ea
# via metldata (pyproject.toml)
+ghga-service-commons[api,auth]==1.1.0 \
+ --hash=sha256:2502ac05e429f988d066303b53147691b86c9915455c375c79a4c73d65060e8a \
+ --hash=sha256:a198195e731c0239e2c248e8f40c3918ebb6087f1d0fb8f55eef91479c7ea287
+ # via
+ # ghga-service-commons
+ # metldata (pyproject.toml)
gprof2dot==2022.7.29 \
--hash=sha256:45b4d298bd36608fccf9511c3fd88a773f7a1abc04d6cd39445b11ba43133ec5 \
--hash=sha256:f165b3851d3c52ee4915eb1bd6cca571e5759823c2cd0f71a79bda93c2dc85d6
@@ -572,11 +580,12 @@ hbreader==0.9.1 \
# jsonasobj2
# linkml
# linkml-runtime
-hexkit[akafka,mongodb,test-mongodb]==0.10.2 \
- --hash=sha256:1495f2bc6ae7423874bc20367dd28555cd15a74ccf5cb4997e0fb8307757987e \
- --hash=sha256:436ea50e706ab616803eb85a9a9f5e7bec727379b750b966650d6e64d4ea5ef0
+hexkit[akafka,mongodb,test-mongodb]==1.0.0 \
+ --hash=sha256:767e9cda5433f8e0c74e8f0b0376dccbfee8db1efecd2dc75e3f2b841dc99c60 \
+ --hash=sha256:a88afb9d8ff9c1672fe81a233501b2ae369e155b72aae46ff5ae83f7bb252b6b
# via
# -r /workspace/requirements-dev.in
+ # hexkit
# metldata (pyproject.toml)
httpcore==0.16.3 \
--hash=sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb \
@@ -701,6 +710,7 @@ jsonschema[format]==4.19.1 \
# via
# ghga-event-schemas
# hexkit
+ # jsonschema
# linkml
# linkml-runtime
jsonschema-specifications==2023.7.1 \
@@ -906,43 +916,9 @@ pycparser==2.21 \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
# via cffi
-pydantic[email]==1.10.6 \
- --hash=sha256:012c99a9c0d18cfde7469aa1ebff922e24b0c706d03ead96940f5465f2c9cf62 \
- --hash=sha256:0abd9c60eee6201b853b6c4be104edfba4f8f6c5f3623f8e1dba90634d63eb35 \
- --hash=sha256:12e837fd320dd30bd625be1b101e3b62edc096a49835392dcf418f1a5ac2b832 \
- --hash=sha256:163e79386c3547c49366e959d01e37fc30252285a70619ffc1b10ede4758250a \
- --hash=sha256:189318051c3d57821f7233ecc94708767dd67687a614a4e8f92b4a020d4ffd06 \
- --hash=sha256:1c84583b9df62522829cbc46e2b22e0ec11445625b5acd70c5681ce09c9b11c4 \
- --hash=sha256:3091d2eaeda25391405e36c2fc2ed102b48bac4b384d42b2267310abae350ca6 \
- --hash=sha256:32937835e525d92c98a1512218db4eed9ddc8f4ee2a78382d77f54341972c0e7 \
- --hash=sha256:3a2be0a0f32c83265fd71a45027201e1278beaa82ea88ea5b345eea6afa9ac7f \
- --hash=sha256:3ac1cd4deed871dfe0c5f63721e29debf03e2deefa41b3ed5eb5f5df287c7b70 \
- --hash=sha256:3ce13a558b484c9ae48a6a7c184b1ba0e5588c5525482681db418268e5f86186 \
- --hash=sha256:415a3f719ce518e95a92effc7ee30118a25c3d032455d13e121e3840985f2efd \
- --hash=sha256:43cdeca8d30de9a897440e3fb8866f827c4c31f6c73838e3a01a14b03b067b1d \
- --hash=sha256:476f6674303ae7965730a382a8e8d7fae18b8004b7b69a56c3d8fa93968aa21c \
- --hash=sha256:4c19eb5163167489cb1e0161ae9220dadd4fc609a42649e7e84a8fa8fff7a80f \
- --hash=sha256:4ca83739c1263a044ec8b79df4eefc34bbac87191f0a513d00dd47d46e307a65 \
- --hash=sha256:528dcf7ec49fb5a84bf6fe346c1cc3c55b0e7603c2123881996ca3ad79db5bfc \
- --hash=sha256:53de12b4608290992a943801d7756f18a37b7aee284b9ffa794ee8ea8153f8e2 \
- --hash=sha256:587d92831d0115874d766b1f5fddcdde0c5b6c60f8c6111a394078ec227fca6d \
- --hash=sha256:60184e80aac3b56933c71c48d6181e630b0fbc61ae455a63322a66a23c14731a \
- --hash=sha256:6195ca908045054dd2d57eb9c39a5fe86409968b8040de8c2240186da0769da7 \
- --hash=sha256:61f1f08adfaa9cc02e0cbc94f478140385cbd52d5b3c5a657c2fceb15de8d1fb \
- --hash=sha256:72cb30894a34d3a7ab6d959b45a70abac8a2a93b6480fc5a7bfbd9c935bdc4fb \
- --hash=sha256:751f008cd2afe812a781fd6aa2fb66c620ca2e1a13b6a2152b1ad51553cb4b77 \
- --hash=sha256:89f15277d720aa57e173954d237628a8d304896364b9de745dcb722f584812c7 \
- --hash=sha256:8c32b6bba301490d9bb2bf5f631907803135e8085b6aa3e5fe5a770d46dd0160 \
- --hash=sha256:acc6783751ac9c9bc4680379edd6d286468a1dc8d7d9906cd6f1186ed682b2b0 \
- --hash=sha256:b1eb6610330a1dfba9ce142ada792f26bbef1255b75f538196a39e9e90388bf4 \
- --hash=sha256:b243b564cea2576725e77aeeda54e3e0229a168bc587d536cd69941e6797543d \
- --hash=sha256:b41822064585fea56d0116aa431fbd5137ce69dfe837b599e310034171996084 \
- --hash=sha256:bbd5c531b22928e63d0cb1868dee76123456e1de2f1cb45879e9e7a3f3f1779b \
- --hash=sha256:cf95adb0d1671fc38d8c43dd921ad5814a735e7d9b4d9e437c088002863854fd \
- --hash=sha256:e277bd18339177daa62a294256869bbe84df1fb592be2716ec62627bb8d7c81d \
- --hash=sha256:ea4e2a7cb409951988e79a469f609bba998a576e6d7b9791ae5d1e0619e1c0f2 \
- --hash=sha256:f9289065611c48147c1dd1fd344e9d57ab45f1d99b0fb26c51f1cf72cd9bcd31 \
- --hash=sha256:fd9b9e98068fa1068edfc9eabde70a7132017bdd4f362f8b4fd0abed79c33083
+pydantic[email]==2.4.2 \
+ --hash=sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7 \
+ --hash=sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1
# via
# curies
# fastapi
@@ -952,6 +928,119 @@ pydantic[email]==1.10.6 \
# linkml
# linkml-runtime
# linkml-validator
+ # pydantic-settings
+pydantic-core==2.10.1 \
+ --hash=sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e \
+ --hash=sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33 \
+ --hash=sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7 \
+ --hash=sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7 \
+ --hash=sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea \
+ --hash=sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4 \
+ --hash=sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0 \
+ --hash=sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7 \
+ --hash=sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94 \
+ --hash=sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff \
+ --hash=sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82 \
+ --hash=sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd \
+ --hash=sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893 \
+ --hash=sha256:1396e81b83516b9d5c9e26a924fa69164156c148c717131f54f586485ac3c15e \
+ --hash=sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d \
+ --hash=sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901 \
+ --hash=sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9 \
+ --hash=sha256:14e09ff0b8fe6e46b93d36a878f6e4a3a98ba5303c76bb8e716f4878a3bee92c \
+ --hash=sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7 \
+ --hash=sha256:15d6bca84ffc966cc9976b09a18cf9543ed4d4ecbd97e7086f9ce9327ea48891 \
+ --hash=sha256:1d40f55222b233e98e3921df7811c27567f0e1a4411b93d4c5c0f4ce131bc42f \
+ --hash=sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a \
+ --hash=sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9 \
+ --hash=sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5 \
+ --hash=sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e \
+ --hash=sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a \
+ --hash=sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c \
+ --hash=sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f \
+ --hash=sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514 \
+ --hash=sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b \
+ --hash=sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302 \
+ --hash=sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096 \
+ --hash=sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0 \
+ --hash=sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27 \
+ --hash=sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884 \
+ --hash=sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a \
+ --hash=sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357 \
+ --hash=sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430 \
+ --hash=sha256:5cabb9710f09d5d2e9e2748c3e3e20d991a4c5f96ed8f1132518f54ab2967221 \
+ --hash=sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325 \
+ --hash=sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4 \
+ --hash=sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05 \
+ --hash=sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55 \
+ --hash=sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875 \
+ --hash=sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970 \
+ --hash=sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc \
+ --hash=sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6 \
+ --hash=sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f \
+ --hash=sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b \
+ --hash=sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d \
+ --hash=sha256:82f55187a5bebae7d81d35b1e9aaea5e169d44819789837cdd4720d768c55d15 \
+ --hash=sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118 \
+ --hash=sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee \
+ --hash=sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e \
+ --hash=sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6 \
+ --hash=sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208 \
+ --hash=sha256:9badf8d45171d92387410b04639d73811b785b5161ecadabf056ea14d62d4ede \
+ --hash=sha256:9c120c9ce3b163b985a3b966bb701114beb1da4b0468b9b236fc754783d85aa3 \
+ --hash=sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e \
+ --hash=sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada \
+ --hash=sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175 \
+ --hash=sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a \
+ --hash=sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c \
+ --hash=sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f \
+ --hash=sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58 \
+ --hash=sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f \
+ --hash=sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a \
+ --hash=sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a \
+ --hash=sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921 \
+ --hash=sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e \
+ --hash=sha256:b3dcd587b69bbf54fc04ca157c2323b8911033e827fffaecf0cafa5a892a0904 \
+ --hash=sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776 \
+ --hash=sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52 \
+ --hash=sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf \
+ --hash=sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8 \
+ --hash=sha256:cfe1090245c078720d250d19cb05d67e21a9cd7c257698ef139bc41cf6c27b4f \
+ --hash=sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b \
+ --hash=sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63 \
+ --hash=sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c \
+ --hash=sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f \
+ --hash=sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468 \
+ --hash=sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e \
+ --hash=sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab \
+ --hash=sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2 \
+ --hash=sha256:e357571bb0efd65fd55f18db0a2fb0ed89d0bb1d41d906b138f088933ae618bb \
+ --hash=sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb \
+ --hash=sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132 \
+ --hash=sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b \
+ --hash=sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607 \
+ --hash=sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934 \
+ --hash=sha256:ebedb45b9feb7258fac0a268a3f6bec0a2ea4d9558f3d6f813f02ff3a6dc6698 \
+ --hash=sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e \
+ --hash=sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561 \
+ --hash=sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de \
+ --hash=sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b \
+ --hash=sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a \
+ --hash=sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595 \
+ --hash=sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402 \
+ --hash=sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881 \
+ --hash=sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429 \
+ --hash=sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5 \
+ --hash=sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7 \
+ --hash=sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c \
+ --hash=sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531 \
+ --hash=sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6 \
+ --hash=sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521
+ # via pydantic
+pydantic-settings==2.0.3 \
+ --hash=sha256:962dc3672495aad6ae96a4390fac7e593591e144625e5112d359f8f67fb75945 \
+ --hash=sha256:ddd907b066622bd67603b75e2ff791875540dc485b7307c4fffc015719da8625
+ # via hexkit
pyjsg==0.11.10 \
--hash=sha256:10af60ff42219be7e85bf7f11c19b648715b0b29eb2ddbd269e87069a7c3f26d \
--hash=sha256:4bd6e3ff2833fa2b395bbe803a2d72a5f0bab5b7285bccd0da1a1bc0aee88bfa
@@ -1100,7 +1189,9 @@ python-dateutil==2.8.2 \
python-dotenv==1.0.0 \
--hash=sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba \
--hash=sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a
- # via uvicorn
+ # via
+ # pydantic-settings
+ # uvicorn
pytrie==0.4.0 \
--hash=sha256:8f4488f402d3465993fb6b6efa09866849ed8cda7903b50647b7d0342b805379
# via curies
@@ -1201,7 +1292,9 @@ rfc3339-validator==0.1.4 \
rfc3986[idna2008]==1.5.0 \
--hash=sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835 \
--hash=sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97
- # via httpx
+ # via
+ # httpx
+ # rfc3986
rfc3987==1.3.8 \
--hash=sha256:10702b1e51e5658843460b189b185c0366d2cf4cff716f13111b0ea9fd2dce53 \
--hash=sha256:d3c4d257a560d544e9826b38bc81db676890c79ab9d7ac92b39c7a253d5ca733
@@ -1456,9 +1549,11 @@ stringcase==1.2.0 \
# via
# -r /workspace/requirements-dev-common.in
# linkml-validator
-testcontainers[mongo]==3.4.1 \
- --hash=sha256:7626f2899f869b929c14b8eb4996b1e70a4b4bd1934de500be193db89b18f7cc
- # via hexkit
+testcontainers[kafka,mongo]==3.7.1 \
+ --hash=sha256:7f48cef4bf0ccd78f1a4534d4b701a003a3bace851f24eae58a32f9e3f0aeba0
+ # via
+ # hexkit
+ # testcontainers
tomli==2.0.1 \
--hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
--hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
@@ -1503,9 +1598,11 @@ typing-extensions==4.8.0 \
--hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef
# via
# black
+ # fastapi
# mypy
# prefixmaps
# pydantic
+ # pydantic-core
# sqlalchemy
# starlette
# typer
@@ -1524,7 +1621,9 @@ urllib3==2.0.6 \
uvicorn[standard]==0.20.0 \
--hash=sha256:a4e12017b940247f836bc90b72e725d7dfd0c8ed1c51eb365f5ba30d9f5127d8 \
--hash=sha256:c3ed1598a5668208723f2bb49336f4509424ad198d6ab2615b7783db58d919fd
- # via ghga-service-commons
+ # via
+ # ghga-service-commons
+ # uvicorn
uvloop==0.17.0 \
--hash=sha256:0949caf774b9fcefc7c5756bacbbbd3fc4c05a6b7eebc7c7ad6f825b23998d6d \
--hash=sha256:0ddf6baf9cf11a1a22c71487f39f15b2cf78eb5bde7e5b45fbb99e8a9d91b9e1 \
diff --git a/requirements.txt b/requirements.txt
index a1eaca8e..75461ba8 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile with Python 3.9
# by the following command:
#
-# pip-compile --constraint=/workspace/requirements-dev.txt --generate-hashes --output-file=/workspace/requirements.txt /tmp/tmpu62427re/pyproject.toml
+# pip-compile --constraint=/workspace/requirements-dev.txt --generate-hashes --output-file=/workspace/requirements.txt /tmp/tmph8bxvet7/pyproject.toml
#
aiokafka==0.8.0 \
--hash=sha256:021e9f0027ca63c6c04daccfdd0e985f7a56d51bd0d43f482f674a58fada52f5 \
@@ -39,6 +39,12 @@ aiokafka==0.8.0 \
# via
# -c /workspace/requirements-dev.txt
# hexkit
+annotated-types==0.6.0 \
+ --hash=sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43 \
+ --hash=sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d
+ # via
+ # -c /workspace/requirements-dev.txt
+ # pydantic
antlr4-python3-runtime==4.9.3 \
--hash=sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b
# via
@@ -46,11 +52,12 @@ antlr4-python3-runtime==4.9.3 \
# linkml
# pyjsg
# pyshexc
-anyio==4.0.0 \
- --hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f \
- --hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a
+anyio==3.7.1 \
+ --hash=sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780 \
+ --hash=sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5
# via
# -c /workspace/requirements-dev.txt
+ # fastapi
# httpcore
# starlette
# watchfiles
@@ -376,7 +383,6 @@ dnspython==2.3.0 \
# via
# -c /workspace/requirements-dev.txt
# email-validator
- # ghga-event-schemas
# pymongo
email-validator==2.0.0.post2 \
--hash=sha256:1ff6e86044200c56ae23595695c54e9614f4a9551e0e393614f764860b3d7900 \
@@ -397,9 +403,9 @@ exceptiongroup==1.1.3 \
# -c /workspace/requirements-dev.txt
# anyio
# pytest
-fastapi==0.96.1 \
- --hash=sha256:22d773ce95f14f04f8f37a0c8998fc163e67af83b65510d2879de6cbaaa10215 \
- --hash=sha256:5c1d243030e63089ccfc0aec69c2da6d619943917727e8e82ee502358d5119bf
+fastapi==0.103.2 \
+ --hash=sha256:3270de872f0fe9ec809d4bd3d4d890c6d5cc7b9611d721d6438f9dacc8c4ef2e \
+ --hash=sha256:75a11f6bfb8fc4d2bec0bd710c2d5f2829659c0e8c0afd5560fdda6ce25ec653
# via
# -c /workspace/requirements-dev.txt
# ghga-service-commons
@@ -409,17 +415,18 @@ fqdn==1.5.1 \
# via
# -c /workspace/requirements-dev.txt
# jsonschema
-ghga-event-schemas==0.13.5 \
- --hash=sha256:1ce129c7c969fe2b263483d55c500a3d7f75b6c97e0acb6f49ddd84eca1e1974 \
- --hash=sha256:c5fb0abab7d06512b2470bb84247473528051d109200ba69a87b6701e3384d6c
+ghga-event-schemas==1.0.0 \
+ --hash=sha256:34e3dcfc97025dcf9c05de73f3d40cf3c1cb361ba74f4cb2ef621de16bc158c2 \
+ --hash=sha256:9f006151bb19064eb135313967e8ca6c07382ecfe0a273884e96236ffd01f3ea
# via
# -c /workspace/requirements-dev.txt
# metldata (pyproject.toml)
-ghga-service-commons[api,auth]==0.7.1 \
- --hash=sha256:70a6bc39d3ab8309524dfbcd4c29e5f37de118c41107fb42054834e5dcf8decc \
- --hash=sha256:888a789cbea86bcd7433e0b848c137928eea87bd9194de3504a3c25a36ac983c
+ghga-service-commons[api,auth]==1.1.0 \
+ --hash=sha256:2502ac05e429f988d066303b53147691b86c9915455c375c79a4c73d65060e8a \
+ --hash=sha256:a198195e731c0239e2c248e8f40c3918ebb6087f1d0fb8f55eef91479c7ea287
# via
# -c /workspace/requirements-dev.txt
+ # ghga-service-commons
# metldata (pyproject.toml)
graphviz==0.20.1 \
--hash=sha256:587c58a223b51611c0cf461132da386edd896a029524ca61a1462b880bf97977 \
@@ -507,11 +514,12 @@ hbreader==0.9.1 \
# jsonasobj2
# linkml
# linkml-runtime
-hexkit[akafka,mongodb]==0.10.2 \
- --hash=sha256:1495f2bc6ae7423874bc20367dd28555cd15a74ccf5cb4997e0fb8307757987e \
- --hash=sha256:436ea50e706ab616803eb85a9a9f5e7bec727379b750b966650d6e64d4ea5ef0
+hexkit[akafka,mongodb]==1.0.0 \
+ --hash=sha256:767e9cda5433f8e0c74e8f0b0376dccbfee8db1efecd2dc75e3f2b841dc99c60 \
+ --hash=sha256:a88afb9d8ff9c1672fe81a233501b2ae369e155b72aae46ff5ae83f7bb252b6b
# via
# -c /workspace/requirements-dev.txt
+ # hexkit
# metldata (pyproject.toml)
httpcore==0.16.3 \
--hash=sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb \
@@ -652,6 +660,7 @@ jsonschema[format]==4.19.1 \
# -c /workspace/requirements-dev.txt
# ghga-event-schemas
# hexkit
+ # jsonschema
# linkml
# linkml-runtime
jsonschema-specifications==2023.7.1 \
@@ -820,43 +829,9 @@ pycparser==2.21 \
# via
# -c /workspace/requirements-dev.txt
# cffi
-pydantic[email]==1.10.6 \
- --hash=sha256:012c99a9c0d18cfde7469aa1ebff922e24b0c706d03ead96940f5465f2c9cf62 \
- --hash=sha256:0abd9c60eee6201b853b6c4be104edfba4f8f6c5f3623f8e1dba90634d63eb35 \
- --hash=sha256:12e837fd320dd30bd625be1b101e3b62edc096a49835392dcf418f1a5ac2b832 \
- --hash=sha256:163e79386c3547c49366e959d01e37fc30252285a70619ffc1b10ede4758250a \
- --hash=sha256:189318051c3d57821f7233ecc94708767dd67687a614a4e8f92b4a020d4ffd06 \
- --hash=sha256:1c84583b9df62522829cbc46e2b22e0ec11445625b5acd70c5681ce09c9b11c4 \
- --hash=sha256:3091d2eaeda25391405e36c2fc2ed102b48bac4b384d42b2267310abae350ca6 \
- --hash=sha256:32937835e525d92c98a1512218db4eed9ddc8f4ee2a78382d77f54341972c0e7 \
- --hash=sha256:3a2be0a0f32c83265fd71a45027201e1278beaa82ea88ea5b345eea6afa9ac7f \
- --hash=sha256:3ac1cd4deed871dfe0c5f63721e29debf03e2deefa41b3ed5eb5f5df287c7b70 \
- --hash=sha256:3ce13a558b484c9ae48a6a7c184b1ba0e5588c5525482681db418268e5f86186 \
- --hash=sha256:415a3f719ce518e95a92effc7ee30118a25c3d032455d13e121e3840985f2efd \
- --hash=sha256:43cdeca8d30de9a897440e3fb8866f827c4c31f6c73838e3a01a14b03b067b1d \
- --hash=sha256:476f6674303ae7965730a382a8e8d7fae18b8004b7b69a56c3d8fa93968aa21c \
- --hash=sha256:4c19eb5163167489cb1e0161ae9220dadd4fc609a42649e7e84a8fa8fff7a80f \
- --hash=sha256:4ca83739c1263a044ec8b79df4eefc34bbac87191f0a513d00dd47d46e307a65 \
- --hash=sha256:528dcf7ec49fb5a84bf6fe346c1cc3c55b0e7603c2123881996ca3ad79db5bfc \
- --hash=sha256:53de12b4608290992a943801d7756f18a37b7aee284b9ffa794ee8ea8153f8e2 \
- --hash=sha256:587d92831d0115874d766b1f5fddcdde0c5b6c60f8c6111a394078ec227fca6d \
- --hash=sha256:60184e80aac3b56933c71c48d6181e630b0fbc61ae455a63322a66a23c14731a \
- --hash=sha256:6195ca908045054dd2d57eb9c39a5fe86409968b8040de8c2240186da0769da7 \
- --hash=sha256:61f1f08adfaa9cc02e0cbc94f478140385cbd52d5b3c5a657c2fceb15de8d1fb \
- --hash=sha256:72cb30894a34d3a7ab6d959b45a70abac8a2a93b6480fc5a7bfbd9c935bdc4fb \
- --hash=sha256:751f008cd2afe812a781fd6aa2fb66c620ca2e1a13b6a2152b1ad51553cb4b77 \
- --hash=sha256:89f15277d720aa57e173954d237628a8d304896364b9de745dcb722f584812c7 \
- --hash=sha256:8c32b6bba301490d9bb2bf5f631907803135e8085b6aa3e5fe5a770d46dd0160 \
- --hash=sha256:acc6783751ac9c9bc4680379edd6d286468a1dc8d7d9906cd6f1186ed682b2b0 \
- --hash=sha256:b1eb6610330a1dfba9ce142ada792f26bbef1255b75f538196a39e9e90388bf4 \
- --hash=sha256:b243b564cea2576725e77aeeda54e3e0229a168bc587d536cd69941e6797543d \
- --hash=sha256:b41822064585fea56d0116aa431fbd5137ce69dfe837b599e310034171996084 \
- --hash=sha256:bbd5c531b22928e63d0cb1868dee76123456e1de2f1cb45879e9e7a3f3f1779b \
- --hash=sha256:cf95adb0d1671fc38d8c43dd921ad5814a735e7d9b4d9e437c088002863854fd \
- --hash=sha256:e277bd18339177daa62a294256869bbe84df1fb592be2716ec62627bb8d7c81d \
- --hash=sha256:ea4e2a7cb409951988e79a469f609bba998a576e6d7b9791ae5d1e0619e1c0f2 \
- --hash=sha256:f9289065611c48147c1dd1fd344e9d57ab45f1d99b0fb26c51f1cf72cd9bcd31 \
- --hash=sha256:fd9b9e98068fa1068edfc9eabde70a7132017bdd4f362f8b4fd0abed79c33083
+pydantic[email]==2.4.2 \
+ --hash=sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7 \
+ --hash=sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1
# via
# -c /workspace/requirements-dev.txt
# curies
@@ -867,6 +842,123 @@ pydantic[email]==1.10.6 \
# linkml
# linkml-runtime
# linkml-validator
+ # pydantic-settings
+pydantic-core==2.10.1 \
+ --hash=sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e \
+ --hash=sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33 \
+ --hash=sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7 \
+ --hash=sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7 \
+ --hash=sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea \
+ --hash=sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4 \
+ --hash=sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0 \
+ --hash=sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7 \
+ --hash=sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94 \
+ --hash=sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff \
+ --hash=sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82 \
+ --hash=sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd \
+ --hash=sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893 \
+ --hash=sha256:1396e81b83516b9d5c9e26a924fa69164156c148c717131f54f586485ac3c15e \
+ --hash=sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d \
+ --hash=sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901 \
+ --hash=sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9 \
+ --hash=sha256:14e09ff0b8fe6e46b93d36a878f6e4a3a98ba5303c76bb8e716f4878a3bee92c \
+ --hash=sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7 \
+ --hash=sha256:15d6bca84ffc966cc9976b09a18cf9543ed4d4ecbd97e7086f9ce9327ea48891 \
+ --hash=sha256:1d40f55222b233e98e3921df7811c27567f0e1a4411b93d4c5c0f4ce131bc42f \
+ --hash=sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a \
+ --hash=sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9 \
+ --hash=sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5 \
+ --hash=sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e \
+ --hash=sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a \
+ --hash=sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c \
+ --hash=sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f \
+ --hash=sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514 \
+ --hash=sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b \
+ --hash=sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302 \
+ --hash=sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096 \
+ --hash=sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0 \
+ --hash=sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27 \
+ --hash=sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884 \
+ --hash=sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a \
+ --hash=sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357 \
+ --hash=sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430 \
+ --hash=sha256:5cabb9710f09d5d2e9e2748c3e3e20d991a4c5f96ed8f1132518f54ab2967221 \
+ --hash=sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325 \
+ --hash=sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4 \
+ --hash=sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05 \
+ --hash=sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55 \
+ --hash=sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875 \
+ --hash=sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970 \
+ --hash=sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc \
+ --hash=sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6 \
+ --hash=sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f \
+ --hash=sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b \
+ --hash=sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d \
+ --hash=sha256:82f55187a5bebae7d81d35b1e9aaea5e169d44819789837cdd4720d768c55d15 \
+ --hash=sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118 \
+ --hash=sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee \
+ --hash=sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e \
+ --hash=sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6 \
+ --hash=sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208 \
+ --hash=sha256:9badf8d45171d92387410b04639d73811b785b5161ecadabf056ea14d62d4ede \
+ --hash=sha256:9c120c9ce3b163b985a3b966bb701114beb1da4b0468b9b236fc754783d85aa3 \
+ --hash=sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e \
+ --hash=sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada \
+ --hash=sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175 \
+ --hash=sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a \
+ --hash=sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c \
+ --hash=sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f \
+ --hash=sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58 \
+ --hash=sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f \
+ --hash=sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a \
+ --hash=sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a \
+ --hash=sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921 \
+ --hash=sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e \
+ --hash=sha256:b3dcd587b69bbf54fc04ca157c2323b8911033e827fffaecf0cafa5a892a0904 \
+ --hash=sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776 \
+ --hash=sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52 \
+ --hash=sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf \
+ --hash=sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8 \
+ --hash=sha256:cfe1090245c078720d250d19cb05d67e21a9cd7c257698ef139bc41cf6c27b4f \
+ --hash=sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b \
+ --hash=sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63 \
+ --hash=sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c \
+ --hash=sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f \
+ --hash=sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468 \
+ --hash=sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e \
+ --hash=sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab \
+ --hash=sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2 \
+ --hash=sha256:e357571bb0efd65fd55f18db0a2fb0ed89d0bb1d41d906b138f088933ae618bb \
+ --hash=sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb \
+ --hash=sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132 \
+ --hash=sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b \
+ --hash=sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607 \
+ --hash=sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934 \
+ --hash=sha256:ebedb45b9feb7258fac0a268a3f6bec0a2ea4d9558f3d6f813f02ff3a6dc6698 \
+ --hash=sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e \
+ --hash=sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561 \
+ --hash=sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de \
+ --hash=sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b \
+ --hash=sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a \
+ --hash=sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595 \
+ --hash=sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402 \
+ --hash=sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881 \
+ --hash=sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429 \
+ --hash=sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5 \
+ --hash=sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7 \
+ --hash=sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c \
+ --hash=sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531 \
+ --hash=sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6 \
+ --hash=sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521
+ # via
+ # -c /workspace/requirements-dev.txt
+ # pydantic
+pydantic-settings==2.0.3 \
+ --hash=sha256:962dc3672495aad6ae96a4390fac7e593591e144625e5112d359f8f67fb75945 \
+ --hash=sha256:ddd907b066622bd67603b75e2ff791875540dc485b7307c4fffc015719da8625
+ # via
+ # -c /workspace/requirements-dev.txt
+ # hexkit
pyjsg==0.11.10 \
--hash=sha256:10af60ff42219be7e85bf7f11c19b648715b0b29eb2ddbd269e87069a7c3f26d \
--hash=sha256:4bd6e3ff2833fa2b395bbe803a2d72a5f0bab5b7285bccd0da1a1bc0aee88bfa
@@ -1002,6 +1094,7 @@ python-dotenv==1.0.0 \
--hash=sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a
# via
# -c /workspace/requirements-dev.txt
+ # pydantic-settings
# uvicorn
pytrie==0.4.0 \
--hash=sha256:8f4488f402d3465993fb6b6efa09866849ed8cda7903b50647b7d0342b805379
@@ -1113,6 +1206,7 @@ rfc3986[idna2008]==1.5.0 \
# via
# -c /workspace/requirements-dev.txt
# httpx
+ # rfc3986
rfc3987==1.3.8 \
--hash=sha256:10702b1e51e5658843460b189b185c0366d2cf4cff716f13111b0ea9fd2dce53 \
--hash=sha256:d3c4d257a560d544e9826b38bc81db676890c79ab9d7ac92b39c7a253d5ca733
@@ -1385,8 +1479,10 @@ typing-extensions==4.8.0 \
--hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef
# via
# -c /workspace/requirements-dev.txt
+ # fastapi
# prefixmaps
# pydantic
+ # pydantic-core
# sqlalchemy
# starlette
# typer
@@ -1409,6 +1505,7 @@ uvicorn[standard]==0.20.0 \
# via
# -c /workspace/requirements-dev.txt
# ghga-service-commons
+ # uvicorn
uvloop==0.17.0 \
--hash=sha256:0949caf774b9fcefc7c5756bacbbbd3fc4c05a6b7eebc7c7ad6f825b23998d6d \
--hash=sha256:0ddf6baf9cf11a1a22c71487f39f15b2cf78eb5bde7e5b45fbb99e8a9d91b9e1 \
diff --git a/scripts/license_checker.py b/scripts/license_checker.py
index 0ddc7861..5d8be069 100755
--- a/scripts/license_checker.py
+++ b/scripts/license_checker.py
@@ -88,6 +88,9 @@
"xml",
"yaml",
"yml",
+ "tsv",
+ "fastq",
+ "gz",
]
# exclude any files with names that match any of the following regex:
diff --git a/scripts/update_config_docs.py b/scripts/update_config_docs.py
index b1be57a0..90c12081 100755
--- a/scripts/update_config_docs.py
+++ b/scripts/update_config_docs.py
@@ -28,7 +28,6 @@
from typing import Any
import yaml
-from pydantic import BaseSettings
from script_utils.cli import echo_failure, echo_success, run
@@ -44,7 +43,7 @@ class ValidationError(RuntimeError):
"""Raised when validation of config documentation fails."""
-def get_config_class() -> type[BaseSettings]:
+def get_config_class():
"""
Dynamically imports and returns the Config class from the current service.
This makes the script service repo agnostic.
@@ -77,14 +76,16 @@ def get_schema() -> str:
"""Returns a JSON schema generated from a Config class."""
config = get_dev_config()
- return config.schema_json(indent=2)
+ return config.schema_json(indent=2) # change eventually to .model_json_schema(...)
def get_example() -> str:
"""Returns an example config YAML."""
config = get_dev_config()
- normalized_config_dict = json.loads(config.json())
+ normalized_config_dict = json.loads(
+ config.json() # change eventually to .model_dump_json()
+ )
return yaml.dump(normalized_config_dict) # pyright: ignore
diff --git a/src/metldata/accession_registry/accession_registry.py b/src/metldata/accession_registry/accession_registry.py
index bc310009..24701d88 100644
--- a/src/metldata/accession_registry/accession_registry.py
+++ b/src/metldata/accession_registry/accession_registry.py
@@ -19,7 +19,8 @@
import secrets
from collections.abc import Iterable
-from pydantic import BaseSettings, Field
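+# pydantic v2 note: BaseSettings moved to the separate pydantic-settings package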
+from pydantic import Field
+from pydantic_settings import BaseSettings
from metldata.accession_registry.accession_store import AccessionStore
@@ -30,12 +31,14 @@ class AccessionRegistryConfig(BaseSettings):
prefix_mapping: dict[str, str] = Field(
...,
description="Specifies the ID prefix (values) per resource type (keys).",
- example={
- "file": "GHGAF",
- "experiment": "GHGAE",
- "sample": "GHGAS",
- "dataset": "GHGAD",
- },
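+ # pydantic v2 note: Field(example=...) became Field(examples=[...]), taking a list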
+ examples=[
+ {
+ "file": "GHGAF",
+ "experiment": "GHGAE",
+ "sample": "GHGAS",
+ "dataset": "GHGAD",
+ }
+ ],
)
suffix_length: int = Field(8, description="Length of the numeric ID suffix.")
diff --git a/src/metldata/accession_registry/accession_store.py b/src/metldata/accession_registry/accession_store.py
index d190cb9c..4501ce63 100644
--- a/src/metldata/accession_registry/accession_store.py
+++ b/src/metldata/accession_registry/accession_store.py
@@ -18,7 +18,8 @@
from pathlib import Path
-from pydantic import BaseSettings, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings
class AccessionStoreConfig(BaseSettings):
diff --git a/src/metldata/artifacts_rest/config.py b/src/metldata/artifacts_rest/config.py
index 4cdac9f6..814c000e 100644
--- a/src/metldata/artifacts_rest/config.py
+++ b/src/metldata/artifacts_rest/config.py
@@ -16,7 +16,8 @@
"""Config parameters and their defaults."""
-from pydantic import BaseSettings, Field, validator
+from pydantic import Field, field_validator
+from pydantic_settings import BaseSettings
from metldata.artifacts_rest.models import ArtifactInfo
@@ -29,8 +30,7 @@ class ArtifactsRestConfig(BaseSettings):
description="Information for artifacts to be queryable via the Artifacts REST API.",
)
- # pylint: disable=no-self-argument
- @validator("artifact_infos")
+ @field_validator("artifact_infos")
def validate_artifact_info_names(
cls, value: list[ArtifactInfo]
) -> list[ArtifactInfo]:
diff --git a/src/metldata/artifacts_rest/models.py b/src/metldata/artifacts_rest/models.py
index 250aa17c..d82eeec3 100644
--- a/src/metldata/artifacts_rest/models.py
+++ b/src/metldata/artifacts_rest/models.py
@@ -16,10 +16,11 @@
"""Data models."""
-from typing import Optional, TypedDict
+from typing import Optional
from ghga_service_commons.utils.utc_dates import DateTimeUTC
-from pydantic import BaseModel, Field, validator
+from pydantic import BaseModel, Field, field_validator
+from typing_extensions import TypedDict
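+# pydantic v2 rejects typing.TypedDict on Python < 3.12, hence typing_extensions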
from metldata.custom_types import Json
from metldata.model_utils.anchors import AnchorPoint
@@ -73,8 +74,7 @@ class ArtifactInfo(BaseModel):
),
)
- # pylint: disable=no-self-argument
- @validator("resource_classes")
+ @field_validator("resource_classes")
def check_resource_class_names(
cls, value: dict[str, ArtifactResourceClass]
) -> dict[str, ArtifactResourceClass]:
diff --git a/src/metldata/builtin_transformations/add_accessions/config.py b/src/metldata/builtin_transformations/add_accessions/config.py
index 74b0befe..38ca599e 100644
--- a/src/metldata/builtin_transformations/add_accessions/config.py
+++ b/src/metldata/builtin_transformations/add_accessions/config.py
@@ -16,12 +16,15 @@
"""Config parameters and their defaults."""
-from pydantic import BaseSettings, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings, SettingsConfigDict
class AccessionAdditionConfig(BaseSettings):
"""Config to add accessions to a model and associated metadata."""
+ model_config = SettingsConfigDict(extra="forbid")
+
accession_slot_name: str = Field(
"accession", description="The name of the slot to contain the accessions to."
)
@@ -29,8 +32,3 @@ class AccessionAdditionConfig(BaseSettings):
"The accession for an entity.",
description="The description of the slot to contain the accessions to.",
)
-
- class Config:
- """Pydantic config."""
-
- extra = "forbid"
diff --git a/src/metldata/builtin_transformations/aggregate/config.py b/src/metldata/builtin_transformations/aggregate/config.py
index 66ad3308..e25d6335 100644
--- a/src/metldata/builtin_transformations/aggregate/config.py
+++ b/src/metldata/builtin_transformations/aggregate/config.py
@@ -18,7 +18,8 @@
from typing import Optional
-from pydantic import BaseModel, BaseSettings, root_validator
+from pydantic import BaseModel, model_validator
+from pydantic_settings import BaseSettings
from metldata.builtin_transformations.aggregate.func import (
AggregationFunction,
@@ -36,8 +37,7 @@ class AggregationOperation(BaseModel):
visit_only_once: Optional[list[str]] = None
function: type[AggregationFunction]
- # pylint: disable=no-self-argument
- @root_validator(pre=True)
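+ # pydantic v2 note: root_validator(pre=True) becomes model_validator(mode="before")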
+ @model_validator(mode="before")
def lookup_operation(cls, values: dict) -> dict:
"""Replaces operation strings with operation types."""
if "function" in values:
diff --git a/src/metldata/builtin_transformations/custom_embeddings/config.py b/src/metldata/builtin_transformations/custom_embeddings/config.py
index ce4a27d8..c527ad8b 100644
--- a/src/metldata/builtin_transformations/custom_embeddings/config.py
+++ b/src/metldata/builtin_transformations/custom_embeddings/config.py
@@ -16,7 +16,8 @@
"""Models used to describe embedding profiles."""
-from pydantic import BaseSettings, Field, validator
+from pydantic import Field, field_validator
+from pydantic_settings import BaseSettings, SettingsConfigDict
from metldata.builtin_transformations.custom_embeddings.embedding_profile import (
EmbeddingProfile,
@@ -42,6 +43,8 @@ class CustomEmbeddingConfig(BaseSettings):
model.
"""
+ model_config = SettingsConfigDict(extra="forbid")
+
embedding_profiles: list[EmbeddingProfile] = Field(
...,
description=(
@@ -50,7 +53,7 @@ class CustomEmbeddingConfig(BaseSettings):
)
# pylint: disable=no-self-argument
- @validator("embedding_profiles")
+ @field_validator("embedding_profiles")
def check_embedding_profiles_unique(
cls,
value: list[EmbeddingProfile],
@@ -66,8 +69,3 @@ def check_embedding_profiles_unique(
raise ValueError("Names for embedded classes must be unique.")
return value
-
- class Config:
- """Pydantic config."""
-
- extra = "forbid"
diff --git a/src/metldata/builtin_transformations/delete_slots/config.py b/src/metldata/builtin_transformations/delete_slots/config.py
index 41e7e0a3..09679b6a 100644
--- a/src/metldata/builtin_transformations/delete_slots/config.py
+++ b/src/metldata/builtin_transformations/delete_slots/config.py
@@ -16,12 +16,15 @@
"""Config parameters and their defaults."""
-from pydantic import BaseSettings, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings, SettingsConfigDict
class SlotDeletionConfig(BaseSettings):
"""Config containing slots to be deleted from models and associated metadata."""
+ model_config = SettingsConfigDict(extra="forbid")
+
slots_to_delete: dict[str, list[str]] = Field(
...,
description=(
@@ -29,14 +32,11 @@ class SlotDeletionConfig(BaseSettings):
+ " The keys refer to classes, the values to the slots that should be"
+ " deleted from the respective class."
),
- example={
- "class_a": ["some_slot", "another_slot"],
- "class_b": ["some_slot"],
- "class_c": ["some_slot", "yet_another_slot"],
- },
+ examples=[
+ {
+ "class_a": ["some_slot", "another_slot"],
+ "class_b": ["some_slot"],
+ "class_c": ["some_slot", "yet_another_slot"],
+ }
+ ],
)
-
- class Config:
- """Pydantic config."""
-
- extra = "forbid"
diff --git a/src/metldata/builtin_transformations/infer_references/config.py b/src/metldata/builtin_transformations/infer_references/config.py
index 4c0ba2aa..9362b17a 100644
--- a/src/metldata/builtin_transformations/infer_references/config.py
+++ b/src/metldata/builtin_transformations/infer_references/config.py
@@ -16,7 +16,8 @@
"""Models used to describe all inferred references based on existing references."""
-from pydantic import BaseSettings, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings, SettingsConfigDict
from metldata.builtin_transformations.infer_references.reference import (
InferredReference,
@@ -30,6 +31,8 @@ class ReferenceInferenceConfig(BaseSettings):
a list of InferredReferences.
"""
+ model_config = SettingsConfigDict(extra="forbid")
+
inferred_ref_map: dict[str, dict[str, ReferenceDetails]] = Field(
...,
description=(
@@ -39,24 +42,26 @@ class ReferenceInferenceConfig(BaseSettings):
+ " refer to the names of the new slots of classes that hold the inferred"
+ " references. The values refer to the actual references details."
),
- example={
- "class_a": {
- "class_d": {
- "path": "class_a(class_b)>class_b(class_d)>class_d",
- "multivalued": False,
+ examples=[
+ {
+ "class_a": {
+ "class_d": {
+ "path": "class_a(class_b)>class_b(class_d)>class_d",
+ "multivalued": False,
+ },
+ "class_c": {
+ "path": "class_a(class_b)>class_b<(class_c)class_c",
+ "multivalued": True,
+ },
},
- "class_c": {
- "path": "class_a(class_b)>class_b<(class_c)class_c",
- "multivalued": True,
+ "class_b": {
+ "class_c": {
+ "path": "class_b<(class_c)class_c",
+ "multivalued": True,
+ }
},
- },
- "class_b": {
- "class_c": {
- "path": "class_b<(class_c)class_c",
- "multivalued": True,
- }
- },
- },
+ }
+ ],
)
@property
@@ -78,8 +83,3 @@ def inferred_references(self) -> list[InferredReference]:
)
return inferred_refs
-
- class Config:
- """Pydantic config."""
-
- extra = "forbid"
diff --git a/src/metldata/builtin_transformations/infer_references/path/path.py b/src/metldata/builtin_transformations/infer_references/path/path.py
index 06b9a381..1f2e567a 100644
--- a/src/metldata/builtin_transformations/infer_references/path/path.py
+++ b/src/metldata/builtin_transformations/infer_references/path/path.py
@@ -16,6 +16,8 @@
"""Logic for handling reference paths."""
+from pydantic import GetJsonSchemaHandler, ValidationInfo
+
from metldata.builtin_transformations.infer_references.path.path_str import (
PATH_PATTERN,
ValidationError,
@@ -72,7 +74,7 @@ def __init__(self, *, path_str: str):
self.target = self.elements[-1].target
@classmethod
- def validate(cls, value) -> "ReferencePath":
+ def validate(cls, value, info: ValidationInfo) -> "ReferencePath":
"""A validator for pydantic."""
if isinstance(value, cls):
return value
@@ -91,7 +93,9 @@ def __get_validators__(cls):
yield cls.validate
@classmethod
- def __modify_schema__(cls, field_schema: dict):
+ def __get_pydantic_json_schema__(
+ cls, field_schema: dict, handler: GetJsonSchemaHandler
+ ):
"""Modify the field schema for pydantic."""
field_schema.update(type="string", pattern=PATH_PATTERN)
diff --git a/src/metldata/builtin_transformations/infer_references/reference.py b/src/metldata/builtin_transformations/infer_references/reference.py
index 4f821eb1..f1b15858 100644
--- a/src/metldata/builtin_transformations/infer_references/reference.py
+++ b/src/metldata/builtin_transformations/infer_references/reference.py
@@ -16,7 +16,7 @@
"""Reference models."""
-from pydantic import BaseModel, Field, root_validator
+from pydantic import BaseModel, ConfigDict, Field, model_validator
from metldata.builtin_transformations.infer_references.path.path import ReferencePath
@@ -40,6 +40,8 @@ class InferredReference(ReferenceDetails):
references.
"""
+ model_config = ConfigDict(frozen=True)
+
source: str = Field(
..., description="The source class to which this reference should be added."
)
@@ -49,25 +51,20 @@ class InferredReference(ReferenceDetails):
description="The name of the new slot in the source to store the inferred reference.",
)
- @root_validator(pre=False)
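+ # pydantic v2 note: an "after" model validator receives the validated instance,
+ # so fields are read as attributes below rather than via dict lookups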
+ @model_validator(mode="after")
@classmethod
def validate_source_and_target(cls, values):
"""Validate that the source and target attributes are identical with the
source and target specified in the path.
"""
- if values.get("source") != values.get("path").source:
+ if values.source != values.path.source:
raise ValueError(
"The source is not identical with the source of the specified path."
)
- if values.get("target") != values.get("path").target:
+ if values.target != values.path.target:
raise ValueError(
"The target is not identical with the target of the specified path."
)
return values
-
- class Config:
- """Config for this model."""
-
- frozen = True
diff --git a/src/metldata/builtin_transformations/merge_slots/config.py b/src/metldata/builtin_transformations/merge_slots/config.py
index 3497b831..93c2b81e 100644
--- a/src/metldata/builtin_transformations/merge_slots/config.py
+++ b/src/metldata/builtin_transformations/merge_slots/config.py
@@ -16,7 +16,8 @@
"""Config parameters and their defaults."""
-from pydantic import BaseSettings, Field, validator
+from pydantic import Field, field_validator
+from pydantic_settings import BaseSettings, SettingsConfigDict
from metldata.builtin_transformations.merge_slots.models import SlotMergeInstruction
@@ -24,6 +25,8 @@
class SlotMergingConfig(BaseSettings):
"""Config containing slots to be deleted from models and associated metadata."""
+ model_config = SettingsConfigDict(extra="forbid")
+
merge_instructions: list[SlotMergeInstruction] = Field(
...,
description=(
@@ -34,7 +37,7 @@ class SlotMergingConfig(BaseSettings):
+ " a source slot in another merge instruction."
+ " The source slots will not be deleted."
),
- example=[
+ examples=[
{
"class_name": "class_a",
"source_slots": ["some_slot", "another_slot"],
@@ -43,8 +46,7 @@ class SlotMergingConfig(BaseSettings):
],
)
- # pylint: disable=no-self-argument
- @validator("merge_instructions")
+ @field_validator("merge_instructions")
def validate_merge_instructions(
cls, filtered_merge_instructions: list[SlotMergeInstruction]
) -> list[SlotMergeInstruction]:
@@ -82,8 +84,3 @@ def validate_merge_instructions(
)
return filtered_merge_instructions
-
- class Config:
- """Pydantic config."""
-
- extra = "forbid"
diff --git a/src/metldata/builtin_transformations/merge_slots/models.py b/src/metldata/builtin_transformations/merge_slots/models.py
index 5340c938..c3781fc2 100644
--- a/src/metldata/builtin_transformations/merge_slots/models.py
+++ b/src/metldata/builtin_transformations/merge_slots/models.py
@@ -18,7 +18,7 @@
from typing import Optional
-from pydantic import BaseModel, Field, root_validator
+from pydantic import BaseModel, Field, model_validator
class SlotMergeInstruction(BaseModel):
@@ -28,7 +28,7 @@ class SlotMergeInstruction(BaseModel):
source_slots: list[str] = Field(
...,
description="The slots that should be merged into the target slot.",
- min_items=2,
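+ # pydantic v2 note: min_items was renamed to min_length for list fields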
+ min_length=2,
)
target_slot: str = Field(
..., description="The slot into which the source slots should be merged."
@@ -38,9 +38,8 @@ class SlotMergeInstruction(BaseModel):
description="A description of the target slot.",
)
- # pylint: disable=no-self-argument
- @root_validator()
- def validate_overlapping_slots(cls, values: dict) -> dict:
+ @model_validator(mode="before")
+ def validate_overlapping_slots(cls, values) -> dict:
"""Validate that source and target slots do not overlap."""
source_slots = set(values["source_slots"])
target_slot = values["target_slot"]
diff --git a/src/metldata/event_handling/artifact_events.py b/src/metldata/event_handling/artifact_events.py
index 55ebf987..422f5f3f 100644
--- a/src/metldata/event_handling/artifact_events.py
+++ b/src/metldata/event_handling/artifact_events.py
@@ -16,7 +16,7 @@
"""Logic for handling artifact events."""
-from pydantic import BaseModel, Field, validator
+from pydantic import BaseModel, Field, field_validator
class ArtifactEventConfig(BaseModel):
@@ -31,8 +31,7 @@ class ArtifactEventConfig(BaseModel):
),
)
- # pylint: disable=no-self-argument
- @validator("artifact_topic_prefix")
+ @field_validator("artifact_topic_prefix")
def artifact_topic_prefix_must_not_contain_dots(cls, value: str):
"""Validate that artifact topic prefix does not contain dots."""
if "." in value:
diff --git a/src/metldata/event_handling/event_handling.py b/src/metldata/event_handling/event_handling.py
index 5ef22d84..9b56f235 100644
--- a/src/metldata/event_handling/event_handling.py
+++ b/src/metldata/event_handling/event_handling.py
@@ -25,7 +25,8 @@
from hexkit.custom_types import Ascii, JsonObject
from hexkit.protocols.eventpub import EventPublisherProtocol
from hexkit.protocols.eventsub import EventSubscriberProtocol
-from pydantic import BaseModel, BaseSettings, Field
+from pydantic import BaseModel, Field
+from pydantic_settings import BaseSettings
class FileSystemEventConfig(BaseSettings):
diff --git a/src/metldata/event_handling/submission_events.py b/src/metldata/event_handling/submission_events.py
index a1ed9552..2f5e86c2 100644
--- a/src/metldata/event_handling/submission_events.py
+++ b/src/metldata/event_handling/submission_events.py
@@ -19,7 +19,8 @@
"""
-from pydantic import BaseSettings, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings
class SourceEventConfig(BaseSettings):
diff --git a/src/metldata/load/collect.py b/src/metldata/load/collect.py
index 4a26f264..3b5c5abc 100644
--- a/src/metldata/load/collect.py
+++ b/src/metldata/load/collect.py
@@ -18,7 +18,7 @@
from collections import defaultdict
-from pydantic import Field, validator
+from pydantic import Field, field_validator
from metldata.event_handling.artifact_events import (
ArtifactEventConfig,
@@ -40,8 +40,7 @@ class ArtifactCollectorConfig(ArtifactEventConfig):
),
)
- # pylint: disable=no-self-argument
- @validator("artifact_types")
+ @field_validator("artifact_types")
def artifact_types_must_not_contain_dots(cls, value: list[str]):
"""Validate that artifact types do not contain dots."""
for artifact_type in value:
diff --git a/src/metldata/load/event_publisher.py b/src/metldata/load/event_publisher.py
index f2a4281c..ee309d67 100644
--- a/src/metldata/load/event_publisher.py
+++ b/src/metldata/load/event_publisher.py
@@ -26,7 +26,8 @@
SearchableResourceInfo,
)
from hexkit.protocols.eventpub import EventPublisherProtocol
-from pydantic import BaseSettings, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings
class EventPubTranslatorConfig(BaseSettings):
@@ -36,47 +37,47 @@ class EventPubTranslatorConfig(BaseSettings):
...,
description="Name of the artifact from which the information for outgoing"
+ " change events is derived.",
- example="embedded_public",
+ examples=["embedded_public"],
)
primary_dataset_name: str = Field(
...,
description="Name of the resource class corresponding to the embedded_dataset slot.",
- example="EmbeddedDataset",
+ examples=["EmbeddedDataset"],
)
resource_change_event_topic: str = Field(
...,
description="Name of the topic used for events informing other services about"
+ " resource changes, i.e. deletion or insertion.",
- example="searchable_resources",
+ examples=["searchable_resources"],
)
resource_deletion_event_type: str = Field(
...,
description="Type used for events indicating the deletion of a previously"
+ " existing resource.",
- example="searchable_resource_deleted",
+ examples=["searchable_resource_deleted"],
)
resource_upsertion_type: str = Field(
...,
description="Type used for events indicating the upsert of a resource.",
- example="searchable_resource_upserted",
+ examples=["searchable_resource_upserted"],
)
dataset_change_event_topic: str = Field(
...,
description="Name of the topic announcing, among other things, the list of"
+ " files included in a new dataset.",
- example="metadata_datasets",
+ examples=["metadata_datasets"],
)
dataset_deletion_type: str = Field(
...,
description="Type used for events announcing a new dataset overview.",
- example="dataset_deleted",
+ examples=["dataset_deleted"],
)
dataset_upsertion_type: str = Field(
...,
description="Type used for events announcing a new dataset overview.",
- example="dataset_created",
+ examples=["dataset_created"],
)
@@ -129,7 +130,7 @@ async def process_dataset_deletion(self, *, accession: str):
"""
dataset_id = MetadataDatasetID(accession=accession)
- payload = json.loads(dataset_id.json())
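+ # pydantic v2 note: .json() is deprecated in favor of .model_dump_json()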
+ payload = json.loads(dataset_id.model_dump_json())
await self._provider.publish(
payload=payload,
type_=self._config.dataset_deletion_type,
@@ -146,7 +147,7 @@ async def process_resource_deletion(self, *, accession: str, class_name: str):
accession=accession, class_name=class_name
)
- payload = json.loads(resource_info.json())
+ payload = json.loads(resource_info.model_dump_json())
await self._provider.publish(
payload=payload,
type_=self._config.resource_deletion_event_type,
@@ -161,7 +162,7 @@ async def process_dataset_upsert(
Fires an event that should be processed by the WPS
"""
- payload = json.loads(dataset_overview.json())
+ payload = json.loads(dataset_overview.model_dump_json())
await self._provider.publish(
payload=payload,
type_=self._config.dataset_upsertion_type,
@@ -174,7 +175,7 @@ async def process_resource_upsert(self, *, resource: SearchableResource):
Fires an event that should be processed by MASS
"""
- payload = json.loads(resource.json())
+ payload = json.loads(resource.model_dump_json())
await self._provider.publish(
payload=payload,
type_=self._config.resource_upsertion_type,
diff --git a/src/metldata/model_utils/anchors.py b/src/metldata/model_utils/anchors.py
index 10ab524a..3834ae12 100644
--- a/src/metldata/model_utils/anchors.py
+++ b/src/metldata/model_utils/anchors.py
@@ -21,7 +21,7 @@
from linkml_runtime import SchemaView
from linkml_runtime.linkml_model import SlotDefinition
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field
from metldata.model_utils.essentials import ROOT_CLASS, MetadataModel
from metldata.model_utils.identifiers import get_class_identifiers
@@ -42,6 +42,8 @@ class ClassNotAnchoredError(RuntimeError):
class AnchorPoint(BaseModel):
"""A model for describing an anchor point for the specified target class."""
+ model_config = ConfigDict(frozen=True)
+
target_class: str = Field(..., description="The name of the class to be targeted.")
identifier_slot: str = Field(
...,
@@ -56,11 +58,6 @@ class AnchorPoint(BaseModel):
),
)
- class Config:
- """Pydantic Configs."""
-
- frozen = True
-
def check_root_slot(slot: SlotDefinition):
"""Make sure that the given root slot is a valid anchor point. Validates that the
diff --git a/src/metldata/model_utils/config.py b/src/metldata/model_utils/config.py
index b9f2a1a2..19f72620 100644
--- a/src/metldata/model_utils/config.py
+++ b/src/metldata/model_utils/config.py
@@ -17,7 +17,8 @@
from pathlib import Path
-from pydantic import BaseSettings, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings
from metldata.model_utils.assumptions import check_basic_model_assumption
from metldata.model_utils.essentials import MetadataModel
diff --git a/src/metldata/submission_registry/event_publisher.py b/src/metldata/submission_registry/event_publisher.py
index 1feca5c6..e3554b5e 100644
--- a/src/metldata/submission_registry/event_publisher.py
+++ b/src/metldata/submission_registry/event_publisher.py
@@ -56,6 +56,6 @@ def publish_submission(self, submission: models.Submission):
topic=self._config.source_event_topic,
type_=self._config.source_event_type,
key=submission.id,
- payload=json.loads(payload.json()),
+ payload=json.loads(payload.model_dump_json()),
)
)
diff --git a/src/metldata/submission_registry/models.py b/src/metldata/submission_registry/models.py
index 4a21debe..65322d61 100644
--- a/src/metldata/submission_registry/models.py
+++ b/src/metldata/submission_registry/models.py
@@ -21,7 +21,7 @@
from typing import Optional
from ghga_service_commons.utils.utc_dates import DateTimeUTC, now_as_utc
-from pydantic import BaseModel, Field, validator
+from pydantic import BaseModel, Field, field_validator
from typing_extensions import TypeAlias
from metldata.custom_types import SubmissionContent
@@ -105,8 +105,7 @@ def current_status(self) -> SubmissionStatus:
return sorted_history[-1].new_status
- # pylint: disable=no-self-argument
- @validator("accession_map")
+ @field_validator("accession_map")
def check_accession_uniqueness(cls, value: AccessionMap) -> AccessionMap:
"""Check that no accessions are re-used accross classes."""
total_resources = 0
diff --git a/src/metldata/submission_registry/submission_registry.py b/src/metldata/submission_registry/submission_registry.py
index 957b4a89..283daafe 100644
--- a/src/metldata/submission_registry/submission_registry.py
+++ b/src/metldata/submission_registry/submission_registry.py
@@ -112,7 +112,7 @@ def init_submission(self, *, header: models.SubmissionHeader) -> str:
submission content is still empty.
"""
id_ = generate_submission_id()
- submission_creation = models.Submission(id=id_, **header.dict())
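+ # pydantic v2 note: .dict() is deprecated in favor of .model_dump()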
+ submission_creation = models.Submission(id=id_, **header.model_dump())
self._submission_store.insert_new(submission=submission_creation)
return id_
@@ -156,7 +156,7 @@ def upsert_submission_content(
anchor_points_by_target=self._anchor_points_by_target,
)
- updated_submission = submission.copy(
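+ # pydantic v2 note: .copy() is deprecated in favor of .model_copy()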
+ updated_submission = submission.model_copy(
update={"content": content, "accession_map": updated_accession_map}
)
self._submission_store.update_existing(submission=updated_submission)
@@ -186,7 +186,7 @@ def complete_submission(self, *, id_: str) -> None:
status_change = models.StatusChange(
timestamp=now_as_utc(), new_status=models.SubmissionStatus.COMPLETED
)
- updated_submission = submission.copy(
+ updated_submission = submission.model_copy(
update={
"status_history": submission.status_history # noqa: RUF005
+ (status_change,)
diff --git a/src/metldata/submission_registry/submission_store.py b/src/metldata/submission_registry/submission_store.py
index e80cd0c9..f23a838a 100644
--- a/src/metldata/submission_registry/submission_store.py
+++ b/src/metldata/submission_registry/submission_store.py
@@ -19,7 +19,8 @@
import json
from pathlib import Path
-from pydantic import BaseSettings, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings
from metldata.submission_registry import models
@@ -67,7 +68,7 @@ def _save(self, *, submission: models.Submission) -> None:
"""Save a submission to a JSON file."""
json_path = self._get_submission_json_path(submission_id=submission.id)
with open(json_path, "w", encoding="utf-8") as file:
- file.write(submission.json(indent=4))
+ file.write(submission.model_dump_json(indent=4))
def exists(self, *, submission_id: str) -> bool:
"""Check whether a submission with the specified ID exists."""
diff --git a/src/metldata/transform/artifact_publisher.py b/src/metldata/transform/artifact_publisher.py
index 39a198d5..073afa31 100644
--- a/src/metldata/transform/artifact_publisher.py
+++ b/src/metldata/transform/artifact_publisher.py
@@ -52,7 +52,7 @@ def __init__(
async def publish_artifact(self, artifact_event: ArtifactEvent):
"""Publish an artifact as submission event"""
- payload = json.loads(artifact_event.payload.json())
+ payload = json.loads(artifact_event.payload.model_dump_json())
topic = get_artifact_topic(
artifact_topic_prefix=self._config.artifact_topic_prefix,
artifact_type=artifact_event.artifact_type,
diff --git a/src/metldata/transform/base.py b/src/metldata/transform/base.py
index 567e8db2..e2d37a4c 100644
--- a/src/metldata/transform/base.py
+++ b/src/metldata/transform/base.py
@@ -22,7 +22,14 @@
from graphlib import CycleError, TopologicalSorter
from typing import Callable, Generic, Optional, TypeVar
-from pydantic import BaseModel, Field, create_model, root_validator, validator
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ Field,
+ create_model,
+ field_validator,
+ model_validator,
+)
from metldata.custom_types import Json
from metldata.event_handling.models import SubmissionAnnotation
@@ -97,7 +104,7 @@ class TransformationDefinition(Generic[Config]):
+ " MetadataModelTransformationError if the transformation fails."
),
)
- metadata_transformer_factory: type[MetadataTransformer[Config]] = Field(
+ metadata_transformer_factory: type[MetadataTransformer] = Field(
...,
description=(
"A class for transforming metadata. Raises a MetadataTransformationError"
@@ -113,6 +120,7 @@ class WorkflowConfig(BaseModel, ABC):
class WorkflowStepBase(BaseModel, ABC):
"""A base class for workflow steps."""
+ model_config = ConfigDict(frozen=True)
description: str = Field(..., description="A description of the step.")
input: Optional[str] = Field(
...,
@@ -122,11 +130,6 @@ class WorkflowStepBase(BaseModel, ABC):
),
)
- class Config:
- """Config for the workflow step."""
-
- frozen = True
-
class WorkflowStep(WorkflowStepBase):
"""A single step in a transformation workflow."""
@@ -140,6 +143,7 @@ class WorkflowStep(WorkflowStepBase):
class WorkflowDefinition(BaseModel):
"""A definition of a transformation workflow."""
+ model_config = ConfigDict(frozen=True)
description: str = Field(..., description="A description of the workflow.")
steps: dict[str, WorkflowStep] = Field(
...,
@@ -158,7 +162,7 @@ class WorkflowDefinition(BaseModel):
)
# pylint: disable=no-self-argument
- @validator("steps", pre=False)
+ @field_validator("steps", mode="after")
def validate_step_references(
cls, steps: dict[str, WorkflowStep]
) -> dict[str, WorkflowStep]:
@@ -188,13 +192,13 @@ def validate_step_references(
return steps
- @root_validator(pre=False)
+ @model_validator(mode="after")
def validate_artifact_references(cls, values):
"""Validate that artifacts reference existing workflow steps."""
- steps = values.get("steps")
+ steps = values.steps
if steps is None:
raise ValueError("Steps are undefined.")
- artifacts = values.get("artifacts")
+ artifacts = values.artifacts
if artifacts is None:
raise ValueError("Artifacts are undefined.")
@@ -239,8 +243,3 @@ def step_order(self) -> list[str]:
return list(topological_sorter.static_order())
except CycleError as exc:
raise RuntimeError("Step definitions imply a circular dependency.") from exc
-
- class Config:
- """Config for the workflow step."""
-
- frozen = True
diff --git a/src/metldata/transform/handling.py b/src/metldata/transform/handling.py
index e59d4013..8a542a72 100644
--- a/src/metldata/transform/handling.py
+++ b/src/metldata/transform/handling.py
@@ -16,7 +16,7 @@
"""Logic for handling Transformation."""
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
from metldata.custom_types import Json
from metldata.event_handling.models import SubmissionAnnotation
@@ -115,13 +115,9 @@ def transform_metadata(
class ResolvedWorkflowStep(WorkflowStepBase):
"""A resolved workflow step contains a transformation handler."""
+ model_config = ConfigDict(arbitrary_types_allowed=True)
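+ # TransformationHandler is a plain class, not a pydantic model, so arbitrary types must be allowed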
transformation_handler: TransformationHandler
- class Config:
- """Config for ResolvedWorkflowStep."""
-
- arbitrary_types_allowed = True
-
class ResolvedWorkflow(WorkflowDefinition):
"""A resolved workflow contains a list of resolved workflow steps."""
@@ -139,7 +135,7 @@ def check_workflow_config(
Raises:
WorkflowConfigMismatchError:
"""
- if workflow_config.schema_json() == workflow_definition.schema_json():
+ if not isinstance(workflow_config, workflow_definition.config_cls):
raise WorkflowConfigMismatchError(
workflow_definition=workflow_definition, workflow_config=workflow_config
)
diff --git a/src/metldata/transform/main.py b/src/metldata/transform/main.py
index 31ab5d7d..1495d8d8 100644
--- a/src/metldata/transform/main.py
+++ b/src/metldata/transform/main.py
@@ -56,7 +56,7 @@ async def run_workflow_on_source_event(
for artifact_type, artifact_content in artifacts.items():
artifact_event = ArtifactEvent(
artifact_type=artifact_type,
- payload=source_event.copy(update={"content": artifact_content}),
+ payload=source_event.model_copy(update={"content": artifact_content}),
)
await publish_artifact_func(artifact_event)
diff --git a/tests/artifact_rest/test_api_factory.py b/tests/artifact_rest/test_api_factory.py
index 361de5f6..45f20f32 100644
--- a/tests/artifact_rest/test_api_factory.py
+++ b/tests/artifact_rest/test_api_factory.py
@@ -166,7 +166,11 @@ async def test_get_stats_endpoint(
observed_stats = response.json()
assert isinstance(observed_stats, dict)
- observed_created = DateTimeUTC.fromisoformat(observed_stats.pop("created"))
+ raw_observed_created = observed_stats.pop("created")
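+ # datetime.fromisoformat() cannot parse a trailing "Z" before Python 3.11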
+ if isinstance(raw_observed_created, str) and raw_observed_created.endswith("Z"):
+ raw_observed_created = raw_observed_created.replace("Z", "+00:00")
+
+ observed_created = DateTimeUTC.fromisoformat(raw_observed_created)
assert abs((now_as_utc() - observed_created).seconds) < 5
expected_stats = {
diff --git a/tests/builtin_tranformations/aggregate/conftest.py b/tests/builtin_tranformations/aggregate/conftest.py
index 1bcfd0c1..cc4999c5 100644
--- a/tests/builtin_tranformations/aggregate/conftest.py
+++ b/tests/builtin_tranformations/aggregate/conftest.py
@@ -54,7 +54,7 @@ def data_complete_1_resolved_public() -> Json:
@fixture
def config() -> AggregateConfig:
"""A working config"""
- return AggregateConfig.parse_obj(
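+ # pydantic v2 note: parse_obj() is deprecated in favor of model_validate()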
+ return AggregateConfig.model_validate(
load_yaml(Path("transformations/aggregate/default/config.yaml"))
)
@@ -62,6 +62,6 @@ def config() -> AggregateConfig:
@fixture
def invalid_config() -> AggregateConfig:
"""An invalid config with conflicting output paths."""
- return AggregateConfig.parse_obj(
+ return AggregateConfig.model_validate(
load_yaml(Path("transformations/aggregate/config_invalid.yaml"))
)
diff --git a/tests/fixtures/event_handling.py b/tests/fixtures/event_handling.py
index a6500c8f..bfc7be48 100644
--- a/tests/fixtures/event_handling.py
+++ b/tests/fixtures/event_handling.py
@@ -62,8 +62,8 @@ def expect_events(self, expected_events: list[Event]) -> None:
)
# hashable versions for comparison:
- observed_event_jsons = {event.json() for event in observed_events}
- expected_event_jsons = {event.json() for event in expected_events}
+ observed_event_jsons = {event.model_dump_json() for event in observed_events}
+ expected_event_jsons = {event.model_dump_json() for event in expected_events}
if expected_event_jsons != observed_event_jsons:
raise EventExpectationMismatch(
diff --git a/tests/fixtures/load/config.py b/tests/fixtures/load/config.py
index cef7ea88..68e6a373 100644
--- a/tests/fixtures/load/config.py
+++ b/tests/fixtures/load/config.py
@@ -18,7 +18,7 @@
from typing import Optional, Union
import yaml
-from pydantic.env_settings import BaseSettings
+from pydantic_settings import BaseSettings
from metldata.load.config import ArtifactLoaderAPIConfig
from tests.fixtures.load.utils import BASE_DIR
@@ -37,7 +37,7 @@ def get_config(
if sources is not None:
for source in sources:
if isinstance(source, BaseSettings):
- sources_dict.update(**source.dict())
+ sources_dict.update(**source.model_dump())
else:
sources_dict.update(**source)
diff --git a/tests/load/test_client.py b/tests/load/test_client.py
index 9ea88af2..cfbd1c33 100644
--- a/tests/load/test_client.py
+++ b/tests/load/test_client.py
@@ -51,7 +51,7 @@ async def test_upload_artifacts_via_http_api(
artifact_types=list(EXAMPLE_ARTIFACTS.keys()),
artifact_topic_prefix="artifact",
loader_api_root="http://localhost:8000",
- **file_system_event_fixture.config.dict(),
+ **file_system_event_fixture.config.model_dump(),
)
# publish artifacts:
diff --git a/tests/submission_registry/test_event_publisher.py b/tests/submission_registry/test_event_publisher.py
index 8ad42b3c..00604327 100644
--- a/tests/submission_registry/test_event_publisher.py
+++ b/tests/submission_registry/test_event_publisher.py
@@ -58,7 +58,7 @@ def check_source_events(
annotation=SubmissionAnnotation(
accession_map=expected_submission.accession_map
),
- ).json()
+ ).model_dump_json()
),
)
for expected_submission in expected_submissions
diff --git a/tests/submission_registry/test_submission_store.py b/tests/submission_registry/test_submission_store.py
index 920ba966..23968e37 100644
--- a/tests/submission_registry/test_submission_store.py
+++ b/tests/submission_registry/test_submission_store.py
@@ -62,7 +62,7 @@ def test_happy(config_sub_fixture: SubmissionConfig): # noqa: F811
assert EXAMPLE_SUBMISSION == submission_queried
    # update the submission:
- submission_updated = EXAMPLE_SUBMISSION.copy(
+ submission_updated = EXAMPLE_SUBMISSION.model_copy(
update={"title": "updated test submission"}
)
submission_store.update_existing(submission=submission_updated)
@@ -74,7 +74,7 @@ def test_happy(config_sub_fixture: SubmissionConfig): # noqa: F811
assert submission_updated_queried == submission_updated
# test getting all existing submission IDs
- submission_2 = EXAMPLE_SUBMISSION.copy(update={"id": "testsubmission002"})
+ submission_2 = EXAMPLE_SUBMISSION.model_copy(update={"id": "testsubmission002"})
submission_store.insert_new(submission=submission_2)
assert submission_store.get_all_submission_ids() == [
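`Model.copy(update=...)` becomes `model_copy(update=...)` in v2; as in v1, the updated values bypass validation, so they should already be of the right type. A sketch with a hypothetical submission model:

    from pydantic import BaseModel

    class Submission(BaseModel):  # hypothetical stand-in for the real model
        id: str
        title: str

    original = Submission(id="testsubmission001", title="test submission")
    updated = original.model_copy(update={"title": "updated test submission"})
    assert updated.id == original.id and updated.title != original.title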
diff --git a/tests/test_event_handling.py b/tests/test_event_handling.py
index 5499dfb0..4548613d 100644
--- a/tests/test_event_handling.py
+++ b/tests/test_event_handling.py
@@ -21,7 +21,7 @@
import pytest
from hexkit.custom_types import Ascii, JsonObject
from hexkit.protocols.eventsub import EventSubscriberProtocol
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field
from metldata.event_handling.event_handling import FileSystemEventSubscriber
from tests.fixtures.event_handling import (
@@ -44,15 +44,11 @@
class ConsumedEvent(BaseModel):
"""Consumed event without the key."""
+ model_config = ConfigDict(frozen=True)
topic: str
type_: str
payload: str = Field(..., description="JSON string of the event payload.")
- class Config:
- """Pydantic model configuration."""
-
- frozen = True
-
@pytest.mark.asyncio
async def test_pub_sub_workflow(
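The nested `class Config` is replaced in v2 by a `model_config = ConfigDict(...)` attribute; with `frozen=True`, instances become immutable and hashable, which is what lets `ConsumedEvent` objects be collected into sets. A minimal sketch:

    from pydantic import BaseModel, ConfigDict

    class FrozenEvent(BaseModel):  # hypothetical, mirrors ConsumedEvent
        model_config = ConfigDict(frozen=True)
        topic: str
        payload: str

    event = FrozenEvent(topic="t", payload="{}")
    assert event in {event}  # frozen models are hashable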
diff --git a/tests/transform/test_base.py b/tests/transform/test_base.py
index d835d62f..a3612c96 100644
--- a/tests/transform/test_base.py
+++ b/tests/transform/test_base.py
@@ -104,16 +104,17 @@ def test_workflow_definition_config_cls():
"""Test that the config_cls of the WorkflowDefinition generates a concatenated
config class correctly."""
- config_fields = EXAMPLE_WORKFLOW_DEFINITION.config_cls.__fields__
+ config_fields = EXAMPLE_WORKFLOW_DEFINITION.config_cls.model_fields
assert "infer_references" in config_fields
assert "delete_slots" in config_fields
assert (
- config_fields["infer_references"].type_
+ config_fields["infer_references"].annotation
== REFERENCE_INFERENCE_TRANSFORMATION.config_cls
)
assert (
- config_fields["delete_slots"].type_ == SLOT_DELETION_TRANSFORMATION.config_cls
+ config_fields["delete_slots"].annotation
+ == SLOT_DELETION_TRANSFORMATION.config_cls
)
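Field introspection also changed: v1's `Model.__fields__` entries exposed the declared type as `.type_`, while v2's `Model.model_fields` entries expose it as `.annotation`. A sketch with hypothetical config classes:

    from pydantic import BaseModel

    class SlotDeletionConfig(BaseModel):  # hypothetical nested config
        slots_to_delete: list[str] = []

    class WorkflowConfig(BaseModel):  # hypothetical concatenated config
        delete_slots: SlotDeletionConfig

    fields = WorkflowConfig.model_fields
    assert "delete_slots" in fields
    assert fields["delete_slots"].annotation == SlotDeletionConfig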
diff --git a/tests/transform/test_main.py b/tests/transform/test_main.py
index fc70a79a..d04ad5d2 100644
--- a/tests/transform/test_main.py
+++ b/tests/transform/test_main.py
@@ -47,7 +47,7 @@ async def test_run_workflow_on_all_source_events(
artifact_topic_prefix="artifacts",
source_event_topic="source-events",
source_event_type="source-event",
- **file_system_event_fixture.config.dict(),
+ **file_system_event_fixture.config.model_dump(),
)
submission_id = "some-submission-id"
@@ -60,7 +60,7 @@ async def test_run_workflow_on_all_source_events(
submission_id=submission_id,
content=EXAMPLE_WORKFLOW_TEST_CASE.original_metadata,
annotation=EXAMPLE_WORKFLOW_TEST_CASE.submission_annotation,
- ).json()
+ ).model_dump_json()
),
)
await file_system_event_fixture.publish_events(events=[source_event])
@@ -78,7 +78,7 @@ async def test_run_workflow_on_all_source_events(
submission_id=submission_id,
content=artifact,
annotation=EXAMPLE_WORKFLOW_TEST_CASE.submission_annotation,
- ).json()
+ ).model_dump_json()
),
)
for artifact_type, artifact in EXAMPLE_WORKFLOW_TEST_CASE.artifact_metadata.items()