From 11c91633e5849f03d03a4744128507e0d1180e92 Mon Sep 17 00:00:00 2001 From: Vinicius Vaz Date: Wed, 1 Nov 2023 07:28:02 -0300 Subject: [PATCH 1/6] start updating airflow and pydantic dependencies --- Dockerfile-airflow-domino.dev | 2 +- Dockerfile-airflow-domino.prod | 2 +- docker-compose-dev.yaml | 2 +- pyproject.toml | 8 ++++---- rest/requirements.txt | 2 +- rest/schemas/requests/piece_repository.py | 4 ++-- rest/schemas/requests/workflow.py | 2 +- rest/services/workflow_service.py | 4 ++-- src/domino/base_piece.py | 4 ++-- src/domino/cli/utils/docker-compose-without-database.yaml | 2 +- src/domino/cli/utils/docker-compose.yaml | 2 +- src/domino/custom_operators/docker_operator.py | 2 +- src/domino/custom_operators/k8s_operator.py | 6 +++--- src/domino/schemas/container_resources.py | 4 ++-- src/domino/schemas/piece_metadata.py | 2 +- src/domino/schemas/shared_storage.py | 4 ++-- 16 files changed, 26 insertions(+), 26 deletions(-) diff --git a/Dockerfile-airflow-domino.dev b/Dockerfile-airflow-domino.dev index aa895ea1..68192102 100644 --- a/Dockerfile-airflow-domino.dev +++ b/Dockerfile-airflow-domino.dev @@ -1,4 +1,4 @@ -FROM apache/airflow:2.6.3-python3.9 +FROM apache/airflow:2.7.2-python3.9 ENV PYTHONDONTWRITEBYTECODE=1 ENV PYTHONUNBUFFERED=1 diff --git a/Dockerfile-airflow-domino.prod b/Dockerfile-airflow-domino.prod index 63b2dc6c..e800d1b4 100644 --- a/Dockerfile-airflow-domino.prod +++ b/Dockerfile-airflow-domino.prod @@ -1,4 +1,4 @@ -FROM apache/airflow:2.6.3-python3.9 +FROM apache/airflow:2.7.2-python3.9 ENV PYTHONDONTWRITEBYTECODE=1 ENV PYTHONUNBUFFERED=1 diff --git a/docker-compose-dev.yaml b/docker-compose-dev.yaml index 7ef9d7bf..acd1b4fa 100644 --- a/docker-compose-dev.yaml +++ b/docker-compose-dev.yaml @@ -4,7 +4,7 @@ version: '3.8' x-airflow-common: &airflow-common - image: apache/airflow:2.6.3-python3.9 + image: apache/airflow:2.7.2-python3.9 # build: . 
environment: &airflow-common-env diff --git a/pyproject.toml b/pyproject.toml index 3d5e71de..16f7c96b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,11 +18,11 @@ classifiers = [ ] dependencies = [ - "pydantic==1.10.11", + "pydantic==2.4.2", "tomli==2.0.1", "tomli-w==1.0.0", "PyYAML==6.0.1", - "jsonschema==4.17.3", + "jsonschema==4.18.0", "click==8.1.3", "rich==12.6.0", "colorama==0.4.6", @@ -71,7 +71,7 @@ cli = [ "bottle==0.12.25" ] airflow = [ - "apache-airflow==2.6.3", + "apache-airflow==2.7.2", "apache-airflow-providers-cncf-kubernetes==5.0.0", "apache-airflow-providers-docker==3.6.0", ] @@ -83,7 +83,7 @@ full = [ "PyGithub==1.55", "docker>=6.0.1", "kubernetes==23.6.0", - "apache-airflow==2.6.3", + "apache-airflow==2.7.2", "apache-airflow-providers-cncf-kubernetes==5.0.0", "apache-airflow-providers-docker==3.6.0", ] diff --git a/rest/requirements.txt b/rest/requirements.txt index 6295a34c..f34bb4eb 100644 --- a/rest/requirements.txt +++ b/rest/requirements.txt @@ -1,6 +1,6 @@ alembic==1.12.0 bcrypt==3.2.2 -fastapi==0.103.0 +fastapi==0.104.1 fastapi-utils==0.2.1 Jinja2==2.11.3 psycopg2-binary==2.9.3 diff --git a/rest/schemas/requests/piece_repository.py b/rest/schemas/requests/piece_repository.py index 86e133a3..a2d17279 100644 --- a/rest/schemas/requests/piece_repository.py +++ b/rest/schemas/requests/piece_repository.py @@ -21,8 +21,8 @@ class CreateRepositoryRequest(BaseModel): workspace_id: int = Field(description='Workspace id to create repository') source: RepositorySourceRequestEnum = Field(description="Source of the repository", default=RepositorySource.github.value) path: str = Field(..., description="Path to the repository.") - version: str = Field(regex=r'((^\d+\.\d+\.\d+$))', description="Version of the repository.") + version: str = Field(pattern=r'((^\d+\.\d+\.\d+$))', description="Version of the repository.") url: str = Field(..., description="Url of the repository.") class PatchRepositoryRequest(BaseModel): - version: str = 
Field(regex=r'((^\d+\.\d+\.\d+$))', description="Version of the repository.") \ No newline at end of file + version: str = Field(pattern=r'((^\d+\.\d+\.\d+$))', description="Version of the repository.") \ No newline at end of file diff --git a/rest/schemas/requests/workflow.py b/rest/schemas/requests/workflow.py index c3dd4fa3..68a80ae4 100644 --- a/rest/schemas/requests/workflow.py +++ b/rest/schemas/requests/workflow.py @@ -36,7 +36,7 @@ class WorkflowBaseSettings(BaseModel): name: str = Field( description="Workflow name", example="workflow_name", - regex=r"^[\w]*$", + pattern=r"^[\w]*$", ) start_date: str = Field(alias="startDateTime") select_end_date: Optional[SelectEndDate] = Field(alias="selectEndDate", default=SelectEndDate.never) diff --git a/rest/services/workflow_service.py b/rest/services/workflow_service.py index 518ab21f..043f2560 100644 --- a/rest/services/workflow_service.py +++ b/rest/services/workflow_service.py @@ -79,7 +79,7 @@ def create_workflow( uuid_name=workflow_id, created_at=datetime.utcnow(), schema={}, - ui_schema=body.ui_schema.dict(), + ui_schema=body.ui_schema.model_dump(), created_by=auth_context.user_id, last_changed_at=datetime.utcnow(), start_date=body.workflow.start_date, @@ -90,7 +90,7 @@ def create_workflow( ) workflow = self.workflow_repository.create(new_workflow) - data_dict = body.dict() + data_dict = body.model_dump() data_dict['workflow']['id'] = workflow_id try: diff --git a/src/domino/base_piece.py b/src/domino/base_piece.py index d83e4da6..52b06324 100644 --- a/src/domino/base_piece.py +++ b/src/domino/base_piece.py @@ -150,8 +150,8 @@ def format_xcom(self, output_obj: pydantic.BaseModel) -> dict: Returns: dict: XCOM dictionary """ - # xcom_obj = output_obj.dict() - xcom_obj = json.loads(output_obj.json()) + # xcom_obj = output_obj.model_dump() + xcom_obj = json.loads(output_obj.model_dump_json()) if not isinstance(xcom_obj, dict): self.logger.info(f"Piece {self.__class__.__name__} is not returning a valid XCOM object. 
Auto-generating a base XCOM for it...") xcom_obj = dict() diff --git a/src/domino/cli/utils/docker-compose-without-database.yaml b/src/domino/cli/utils/docker-compose-without-database.yaml index 8d5a03e6..81cd9d35 100644 --- a/src/domino/cli/utils/docker-compose-without-database.yaml +++ b/src/domino/cli/utils/docker-compose-without-database.yaml @@ -4,7 +4,7 @@ version: '3.8' x-airflow-common: &airflow-common - image: apache/airflow:2.6.3-python3.9 + image: apache/airflow:2.7.2-python3.9 environment: &airflow-common-env AIRFLOW__CORE__EXECUTOR: CeleryExecutor diff --git a/src/domino/cli/utils/docker-compose.yaml b/src/domino/cli/utils/docker-compose.yaml index e6213bc9..9e95b43d 100644 --- a/src/domino/cli/utils/docker-compose.yaml +++ b/src/domino/cli/utils/docker-compose.yaml @@ -4,7 +4,7 @@ version: '3.8' x-airflow-common: &airflow-common - image: apache/airflow:2.6.3-python3.9 + image: apache/airflow:2.7.2-python3.9 environment: &airflow-common-env AIRFLOW__CORE__EXECUTOR: CeleryExecutor diff --git a/src/domino/custom_operators/docker_operator.py b/src/domino/custom_operators/docker_operator.py index fb80f64d..a18e1d92 100644 --- a/src/domino/custom_operators/docker_operator.py +++ b/src/domino/custom_operators/docker_operator.py @@ -40,7 +40,7 @@ def __init__( "dag_id": dag_id, }), "DOMINO_RUN_PIECE_KWARGS": str(piece_input_kwargs), - "DOMINO_WORKFLOW_SHARED_STORAGE": self.workflow_shared_storage.json() if self.workflow_shared_storage else "", + "DOMINO_WORKFLOW_SHARED_STORAGE": self.workflow_shared_storage.model_dump_json() if self.workflow_shared_storage else "", "AIRFLOW_CONTEXT_EXECUTION_DATETIME": "{{ dag_run.logical_date | ts_nodash }}", "AIRFLOW_CONTEXT_DAG_RUN_ID": "{{ run_id }}", } diff --git a/src/domino/custom_operators/k8s_operator.py b/src/domino/custom_operators/k8s_operator.py index 194dc468..74f16253 100644 --- a/src/domino/custom_operators/k8s_operator.py +++ b/src/domino/custom_operators/k8s_operator.py @@ -49,7 +49,7 @@ def __init__( 
"dag_id": dag_id, }), "DOMINO_RUN_PIECE_KWARGS": str(piece_input_kwargs), - "DOMINO_WORKFLOW_SHARED_STORAGE": workflow_shared_storage.json() if workflow_shared_storage else "", + "DOMINO_WORKFLOW_SHARED_STORAGE": workflow_shared_storage.model_dump_json() if workflow_shared_storage else "", "AIRFLOW_CONTEXT_EXECUTION_DATETIME": "{{ dag_run.logical_date | ts_nodash }}", "AIRFLOW_CONTEXT_DAG_RUN_ID": "{{ run_id }}", } @@ -62,7 +62,7 @@ def __init__( limits={"cpu": "100m", "memory": "128Mi"}, use_gpu=False, ) - basic_container_resources = base_container_resources_model.dict() + basic_container_resources = base_container_resources_model.model_dump() updated_container_resources = dict_deep_update(basic_container_resources, container_resources) use_gpu = updated_container_resources.pop("use_gpu", False) if use_gpu: @@ -307,7 +307,7 @@ def add_shared_storage_sidecar(self, pod: k8s.V1Pod) -> k8s.V1Pod: self.workflow_shared_storage.source = self.workflow_shared_storage.source.name sidecar_env_vars = { - 'DOMINO_WORKFLOW_SHARED_STORAGE': self.workflow_shared_storage.json() if self.workflow_shared_storage else "", + 'DOMINO_WORKFLOW_SHARED_STORAGE': self.workflow_shared_storage.model_dump_json() if self.workflow_shared_storage else "", 'DOMINO_WORKFLOW_SHARED_STORAGE_SECRETS': str(storage_piece_secrets), 'DOMINO_INSTANTIATE_PIECE_KWARGS': str({ "deploy_mode": self.deploy_mode, diff --git a/src/domino/schemas/container_resources.py b/src/domino/schemas/container_resources.py index 4b1da42a..ff7c8850 100644 --- a/src/domino/schemas/container_resources.py +++ b/src/domino/schemas/container_resources.py @@ -2,8 +2,8 @@ class SystemRequirementsModel(BaseModel): - cpu: str = Field(regex=r"^\d+\.*\d*m$") - memory: str = Field(regex=r"^\d+\.*\d*Mi$") + cpu: str = Field(pattern=r"^\d+\.*\d*m$") + memory: str = Field(pattern=r"^\d+\.*\d*Mi$") class ContainerResourcesModel(BaseModel): diff --git a/src/domino/schemas/piece_metadata.py b/src/domino/schemas/piece_metadata.py index 
b9943a14..0f57f4e7 100644 --- a/src/domino/schemas/piece_metadata.py +++ b/src/domino/schemas/piece_metadata.py @@ -46,7 +46,7 @@ class PieceMetadata(BaseModel): name: str = Field( description="Piece name", example="ExamplePiece", - # regex= # TODO - regex for *Piece + # pattern= # TODO - regex for *Piece ) description: str = Field( description="Description of this Piece", diff --git a/src/domino/schemas/shared_storage.py b/src/domino/schemas/shared_storage.py index aa2d35e3..348f804a 100644 --- a/src/domino/schemas/shared_storage.py +++ b/src/domino/schemas/shared_storage.py @@ -32,12 +32,12 @@ class WorkflowSharedStorage(BaseModel): class LocalSharedStorage(WorkflowSharedStorage): - source = StorageSource.local + source: StorageSource = StorageSource.local storage_piece_name: str = "LocalStoragePiece" # TODO to be implemented class AwsS3SharedStorage(WorkflowSharedStorage): - source = StorageSource.aws_s3 + source: StorageSource = StorageSource.aws_s3 bucket: str = Field( description="The name of the bucket to be used as the root of the storage source." 
) From 824cafb4b859307e3b12aa63ff5ac13878b13795 Mon Sep 17 00:00:00 2001 From: Vinicius Vaz Date: Wed, 1 Nov 2023 09:15:42 -0300 Subject: [PATCH 2/6] migrating rest to pydantic 2 --- rest/core/settings.py | 99 ++++++++++------------ rest/main.py | 1 - rest/requirements.txt | 5 +- rest/routers/piece_repository_router.py | 6 +- rest/schemas/context/auth_context.py | 2 +- rest/schemas/errors/base.py | 12 +-- rest/schemas/requests/piece.py | 2 +- rest/schemas/requests/piece_repository.py | 10 +-- rest/schemas/requests/workflow.py | 50 +++++------ rest/schemas/responses/piece.py | 16 ++-- rest/schemas/responses/piece_repository.py | 12 +-- rest/schemas/responses/secret.py | 2 +- rest/schemas/responses/workflow.py | 88 +++++++++---------- 13 files changed, 146 insertions(+), 159 deletions(-) diff --git a/rest/core/settings.py b/rest/core/settings.py index e2409914..76df30bc 100644 --- a/rest/core/settings.py +++ b/rest/core/settings.py @@ -1,30 +1,17 @@ -from pydantic import BaseSettings, validators -from typing import Optional +from pydantic_settings import BaseSettings +from typing import Optional, Union import os from database.models.enums import RepositorySource -def empty_to_none(v: str) -> Optional[str]: - if v == '': - return None - return v - - -class EmptyStrToNone(str): - @classmethod - def __get_validators__(cls): - yield validators.str_validator - yield empty_to_none - - class Settings(BaseSettings): # General app config - VERSION = "0.1.0" - APP_TITLE = "Domino REST api" + VERSION: str = "0.1.0" + APP_TITLE: str = "Domino REST api" # Database config - DB_URL = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( + DB_URL: str = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( user=os.environ.get("DOMINO_DB_USER", "postgres"), password=os.environ.get("DOMINO_DB_PASSWORD", "postgres"), host=os.environ.get("DOMINO_DB_HOST", "localhost"), @@ -33,42 +20,44 @@ class Settings(BaseSettings): ) # Auth config - AUTH_SECRET_KEY = 
os.environ.get('AUTH_SECRET_KEY', "SECRET") - AUTH_ALGORITHM = os.environ.get('AUTH_ALGORITHM', "HS256") - AUTH_ACCESS_TOKEN_EXPIRE_MINUTES = 600 + AUTH_SECRET_KEY: str = os.environ.get('AUTH_SECRET_KEY', "SECRET") + AUTH_ALGORITHM: str = os.environ.get('AUTH_ALGORITHM', "HS256") + AUTH_ACCESS_TOKEN_EXPIRE_MINUTES: int = 600 # Secrets config - SECRETS_SECRET_KEY = os.environ.get('SECRETS_SECRET_KEY', b'j1DsRJ-ehxU_3PbXW0c_-U4nTOx3knRB4zzWguMVaio=') - GITHUB_TOKEN_SECRET_KEY = os.environ.get('GITHUB_TOKEN_SECRET_KEY', b'j1DsRJ-ehxU_3PbXW0c_-U4nTOx3knRB4zzWguMVaio=') + SECRETS_SECRET_KEY: str = os.environ.get('SECRETS_SECRET_KEY', b'j1DsRJ-ehxU_3PbXW0c_-U4nTOx3knRB4zzWguMVaio=') + GITHUB_TOKEN_SECRET_KEY: str = os.environ.get('GITHUB_TOKEN_SECRET_KEY', b'j1DsRJ-ehxU_3PbXW0c_-U4nTOx3knRB4zzWguMVaio=') # Used by github rest client - DOMINO_GITHUB_ACCESS_TOKEN_WORKFLOWS = os.environ.get('DOMINO_GITHUB_ACCESS_TOKEN_WORKFLOWS', '') + DOMINO_GITHUB_ACCESS_TOKEN_WORKFLOWS: str = os.environ.get('DOMINO_GITHUB_ACCESS_TOKEN_WORKFLOWS', '') # Workflows storage - DOMINO_GITHUB_WORKFLOWS_REPOSITORY = os.environ.get('DOMINO_GITHUB_WORKFLOWS_REPOSITORY', "Tauffer-Consulting/domino_workflows_dev") - DOMINO_LOCAL_WORKFLOWS_REPOSITORY = '/opt/airflow/dags' + DOMINO_GITHUB_WORKFLOWS_REPOSITORY: str = os.environ.get('DOMINO_GITHUB_WORKFLOWS_REPOSITORY', "Tauffer-Consulting/domino_workflows_dev") + DOMINO_LOCAL_WORKFLOWS_REPOSITORY: str = '/opt/airflow/dags' # Default domino pieces repository - DOMINO_DEFAULT_PIECES_REPOSITORY = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY', "Tauffer-Consulting/default_domino_pieces") - DOMINO_DEFAULT_PIECES_REPOSITORY_VERSION = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_VERSION', "0.4.3") - DOMINO_DEFAULT_PIECES_REPOSITORY_SOURCE = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_SOURCE', "github") - DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN: EmptyStrToNone = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN', "") + 
DOMINO_DEFAULT_PIECES_REPOSITORY: str = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY', "Tauffer-Consulting/default_domino_pieces") + DOMINO_DEFAULT_PIECES_REPOSITORY_VERSION: str = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_VERSION', "0.4.3") + DOMINO_DEFAULT_PIECES_REPOSITORY_SOURCE: str = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_SOURCE', "github") + DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN: Union[str, None] = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN', None) + if DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN is None or DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN.strip() == '': + DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN = None DOMINO_DEFAULT_PIECES_REPOSITORY_URL: str = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_URL', 'https://github.com/Tauffer-Consulting/default_domino_pieces') # Default DB mock data - RUN_CREATE_MOCK_DATA = False - ADMIN_CREDENTIALS = { + RUN_CREATE_MOCK_DATA: bool = False + ADMIN_CREDENTIALS: dict = { "email": "admin@email.com", "password": "admin" } - AIRFLOW_ADMIN_CREDENTIALS = { + AIRFLOW_ADMIN_CREDENTIALS: dict = { "username": os.environ.get('AIRFLOW_ADMIN_USERNAME', "admin"), "password": os.environ.get('AIRFLOW_ADMIN_PASSWORD', "admin") } - AIRFLOW_WEBSERVER_HOST = os.environ.get('AIRFLOW_WEBSERVER_HOST', "http://airflow-webserver:8080/") + AIRFLOW_WEBSERVER_HOST: str = os.environ.get('AIRFLOW_WEBSERVER_HOST', "http://airflow-webserver:8080/") # Default repositories - DEFAULT_STORAGE_REPOSITORY = dict( + DEFAULT_STORAGE_REPOSITORY: dict = dict( name="default_storage_repository", path="default_storage_repository", source=getattr(RepositorySource, 'default').value, @@ -76,16 +65,16 @@ class Settings(BaseSettings): url="domino-default/default_storage_repository" ) - DEPLOY_MODE = os.environ.get('DOMINO_DEPLOY_MODE', 'local-k8s') + DEPLOY_MODE: str = os.environ.get('DOMINO_DEPLOY_MODE', 'local-k8s') - CONDITIONAL_ENDPOINTS_ENABLED = False if DEPLOY_MODE == 'local-compose' else True + CONDITIONAL_ENDPOINTS_ENABLED: bool = False if DEPLOY_MODE == 'local-compose' else True 
class LocalK8sSettings(Settings): - SERVER_HOST = "0.0.0.0" - DEBUG = True - PORT = 8000 - RELOAD = True - CORS = { + SERVER_HOST: str = "0.0.0.0" + DEBUG: bool = True + PORT: int = 8000 + RELOAD: bool = True + CORS: dict = { "origins": [ "*", ], @@ -93,15 +82,15 @@ class LocalK8sSettings(Settings): "allow_methods": ["*"], "allow_headers": ["*"], } - ROOT_PATH = '/api' + ROOT_PATH: str = '/api' class LocalComposeSettings(Settings): - SERVER_HOST = "0.0.0.0" - DEBUG = True - PORT = 8000 - RELOAD = True - CORS = { + SERVER_HOST: str = "0.0.0.0" + DEBUG: bool = True + PORT: int = 8000 + RELOAD: bool = True + CORS: dict = { "origins": [ "*", ], @@ -110,15 +99,15 @@ class LocalComposeSettings(Settings): "allow_headers": ["*"], } - ROOT_PATH = '/' + ROOT_PATH: str = '/' class ProdSettings(Settings): - SERVER_HOST = "0.0.0.0" - DEBUG = False - PORT = 8000 - RELOAD = False - CORS = { + SERVER_HOST: str = "0.0.0.0" + DEBUG: bool = False + PORT: int = 8000 + RELOAD: bool = False + CORS: dict = { "origins": [ "*", ], @@ -128,7 +117,7 @@ class ProdSettings(Settings): } # ROOT_PATH is based in proxy config. 
Must be the same as the path to the api in the proxy - ROOT_PATH = '/api' + ROOT_PATH: str = '/api' def get_settings(): diff --git a/rest/main.py b/rest/main.py index d6d1cc5d..97cd0e71 100644 --- a/rest/main.py +++ b/rest/main.py @@ -2,7 +2,6 @@ from dotenv import find_dotenv, load_dotenv from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware - from routers.auth_router import router as auth_router from routers.user_router import router as user_router from routers.workflow_router import router as workflow_router diff --git a/rest/requirements.txt b/rest/requirements.txt index f34bb4eb..4cf6f530 100644 --- a/rest/requirements.txt +++ b/rest/requirements.txt @@ -1,10 +1,10 @@ alembic==1.12.0 bcrypt==3.2.2 fastapi==0.104.1 -fastapi-utils==0.2.1 Jinja2==2.11.3 psycopg2-binary==2.9.3 -pydantic[email]<2.0.0 +pydantic[email]==2.4.2 +pydantic-settings==2.0.3 MarkupSafe==2.0.1 PyGithub==1.55 PyJWT==2.4.0 @@ -16,7 +16,6 @@ sqlalchemy-redshift==0.8.9 SQLAlchemy-Utils==0.38.2 uvicorn==0.18.1 uvicorn[standard]==0.18.1 -python-dotenv==0.20.0 passlib==1.7.4 tomli==2.0.1 tomli_w==1.0.0 diff --git a/rest/routers/piece_repository_router.py b/rest/routers/piece_repository_router.py index 817600be..4da6a3ac 100644 --- a/rest/routers/piece_repository_router.py +++ b/rest/routers/piece_repository_router.py @@ -14,7 +14,7 @@ from database.models.enums import RepositorySource from schemas.exceptions.base import BaseException, ConflictException, ForbiddenException, ResourceNotFoundException, UnauthorizedException from schemas.errors.base import ConflictError, ForbiddenError, ResourceNotFoundError, SomethingWrongError, UnauthorizedError -from typing import List +from typing import List, Optional router = APIRouter(prefix="/pieces-repositories") auth_service = AuthService() @@ -124,8 +124,8 @@ def get_piece_repository_release_data( ) def get_pieces_repositories( workspace_id: int, - page: int = 0, - page_size: int = 100, + page: Optional[int] = 0, + page_size: 
Optional[int] = 100, filters: ListRepositoryFilters = Depends(), ) -> GetWorkspaceRepositoriesResponse: """Get pieces repositories for workspace""" diff --git a/rest/schemas/context/auth_context.py b/rest/schemas/context/auth_context.py index 01d5f9d8..e7432e3c 100644 --- a/rest/schemas/context/auth_context.py +++ b/rest/schemas/context/auth_context.py @@ -4,7 +4,7 @@ class WorkspaceAuthorizerData(BaseModel): id: int name: str - github_access_token: Optional[str] + github_access_token: Optional[str] = None user_permission: str diff --git a/rest/schemas/errors/base.py b/rest/schemas/errors/base.py index 2cc62205..33c27d13 100644 --- a/rest/schemas/errors/base.py +++ b/rest/schemas/errors/base.py @@ -2,30 +2,30 @@ class BadRequestError(BaseModel): - status_code = 400 + status_code: int = 400 message: str = Field(default="Bad request data") class SomethingWrongError(BaseModel): - status_code = 500 + status_code: int = 500 message: str = Field(default="Something went wrong") class UnauthorizedError(BaseModel): - status_code = 401 + status_code: int = 401 message: str = Field(default="Unauthorized") class ForbiddenError(BaseModel): - status_code = 403 + status_code: int = 403 message: str = Field(default="Forbidden") class ConflictError(BaseModel): - status_code = 409 + status_code: int = 409 message: str = Field(default="Conflict") class ResourceNotFoundError(BaseModel): - status_code = 404 + status_code: int = 404 message: str = Field(default="Resource not found") \ No newline at end of file diff --git a/rest/schemas/requests/piece.py b/rest/schemas/requests/piece.py index 7ec2b4ca..69cc50c4 100644 --- a/rest/schemas/requests/piece.py +++ b/rest/schemas/requests/piece.py @@ -2,4 +2,4 @@ from typing import Optional class ListPiecesFilters(BaseModel): - name__like: Optional[str] + name__like: Optional[str] = None diff --git a/rest/schemas/requests/piece_repository.py b/rest/schemas/requests/piece_repository.py index a2d17279..087b05f3 100644 --- 
a/rest/schemas/requests/piece_repository.py +++ b/rest/schemas/requests/piece_repository.py @@ -10,11 +10,11 @@ class Config: use_enum_values = True class ListRepositoryFilters(BaseModel): - name__like: Optional[str] - path__like: Optional[str] - version: Optional[str] - url: Optional[str] - workspace_id: Optional[int] + name__like: Optional[str] = None + path__like: Optional[str] = None + version: Optional[str] = None + url: Optional[str] = None + workspace_id: Optional[int] = None source: Optional[RepositorySource] = Field(description="Source of the repository.", default=RepositorySource.github.value) class CreateRepositoryRequest(BaseModel): diff --git a/rest/schemas/requests/workflow.py b/rest/schemas/requests/workflow.py index 68a80ae4..a78d8cbe 100644 --- a/rest/schemas/requests/workflow.py +++ b/rest/schemas/requests/workflow.py @@ -1,7 +1,7 @@ -from .base_model import BaseRequestModel from typing import Dict, List, Optional from enum import Enum -from pydantic import BaseModel, validator, Field +from pydantic import BaseModel, field_validator, Field +from pydantic_core.core_schema import FieldValidationInfo from datetime import datetime from constants.default_pieces.storage import AWSS3StoragePiece @@ -40,14 +40,14 @@ class WorkflowBaseSettings(BaseModel): ) start_date: str = Field(alias="startDateTime") select_end_date: Optional[SelectEndDate] = Field(alias="selectEndDate", default=SelectEndDate.never) - end_date: Optional[str] = Field(alias='endDateTime') + end_date: Optional[str] = Field(alias='endDateTime', default=None) schedule: ScheduleIntervalType = Field(alias="scheduleInterval") catchup: Optional[bool] = False # TODO add catchup to UI? generate_report: Optional[bool] = Field(alias="generateReport", default=False) # TODO add generate report to UI? - description: Optional[str] # TODO add description to UI? + description: Optional[str] = None # TODO add description to UI? 
- @validator('start_date') + @field_validator('start_date') def start_date_validator(cls, v): try: if '.' in v: @@ -63,16 +63,16 @@ def start_date_validator(cls, v): except ValueError: raise ValueError(f"Invalid start date: {v}") - @validator('end_date') - def end_date_validator(cls, v, values): + @field_validator('end_date') + def end_date_validator(cls, v, info: FieldValidationInfo): try: - if 'start_date' not in values: + if 'start_date' not in info.data: raise ValueError("Start date must be provided") - converted_start_date = datetime.fromisoformat(values['start_date']) - if 'select_end_date' not in values: + converted_start_date = datetime.fromisoformat(info.data['start_date']) + if 'select_end_date' not in info.data: raise ValueError("Select end date must be provided") - if values['select_end_date'] == SelectEndDate.never.value: + if info.data['select_end_date'] == SelectEndDate.never.value: return None converted_end_date = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%fZ").date() @@ -83,7 +83,7 @@ def end_date_validator(cls, v, values): raise ValueError(f"Invalid end date: {v}") class Config: - allow_population_by_field_name = True + populate_by_name = True storage_default_piece_model_map = { @@ -103,9 +103,9 @@ class WorkflowSharedStorageModeEnum(str, Enum): class WorkflowSharedStorageDataModel(BaseModel): - source: Optional[WorkflowSharedStorageSourceEnum] - mode: Optional[WorkflowSharedStorageModeEnum] - provider_options: Optional[Dict] + source: Optional[WorkflowSharedStorageSourceEnum] = None + mode: Optional[WorkflowSharedStorageModeEnum] = None + provider_options: Optional[Dict] = None class Config: use_enum_values = True @@ -130,7 +130,7 @@ class TasksDataModel(BaseModel): task_id: str piece: TaskPieceDataModel piece_input_kwargs: Dict - dependencies: Optional[List[str]] + dependencies: Optional[List[str]] = None """ Request data models @@ -143,7 +143,7 @@ class CreateWorkflowRequest(BaseModel): ] ui_schema: UiSchema - @validator('tasks') + 
@field_validator('tasks') def tasks_validator(cls, v): if not v: raise ValueError("Tasks must be provided") @@ -152,12 +152,12 @@ def tasks_validator(cls, v): class ListWorkflowsFilters(BaseModel): # TODO add filters - created_at: Optional[str] - name__like: Optional[str] = Field(alias="name") - last_changed_at: Optional[str] - start_date: Optional[str] - start_date__gt: Optional[str] - end_date: Optional[str] - end_date__gt: Optional[str] - schedule: Optional[ScheduleIntervalType] + created_at: Optional[str] = None + name__like: Optional[str] = Field(alias="name", default=None) + last_changed_at: Optional[str] = None + start_date: Optional[str] = None + start_date__gt: Optional[str] = None + end_date: Optional[str] = None + end_date__gt: Optional[str] = None + schedule: Optional[ScheduleIntervalType] = None diff --git a/rest/schemas/responses/piece.py b/rest/schemas/responses/piece.py index 2b49b0a1..88f81cda 100644 --- a/rest/schemas/responses/piece.py +++ b/rest/schemas/responses/piece.py @@ -5,12 +5,12 @@ class GetPiecesResponse(BaseModel): id: int name: str - description: Optional[str] - dependency: Optional[Dict] - source_image: Optional[str] - input_schema: Optional[Dict] - output_schema: Optional[Dict] - secrets_schema: Optional[Dict] - style: Optional[Dict] - source_url: Optional[str] + description: Optional[str] = None + dependency: Optional[Dict] = None + source_image: Optional[str] = None + input_schema: Optional[Dict] = None + output_schema: Optional[Dict] = None + secrets_schema: Optional[Dict] = None + style: Optional[Dict] = None + source_url: Optional[str] = None repository_id: int \ No newline at end of file diff --git a/rest/schemas/responses/piece_repository.py b/rest/schemas/responses/piece_repository.py index 88121b47..113e7b1b 100644 --- a/rest/schemas/responses/piece_repository.py +++ b/rest/schemas/responses/piece_repository.py @@ -10,8 +10,8 @@ class GetWorkspaceRepositoriesData(BaseModel): name: str label: str source: RepositorySource - 
path: Optional[str] - version: Optional[str] + path: Optional[str] = None + version: Optional[str] = None workspace_id: int class GetWorkspaceRepositoriesResponse(BaseModel): @@ -29,7 +29,7 @@ class CreateRepositoryReponse(BaseModel): source: RepositorySource = Field(..., description="Source of the repository") label: str = Field(..., description="Label of the repository") path: str = Field(..., description="Path of the repository") - version: Optional[str] = Field(None, description="Version of the repository") + version: Optional[str] = Field(default=None, description="Version of the repository") workspace_id: int = Field(..., description="Workspace id the repository belongs to") @@ -43,7 +43,7 @@ class GetRepositoryReleasesResponse(BaseModel): class GetRepositoryReleaseDataResponse(BaseModel): name: str - description: Optional[str] + description: Optional[str] = None pieces: List[str] class GetRepositoryResponse(BaseModel): @@ -51,7 +51,7 @@ class GetRepositoryResponse(BaseModel): created_at: datetime name: str source: RepositorySource - path: Optional[str] - version: Optional[str] + path: Optional[str] = None + version: Optional[str] = None workspace_id: int #secrets: List[RepositorySecret] \ No newline at end of file diff --git a/rest/schemas/responses/secret.py b/rest/schemas/responses/secret.py index 058cb36d..677e775b 100644 --- a/rest/schemas/responses/secret.py +++ b/rest/schemas/responses/secret.py @@ -19,4 +19,4 @@ class ListRepositorySecretsResponse(BaseModel): class GetSecretsByPieceResponse(BaseModel): name: str - value: Optional[str] \ No newline at end of file + value: Optional[str] = None \ No newline at end of file diff --git a/rest/schemas/responses/workflow.py b/rest/schemas/responses/workflow.py index 1a040591..4df0469f 100644 --- a/rest/schemas/responses/workflow.py +++ b/rest/schemas/responses/workflow.py @@ -1,5 +1,5 @@ from schemas.responses.base import PaginationSet -from pydantic import BaseModel, Field, validator +from pydantic import 
BaseModel, Field, field_validator from datetime import datetime, timezone from typing import Dict, Optional, List, Union from enum import Enum @@ -47,14 +47,14 @@ class WorkflowConfigResponse(BaseModel): # TODO remove regex ? name: str start_date: str - end_date: Optional[str] - schedule: Optional[ScheduleIntervalTypeResponse] + end_date: Optional[str] = None + schedule: Optional[ScheduleIntervalTypeResponse] = None catchup: bool = False generate_report: bool = False - description: Optional[str] + description: Optional[str] = None - @validator('schedule') + @field_validator('schedule') def set_schedule(cls, schedule): return schedule or ScheduleIntervalTypeResponse.none @@ -76,27 +76,27 @@ class GetWorkflowsResponseData(BaseModel): is_paused: bool is_active: bool status: WorkflowStatus - schedule: Optional[ScheduleIntervalTypeResponse] - next_dagrun: Optional[datetime] + schedule: Optional[ScheduleIntervalTypeResponse] = None + next_dagrun: Optional[datetime] = None - @validator('schedule') + @field_validator('schedule') def set_schedule(cls, schedule): return schedule or ScheduleIntervalTypeResponse.none - @validator('created_at', pre=True, always=True) + @field_validator('created_at', mode='before') def add_utc_timezone_created_at(cls, v): if isinstance(v, datetime) and v.tzinfo is None: v = v.replace(tzinfo=timezone.utc) return v - @validator('last_changed_at', pre=True, always=True) + @field_validator('last_changed_at', mode='before') def add_utc_timezone_last_changed_at(cls, v): if isinstance(v, datetime) and v.tzinfo is None: v = v.replace(tzinfo=timezone.utc) return v - @validator('next_dagrun', pre=True, always=True) + @field_validator('next_dagrun', mode='before') def add_utc_timezone_next_dagrun(cls, v): if isinstance(v, datetime) and v.tzinfo is None: v = v.replace(tzinfo=timezone.utc) @@ -114,29 +114,29 @@ class GetWorkflowResponse(BaseModel): id: int name: str created_at: datetime - schema_: Optional[BaseWorkflowModel] = Field(alias="schema") - 
ui_schema: Optional[BaseUiSchema] + schema_: Optional[BaseWorkflowModel] = Field(alias="schema", default=None) + ui_schema: Optional[BaseUiSchema] = None last_changed_at: datetime last_changed_by: int created_by: int workspace_id: int # Airflow database infos # TODO check if add more fields, or if we should use /details fields to get more infos - is_paused: Optional[Union[bool, WorkflowStatus]] # Whether the DAG is paused. - is_active: Optional[Union[bool, WorkflowStatus]] # Whether the DAG is currently seen by the scheduler(s). - is_subdag: Optional[Union[bool, WorkflowStatus]] # Whether the DAG is SubDAG. - last_pickled: Optional[Union[datetime, WorkflowStatus]] # The last time the DAG was pickled. - last_expired: Optional[Union[datetime, WorkflowStatus]] # Time when the DAG last received a refresh signal (e.g. the DAG's "refresh" button was clicked in the web UI) - schedule: Optional[Union[ScheduleIntervalTypeResponse, WorkflowStatus]] # The schedule interval for the DAG. - max_active_tasks: Optional[Union[int, WorkflowStatus]] # Maximum number of active tasks that can be run on the DAG - max_active_runs: Optional[Union[int, WorkflowStatus]] # Maximum number of active DAG runs for the DAG - has_task_concurrency_limits: Optional[Union[bool, WorkflowStatus]] # Whether the DAG has task concurrency limits - has_import_errors: Optional[Union[bool, WorkflowStatus]] # Whether the DAG has import errors - next_dagrun: Optional[Union[datetime, WorkflowStatus]] # The logical date of the next dag run. - next_dagrun_data_interval_start: Optional[Union[datetime, WorkflowStatus]] # The start date of the next dag run. - next_dagrun_data_interval_end: Optional[Union[datetime, WorkflowStatus]] # The end date of the next dag run. - - @validator('schedule') + is_paused: Optional[Union[bool, WorkflowStatus]] = None # Whether the DAG is paused. + is_active: Optional[Union[bool, WorkflowStatus]] = None # Whether the DAG is currently seen by the scheduler(s). 
+ is_subdag: Optional[Union[bool, WorkflowStatus]] = None # Whether the DAG is SubDAG. + last_pickled: Optional[Union[datetime, WorkflowStatus]] = None # The last time the DAG was pickled. + last_expired: Optional[Union[datetime, WorkflowStatus]] = None # Time when the DAG last received a refresh signal (e.g. the DAG's "refresh" button was clicked in the web UI) + schedule: Optional[Union[ScheduleIntervalTypeResponse, WorkflowStatus]] = None # The schedule interval for the DAG. + max_active_tasks: Optional[Union[int, WorkflowStatus]] = None # Maximum number of active tasks that can be run on the DAG + max_active_runs: Optional[Union[int, WorkflowStatus]] = None # Maximum number of active DAG runs for the DAG + has_task_concurrency_limits: Optional[Union[bool, WorkflowStatus]] = None # Whether the DAG has task concurrency limits + has_import_errors: Optional[Union[bool, WorkflowStatus]] = None # Whether the DAG has import errors + next_dagrun: Optional[Union[datetime, WorkflowStatus]] = None # The logical date of the next dag run. + next_dagrun_data_interval_start: Optional[Union[datetime, WorkflowStatus]] = None # The start date of the next dag run. + next_dagrun_data_interval_end: Optional[Union[datetime, WorkflowStatus]] = None # The end date of the next dag run. 
+ + @field_validator('schedule') def set_schedule(cls, schedule): return schedule or ScheduleIntervalTypeResponse.none @@ -144,18 +144,18 @@ def set_schedule(cls, schedule): class GetWorkflowRunsResponseData(BaseModel): dag_id: str = Field(alias='workflow_uuid') dag_run_id: str = Field(alias='workflow_run_id') - start_date: Optional[datetime] - end_date: Optional[datetime] - execution_date: Optional[datetime] - state: Optional[WorkflowRunState] + start_date: Optional[datetime] = None + end_date: Optional[datetime] = None + execution_date: Optional[datetime] = None + state: Optional[WorkflowRunState] = None - @validator('state') + @field_validator('state') def set_state(cls, state): return state or WorkflowRunState.none class Config: - allow_population_by_field_name = True + populate_by_name = True class GetWorkflowRunsResponse(BaseModel): @@ -166,21 +166,21 @@ class GetWorkflowRunsResponse(BaseModel): class GetWorkflowRunTasksResponseData(BaseModel): dag_id: str = Field(alias='workflow_uuid') dag_run_id: str = Field(alias='workflow_run_id') - duration: Optional[float] - start_date: Optional[datetime] - end_date: Optional[datetime] - execution_date: Optional[datetime] - docker_image: Optional[str] + duration: Optional[float] = None + start_date: Optional[datetime] = None + end_date: Optional[datetime] = None + execution_date: Optional[datetime] = None + docker_image: Optional[str] = None task_id: str try_number: int - state: Optional[WorkflowRunTaskState] + state: Optional[WorkflowRunTaskState] = None - @validator('state') + @field_validator('state') def set_state(cls, state): return state or WorkflowRunTaskState.none class Config: - allow_population_by_field_name = True + populate_by_name = True class GetWorkflowRunTasksResponse(BaseModel): @@ -194,8 +194,8 @@ class WorkflowSchemaBaseModel(BaseModel): class GetWorkflowRunTaskResultResponse(BaseModel): - base64_content: Optional[str] - file_type: Optional[str] + base64_content: Optional[str] = None + file_type: 
Optional[str] = None class GetWorkflowRunTaskLogsResponse(BaseModel): From b6b882f0c98f03eef65da7532e888045f2ff5e88 Mon Sep 17 00:00:00 2001 From: Vinicius Vaz Date: Wed, 1 Nov 2023 09:25:19 -0300 Subject: [PATCH 3/6] update tests dependencies --- .github/workflows/integration-tests-api.yml | 1 + rest/requirements-test.txt | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/integration-tests-api.yml b/.github/workflows/integration-tests-api.yml index 91c44412..e74061db 100644 --- a/.github/workflows/integration-tests-api.yml +++ b/.github/workflows/integration-tests-api.yml @@ -6,6 +6,7 @@ on: - main - dev - test/* + - chore/update-airflow-pydantic jobs: api-tests: diff --git a/rest/requirements-test.txt b/rest/requirements-test.txt index fad26993..81cb6d0d 100644 --- a/rest/requirements-test.txt +++ b/rest/requirements-test.txt @@ -1,4 +1,4 @@ -fastapi==0.88.0 +fastapi==0.104.1 pytest==7.2.0 httpx==0.23.1 requests==2.28.1 From eace869bd16f784c3d6f0ff2d0f3851ee4882a25 Mon Sep 17 00:00:00 2001 From: Vinicius Vaz Date: Wed, 1 Nov 2023 09:39:48 -0300 Subject: [PATCH 4/6] fix token --- .github/workflows/integration-tests-api.yml | 2 +- rest/core/settings.py | 7 +++---- rest/services/piece_repository_service.py | 6 ++++++ 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/.github/workflows/integration-tests-api.yml b/.github/workflows/integration-tests-api.yml index e74061db..77be01ec 100644 --- a/.github/workflows/integration-tests-api.yml +++ b/.github/workflows/integration-tests-api.yml @@ -6,7 +6,7 @@ on: - main - dev - test/* - - chore/update-airflow-pydantic + # - chore/update-airflow-pydantic jobs: api-tests: diff --git a/rest/core/settings.py b/rest/core/settings.py index 76df30bc..59169478 100644 --- a/rest/core/settings.py +++ b/rest/core/settings.py @@ -1,5 +1,5 @@ from pydantic_settings import BaseSettings -from typing import Optional, Union +from typing import Union import os from database.models.enums import 
RepositorySource @@ -39,9 +39,8 @@ class Settings(BaseSettings): DOMINO_DEFAULT_PIECES_REPOSITORY: str = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY', "Tauffer-Consulting/default_domino_pieces") DOMINO_DEFAULT_PIECES_REPOSITORY_VERSION: str = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_VERSION', "0.4.3") DOMINO_DEFAULT_PIECES_REPOSITORY_SOURCE: str = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_SOURCE', "github") - DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN: Union[str, None] = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN', None) - if DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN.strip() == '': - DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN = None + DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN: Union[str, None] = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN', '') + DOMINO_DEFAULT_PIECES_REPOSITORY_URL: str = os.environ.get('DOMINO_DEFAULT_PIECES_REPOSITORY_URL', 'https://github.com/Tauffer-Consulting/default_domino_pieces') # Default DB mock data diff --git a/rest/services/piece_repository_service.py b/rest/services/piece_repository_service.py index 45e66b58..a1590b58 100644 --- a/rest/services/piece_repository_service.py +++ b/rest/services/piece_repository_service.py @@ -88,6 +88,8 @@ def get_piece_repository_releases(self, source: str, path: str, auth_context: Au self.logger.info(f"Getting releases for repository {path}") token = auth_context.workspace.github_access_token if auth_context.workspace.github_access_token else settings.DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN + if not token.strip(): + token = None github_client = GithubRestClient(token=token) if source == getattr(RepositorySource, 'github').value: tags = github_client.get_tags(repo_name=path) @@ -100,6 +102,8 @@ def get_piece_repository_release_data(self, version: str, source:str, path: str, self.logger.info(f'Getting release data for repository {path}') token = auth_context.workspace.github_access_token if auth_context.workspace.github_access_token else 
settings.DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN + if not token.strip(): + token = None tag_data = self._read_repository_data(path=path, source=source, version=version, github_access_token=token) name = tag_data.get('config_toml').get('repository').get("REPOSITORY_NAME") description = tag_data.get('config_toml').get('repository').get("DESCRIPTION") @@ -215,6 +219,8 @@ def create_piece_repository( raise ConflictException(message=f"Repository {piece_repository_data.path} already exists for this workspace") token = auth_context.workspace.github_access_token if auth_context.workspace.github_access_token else settings.DOMINO_DEFAULT_PIECES_REPOSITORY_TOKEN + if not token.strip(): + token = None repository_files_metadata = self._read_repository_data( source=piece_repository_data.source, path=piece_repository_data.path, From e279bd2b728c420a29227e50b5613eb52dc6f4d9 Mon Sep 17 00:00:00 2001 From: Vinicius Vaz Date: Wed, 1 Nov 2023 10:02:28 -0300 Subject: [PATCH 5/6] run tst --- .github/workflows/integration-tests-api.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-tests-api.yml b/.github/workflows/integration-tests-api.yml index 77be01ec..e74061db 100644 --- a/.github/workflows/integration-tests-api.yml +++ b/.github/workflows/integration-tests-api.yml @@ -6,7 +6,7 @@ on: - main - dev - test/* - # - chore/update-airflow-pydantic + - chore/update-airflow-pydantic jobs: api-tests: From c640c27d20824cda4e5b6c0704383d65aa46ff30 Mon Sep 17 00:00:00 2001 From: Vinicius Vaz Date: Wed, 1 Nov 2023 10:37:29 -0300 Subject: [PATCH 6/6] workflow --- .github/workflows/integration-tests-api.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/integration-tests-api.yml b/.github/workflows/integration-tests-api.yml index e74061db..91c44412 100644 --- a/.github/workflows/integration-tests-api.yml +++ b/.github/workflows/integration-tests-api.yml @@ -6,7 +6,6 @@ on: - main - dev - test/* - - chore/update-airflow-pydantic 
jobs: api-tests: