From cc75fdfb902258e861147b1f38c039dfa8881db1 Mon Sep 17 00:00:00 2001
From: Shohan Dutta Roy
Date: Thu, 20 Jun 2024 17:23:59 +0530
Subject: [PATCH 01/13] feat: Add script to deploy uploader code to a lambda

---
 .pre-commit-config.yaml                       |    1 +
 poetry.lock                                   | 1438 ++++++++++++++++-
 pyproject.toml                                |    4 +
 scripts/recording_uploader/.gitignore         |  244 +++
 scripts/recording_uploader/README.md          |   87 +
 scripts/recording_uploader/__init__.py        |    0
 scripts/recording_uploader/deploy.py          |   41 +
 scripts/recording_uploader/samconfig.toml     |   34 +
 scripts/recording_uploader/template.yaml      |   48 +
 .../recording_uploader/uploader/__init__.py   |    0
 scripts/recording_uploader/uploader/app.py    |   40 +
 .../uploader/requirements.txt                 |    1 +
 12 files changed, 1895 insertions(+), 43 deletions(-)
 create mode 100644 scripts/recording_uploader/.gitignore
 create mode 100644 scripts/recording_uploader/README.md
 create mode 100644 scripts/recording_uploader/__init__.py
 create mode 100644 scripts/recording_uploader/deploy.py
 create mode 100644 scripts/recording_uploader/samconfig.toml
 create mode 100644 scripts/recording_uploader/template.yaml
 create mode 100644 scripts/recording_uploader/uploader/__init__.py
 create mode 100644 scripts/recording_uploader/uploader/app.py
 create mode 100644 scripts/recording_uploader/uploader/requirements.txt

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2fbba0905..81b83c233 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -3,6 +3,7 @@ repos:
     rev: v2.3.0
     hooks:
       - id: check-yaml
+        args: ['--unsafe']
       - id: end-of-file-fixer
      - id: trailing-whitespace
   - repo: https://github.com/psf/black
diff --git a/poetry.lock b/poetry.lock
index 9cf91a77a..44a02f66e 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -235,6 +235,25 @@ doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)",
 test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"]
 trio = ["trio (>=0.26.1)"]

+[[package]]
+name = "arrow"
+version = "1.3.0"
+description = "Better dates & times for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"},
+    {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"},
+]
+
+[package.dependencies]
+python-dateutil = ">=2.7.0"
+types-python-dateutil = ">=2.8.10"
+
+[package.extras]
+doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"]
+test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"]
+
 [[package]]
 name = "ascii-magic"
 version = "2.3.0"
@@ -416,6 +435,84 @@ files = [
     {file = "av-12.3.0.tar.gz", hash = "sha256:04b1892562aff3277efc79f32bd8f1d0cbb64ed011241cb3e96f9ad471816c22"},
 ]

+[[package]]
+name = "aws-lambda-builders"
+version = "1.50.0"
+description = "Python library to compile, build & package AWS Lambda functions for several runtimes & frameworks."
+optional = false +python-versions = ">=3.8" +files = [ + {file = "aws_lambda_builders-1.50.0-py3-none-any.whl", hash = "sha256:40a613ecb19fbf0b64a47bae14bd252ea5da32ea71fde9808d596e2dbc011baf"}, + {file = "aws_lambda_builders-1.50.0.tar.gz", hash = "sha256:ad95ed55359c399872f5825582896500dfc1c5564eccf2a6ab8d0e9f6c1ae385"}, +] + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[package.extras] +dev = ["black (==24.4.2)", "coverage (==7.5.1)", "flake8 (==3.8.4)", "parameterized (==0.9.0)", "pyelftools (>=0.31,<1.0)", "pytest (>=6.1.1)", "pytest-cov (==5.0.0)", "ruff (==0.4.3)"] + +[[package]] +name = "aws-sam-cli" +version = "1.119.0" +description = "AWS SAM CLI is a CLI tool for local development and testing of Serverless applications" +optional = false +python-versions = "!=4.0,<=4.0,>=3.8" +files = [ + {file = "aws_sam_cli-1.119.0-py3-none-any.whl", hash = "sha256:ebcdc84d57c7388402036625b78e6287f06ccbf292c165607fcad3a94af09388"}, + {file = "aws_sam_cli-1.119.0.tar.gz", hash = "sha256:fc1879f90cef5bc7264aac62fa5d85a0ce83b88bb9e24a4c4434542bc59e1d68"}, +] + +[package.dependencies] +aws-lambda-builders = "1.50.0" +aws-sam-translator = "1.89.0" +boto3 = ">=1.29.2,<2" +boto3-stubs = {version = "1.34.123", extras = ["apigateway", "cloudformation", "ecr", "iam", "kinesis", "lambda", "s3", "schemas", "secretsmanager", "signer", "sqs", "stepfunctions", "sts", "xray"]} +cfn-lint = ">=0.87.7,<0.88.0" +chevron = ">=0.12,<1.0" +click = ">=8.1,<9.0" +cookiecutter = ">=2.6.0,<2.7.0" +dateparser = ">=1.2,<2.0" +docker = ">=7.1.0,<7.2.0" +Flask = "<3.1" +jmespath = ">=1.0.1,<1.1.0" +jsonschema = "<4.23" +pyopenssl = ">=24.1.0,<24.2.0" +PyYAML = ">=6.0.1,<7.0" +regex = "!=2021.10.8" +requests = ">=2.32.3,<2.33.0" +rich = ">=13.7.1,<13.8.0" +ruamel-yaml = ">=0.18.6,<0.19.0" +tomlkit = "0.12.5" +typing-extensions = ">=4.4.0,<5" +tzlocal = "5.2" +watchdog = "4.0.1" + +[package.extras] +dev = ["black (==24.4.2)", "coverage (==7.5.3)", "filelock (==3.14.0)", "mypy (==1.10.0)", "parameterized (==0.9.0)", "psutil (==5.9.8)", "pytest (==8.2.1)", "pytest-cov (==5.0.0)", "pytest-forked (==1.6.0)", "pytest-json-report-wip (==1.5.1)", "pytest-metadata (==3.1.1)", "pytest-rerunfailures (==14.0)", "pytest-timeout (==2.3.1)", "pytest-xdist (==3.6.1)", "ruff (==0.4.8)", "types-PyYAML (==6.0.12.20240311)", "types-Pygments (==2.18.0.20240506)", "types-chevron (==0.14.2.20240310)", "types-colorama (==0.4.15.20240311)", "types-dateparser (==1.2.0.20240420)", "types-docutils (==0.21.0.20240423)", "types-jsonschema (==4.22.0.20240610)", "types-psutil (==5.9.5.20240516)", "types-pyOpenSSL (==24.1.0.20240425)", "types-pywin32 (==306.0.0.20240408)", "types-requests (==2.31.0.6)", "types-requests (==2.32.0.20240602)", "types-setuptools (==70.0.0.20240524)", "types-urllib3 (==1.26.25.14)"] +pre-dev = ["ruff (==0.4.8)"] + +[[package]] +name = "aws-sam-translator" +version = "1.89.0" +description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" +optional = false +python-versions = "!=4.0,<=4.0,>=3.8" +files = [ + {file = "aws_sam_translator-1.89.0-py3-none-any.whl", hash = "sha256:843be1b5ca7634f700ad0c844a7e0dc42858f35da502e91691473eadd1731ded"}, + {file = "aws_sam_translator-1.89.0.tar.gz", hash = "sha256:fff1005d0b1f3cb511d0ac7e85f54af06afc9d9e433df013a2338d7a0168d174"}, +] + +[package.dependencies] +boto3 = ">=1.19.5,<2.dev0" +jsonschema = ">=3.2,<5" +pydantic = ">=1.8,<3" +typing-extensions = ">=4.4" + +[package.extras] +dev = ["black (==24.3.0)", "boto3 (>=1.23,<2)", 
"boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.dev0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (>=0.1.0,<0.2.0)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] + [[package]] name = "babel" version = "2.16.0" @@ -462,6 +559,20 @@ charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] +[[package]] +name = "binaryornot" +version = "0.4.4" +description = "Ultra-lightweight pure Python package to check if a file is binary or text." +optional = false +python-versions = "*" +files = [ + {file = "binaryornot-0.4.4-py2.py3-none-any.whl", hash = "sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4"}, + {file = "binaryornot-0.4.4.tar.gz", hash = "sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061"}, +] + +[package.dependencies] +chardet = ">=3.0.2" + [[package]] name = "black" version = "24.10.0" @@ -508,6 +619,17 @@ d = ["aiohttp (>=3.10)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "blinker" +version = "1.8.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + [[package]] name = "blis" version = "0.7.11" @@ -593,6 +715,425 @@ s3transfer = ">=0.10.0,<0.11.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] +[[package]] +name = "boto3-stubs" +version = "1.34.123" +description = "Type annotations for boto3 1.34.123 generated with mypy-boto3-builder 7.24.0" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3_stubs-1.34.123-py3-none-any.whl", hash = "sha256:dd7106a254a2e0cfb0c4f83a6bf3b8a63aa7817667517af5de55bc4c05ed19be"}, + {file = "boto3_stubs-1.34.123.tar.gz", hash = "sha256:2fdbbce40108c2d838e3e8efaf6f839c23d897167e8d6168daf3655d41aa66af"}, +] + +[package.dependencies] +botocore-stubs = "*" +mypy-boto3-apigateway = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"apigateway\""} +mypy-boto3-cloudformation = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"cloudformation\""} +mypy-boto3-ecr = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"ecr\""} +mypy-boto3-iam = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"iam\""} +mypy-boto3-kinesis = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"kinesis\""} +mypy-boto3-lambda = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"lambda\""} +mypy-boto3-s3 = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"s3\""} +mypy-boto3-schemas = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"schemas\""} +mypy-boto3-secretsmanager = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"secretsmanager\""} +mypy-boto3-signer = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"signer\""} +mypy-boto3-sqs = {version = ">=1.34.0,<1.35.0", optional = true, markers = 
"extra == \"sqs\""} +mypy-boto3-stepfunctions = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"stepfunctions\""} +mypy-boto3-sts = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"sts\""} +mypy-boto3-xray = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"xray\""} +types-s3transfer = "*" +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[package.extras] +accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.34.0,<1.35.0)"] +account = ["mypy-boto3-account (>=1.34.0,<1.35.0)"] +acm = ["mypy-boto3-acm (>=1.34.0,<1.35.0)"] +acm-pca = ["mypy-boto3-acm-pca (>=1.34.0,<1.35.0)"] +all = ["mypy-boto3-accessanalyzer (>=1.34.0,<1.35.0)", "mypy-boto3-account (>=1.34.0,<1.35.0)", "mypy-boto3-acm (>=1.34.0,<1.35.0)", "mypy-boto3-acm-pca (>=1.34.0,<1.35.0)", "mypy-boto3-amp (>=1.34.0,<1.35.0)", "mypy-boto3-amplify (>=1.34.0,<1.35.0)", "mypy-boto3-amplifybackend (>=1.34.0,<1.35.0)", "mypy-boto3-amplifyuibuilder (>=1.34.0,<1.35.0)", "mypy-boto3-apigateway (>=1.34.0,<1.35.0)", "mypy-boto3-apigatewaymanagementapi (>=1.34.0,<1.35.0)", "mypy-boto3-apigatewayv2 (>=1.34.0,<1.35.0)", "mypy-boto3-appconfig (>=1.34.0,<1.35.0)", "mypy-boto3-appconfigdata (>=1.34.0,<1.35.0)", "mypy-boto3-appfabric (>=1.34.0,<1.35.0)", "mypy-boto3-appflow (>=1.34.0,<1.35.0)", "mypy-boto3-appintegrations (>=1.34.0,<1.35.0)", "mypy-boto3-application-autoscaling (>=1.34.0,<1.35.0)", "mypy-boto3-application-insights (>=1.34.0,<1.35.0)", "mypy-boto3-application-signals (>=1.34.0,<1.35.0)", "mypy-boto3-applicationcostprofiler (>=1.34.0,<1.35.0)", "mypy-boto3-appmesh (>=1.34.0,<1.35.0)", "mypy-boto3-apprunner (>=1.34.0,<1.35.0)", "mypy-boto3-appstream (>=1.34.0,<1.35.0)", "mypy-boto3-appsync (>=1.34.0,<1.35.0)", "mypy-boto3-arc-zonal-shift (>=1.34.0,<1.35.0)", "mypy-boto3-artifact (>=1.34.0,<1.35.0)", "mypy-boto3-athena (>=1.34.0,<1.35.0)", "mypy-boto3-auditmanager (>=1.34.0,<1.35.0)", "mypy-boto3-autoscaling (>=1.34.0,<1.35.0)", "mypy-boto3-autoscaling-plans (>=1.34.0,<1.35.0)", "mypy-boto3-b2bi (>=1.34.0,<1.35.0)", "mypy-boto3-backup (>=1.34.0,<1.35.0)", "mypy-boto3-backup-gateway (>=1.34.0,<1.35.0)", "mypy-boto3-backupstorage (>=1.34.0,<1.35.0)", "mypy-boto3-batch (>=1.34.0,<1.35.0)", "mypy-boto3-bcm-data-exports (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-agent (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-agent-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-billingconductor (>=1.34.0,<1.35.0)", "mypy-boto3-braket (>=1.34.0,<1.35.0)", "mypy-boto3-budgets (>=1.34.0,<1.35.0)", "mypy-boto3-ce (>=1.34.0,<1.35.0)", "mypy-boto3-chatbot (>=1.34.0,<1.35.0)", "mypy-boto3-chime (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-identity (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-meetings (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-messaging (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-voice (>=1.34.0,<1.35.0)", "mypy-boto3-cleanrooms (>=1.34.0,<1.35.0)", "mypy-boto3-cleanroomsml (>=1.34.0,<1.35.0)", "mypy-boto3-cloud9 (>=1.34.0,<1.35.0)", "mypy-boto3-cloudcontrol (>=1.34.0,<1.35.0)", "mypy-boto3-clouddirectory (>=1.34.0,<1.35.0)", "mypy-boto3-cloudformation (>=1.34.0,<1.35.0)", "mypy-boto3-cloudfront (>=1.34.0,<1.35.0)", "mypy-boto3-cloudfront-keyvaluestore (>=1.34.0,<1.35.0)", "mypy-boto3-cloudhsm (>=1.34.0,<1.35.0)", "mypy-boto3-cloudhsmv2 (>=1.34.0,<1.35.0)", "mypy-boto3-cloudsearch (>=1.34.0,<1.35.0)", 
"mypy-boto3-cloudsearchdomain (>=1.34.0,<1.35.0)", "mypy-boto3-cloudtrail (>=1.34.0,<1.35.0)", "mypy-boto3-cloudtrail-data (>=1.34.0,<1.35.0)", "mypy-boto3-cloudwatch (>=1.34.0,<1.35.0)", "mypy-boto3-codeartifact (>=1.34.0,<1.35.0)", "mypy-boto3-codebuild (>=1.34.0,<1.35.0)", "mypy-boto3-codecatalyst (>=1.34.0,<1.35.0)", "mypy-boto3-codecommit (>=1.34.0,<1.35.0)", "mypy-boto3-codeconnections (>=1.34.0,<1.35.0)", "mypy-boto3-codedeploy (>=1.34.0,<1.35.0)", "mypy-boto3-codeguru-reviewer (>=1.34.0,<1.35.0)", "mypy-boto3-codeguru-security (>=1.34.0,<1.35.0)", "mypy-boto3-codeguruprofiler (>=1.34.0,<1.35.0)", "mypy-boto3-codepipeline (>=1.34.0,<1.35.0)", "mypy-boto3-codestar (>=1.34.0,<1.35.0)", "mypy-boto3-codestar-connections (>=1.34.0,<1.35.0)", "mypy-boto3-codestar-notifications (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-identity (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-idp (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-sync (>=1.34.0,<1.35.0)", "mypy-boto3-comprehend (>=1.34.0,<1.35.0)", "mypy-boto3-comprehendmedical (>=1.34.0,<1.35.0)", "mypy-boto3-compute-optimizer (>=1.34.0,<1.35.0)", "mypy-boto3-config (>=1.34.0,<1.35.0)", "mypy-boto3-connect (>=1.34.0,<1.35.0)", "mypy-boto3-connect-contact-lens (>=1.34.0,<1.35.0)", "mypy-boto3-connectcampaigns (>=1.34.0,<1.35.0)", "mypy-boto3-connectcases (>=1.34.0,<1.35.0)", "mypy-boto3-connectparticipant (>=1.34.0,<1.35.0)", "mypy-boto3-controlcatalog (>=1.34.0,<1.35.0)", "mypy-boto3-controltower (>=1.34.0,<1.35.0)", "mypy-boto3-cost-optimization-hub (>=1.34.0,<1.35.0)", "mypy-boto3-cur (>=1.34.0,<1.35.0)", "mypy-boto3-customer-profiles (>=1.34.0,<1.35.0)", "mypy-boto3-databrew (>=1.34.0,<1.35.0)", "mypy-boto3-dataexchange (>=1.34.0,<1.35.0)", "mypy-boto3-datapipeline (>=1.34.0,<1.35.0)", "mypy-boto3-datasync (>=1.34.0,<1.35.0)", "mypy-boto3-datazone (>=1.34.0,<1.35.0)", "mypy-boto3-dax (>=1.34.0,<1.35.0)", "mypy-boto3-deadline (>=1.34.0,<1.35.0)", "mypy-boto3-detective (>=1.34.0,<1.35.0)", "mypy-boto3-devicefarm (>=1.34.0,<1.35.0)", "mypy-boto3-devops-guru (>=1.34.0,<1.35.0)", "mypy-boto3-directconnect (>=1.34.0,<1.35.0)", "mypy-boto3-discovery (>=1.34.0,<1.35.0)", "mypy-boto3-dlm (>=1.34.0,<1.35.0)", "mypy-boto3-dms (>=1.34.0,<1.35.0)", "mypy-boto3-docdb (>=1.34.0,<1.35.0)", "mypy-boto3-docdb-elastic (>=1.34.0,<1.35.0)", "mypy-boto3-drs (>=1.34.0,<1.35.0)", "mypy-boto3-ds (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodb (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodbstreams (>=1.34.0,<1.35.0)", "mypy-boto3-ebs (>=1.34.0,<1.35.0)", "mypy-boto3-ec2 (>=1.34.0,<1.35.0)", "mypy-boto3-ec2-instance-connect (>=1.34.0,<1.35.0)", "mypy-boto3-ecr (>=1.34.0,<1.35.0)", "mypy-boto3-ecr-public (>=1.34.0,<1.35.0)", "mypy-boto3-ecs (>=1.34.0,<1.35.0)", "mypy-boto3-efs (>=1.34.0,<1.35.0)", "mypy-boto3-eks (>=1.34.0,<1.35.0)", "mypy-boto3-eks-auth (>=1.34.0,<1.35.0)", "mypy-boto3-elastic-inference (>=1.34.0,<1.35.0)", "mypy-boto3-elasticache (>=1.34.0,<1.35.0)", "mypy-boto3-elasticbeanstalk (>=1.34.0,<1.35.0)", "mypy-boto3-elastictranscoder (>=1.34.0,<1.35.0)", "mypy-boto3-elb (>=1.34.0,<1.35.0)", "mypy-boto3-elbv2 (>=1.34.0,<1.35.0)", "mypy-boto3-emr (>=1.34.0,<1.35.0)", "mypy-boto3-emr-containers (>=1.34.0,<1.35.0)", "mypy-boto3-emr-serverless (>=1.34.0,<1.35.0)", "mypy-boto3-entityresolution (>=1.34.0,<1.35.0)", "mypy-boto3-es (>=1.34.0,<1.35.0)", "mypy-boto3-events (>=1.34.0,<1.35.0)", "mypy-boto3-evidently (>=1.34.0,<1.35.0)", "mypy-boto3-finspace (>=1.34.0,<1.35.0)", "mypy-boto3-finspace-data (>=1.34.0,<1.35.0)", "mypy-boto3-firehose (>=1.34.0,<1.35.0)", "mypy-boto3-fis 
(>=1.34.0,<1.35.0)", "mypy-boto3-fms (>=1.34.0,<1.35.0)", "mypy-boto3-forecast (>=1.34.0,<1.35.0)", "mypy-boto3-forecastquery (>=1.34.0,<1.35.0)", "mypy-boto3-frauddetector (>=1.34.0,<1.35.0)", "mypy-boto3-freetier (>=1.34.0,<1.35.0)", "mypy-boto3-fsx (>=1.34.0,<1.35.0)", "mypy-boto3-gamelift (>=1.34.0,<1.35.0)", "mypy-boto3-glacier (>=1.34.0,<1.35.0)", "mypy-boto3-globalaccelerator (>=1.34.0,<1.35.0)", "mypy-boto3-glue (>=1.34.0,<1.35.0)", "mypy-boto3-grafana (>=1.34.0,<1.35.0)", "mypy-boto3-greengrass (>=1.34.0,<1.35.0)", "mypy-boto3-greengrassv2 (>=1.34.0,<1.35.0)", "mypy-boto3-groundstation (>=1.34.0,<1.35.0)", "mypy-boto3-guardduty (>=1.34.0,<1.35.0)", "mypy-boto3-health (>=1.34.0,<1.35.0)", "mypy-boto3-healthlake (>=1.34.0,<1.35.0)", "mypy-boto3-iam (>=1.34.0,<1.35.0)", "mypy-boto3-identitystore (>=1.34.0,<1.35.0)", "mypy-boto3-imagebuilder (>=1.34.0,<1.35.0)", "mypy-boto3-importexport (>=1.34.0,<1.35.0)", "mypy-boto3-inspector (>=1.34.0,<1.35.0)", "mypy-boto3-inspector-scan (>=1.34.0,<1.35.0)", "mypy-boto3-inspector2 (>=1.34.0,<1.35.0)", "mypy-boto3-internetmonitor (>=1.34.0,<1.35.0)", "mypy-boto3-iot (>=1.34.0,<1.35.0)", "mypy-boto3-iot-data (>=1.34.0,<1.35.0)", "mypy-boto3-iot-jobs-data (>=1.34.0,<1.35.0)", "mypy-boto3-iot1click-devices (>=1.34.0,<1.35.0)", "mypy-boto3-iot1click-projects (>=1.34.0,<1.35.0)", "mypy-boto3-iotanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-iotdeviceadvisor (>=1.34.0,<1.35.0)", "mypy-boto3-iotevents (>=1.34.0,<1.35.0)", "mypy-boto3-iotevents-data (>=1.34.0,<1.35.0)", "mypy-boto3-iotfleethub (>=1.34.0,<1.35.0)", "mypy-boto3-iotfleetwise (>=1.34.0,<1.35.0)", "mypy-boto3-iotsecuretunneling (>=1.34.0,<1.35.0)", "mypy-boto3-iotsitewise (>=1.34.0,<1.35.0)", "mypy-boto3-iotthingsgraph (>=1.34.0,<1.35.0)", "mypy-boto3-iottwinmaker (>=1.34.0,<1.35.0)", "mypy-boto3-iotwireless (>=1.34.0,<1.35.0)", "mypy-boto3-ivs (>=1.34.0,<1.35.0)", "mypy-boto3-ivs-realtime (>=1.34.0,<1.35.0)", "mypy-boto3-ivschat (>=1.34.0,<1.35.0)", "mypy-boto3-kafka (>=1.34.0,<1.35.0)", "mypy-boto3-kafkaconnect (>=1.34.0,<1.35.0)", "mypy-boto3-kendra (>=1.34.0,<1.35.0)", "mypy-boto3-kendra-ranking (>=1.34.0,<1.35.0)", "mypy-boto3-keyspaces (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-archived-media (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-media (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-signaling (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisvideo (>=1.34.0,<1.35.0)", "mypy-boto3-kms (>=1.34.0,<1.35.0)", "mypy-boto3-lakeformation (>=1.34.0,<1.35.0)", "mypy-boto3-lambda (>=1.34.0,<1.35.0)", "mypy-boto3-launch-wizard (>=1.34.0,<1.35.0)", "mypy-boto3-lex-models (>=1.34.0,<1.35.0)", "mypy-boto3-lex-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-lexv2-models (>=1.34.0,<1.35.0)", "mypy-boto3-lexv2-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.34.0,<1.35.0)", "mypy-boto3-lightsail (>=1.34.0,<1.35.0)", "mypy-boto3-location (>=1.34.0,<1.35.0)", "mypy-boto3-logs (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutequipment (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutmetrics (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutvision (>=1.34.0,<1.35.0)", "mypy-boto3-m2 (>=1.34.0,<1.35.0)", "mypy-boto3-machinelearning (>=1.34.0,<1.35.0)", "mypy-boto3-macie2 (>=1.34.0,<1.35.0)", 
"mypy-boto3-mailmanager (>=1.34.0,<1.35.0)", "mypy-boto3-managedblockchain (>=1.34.0,<1.35.0)", "mypy-boto3-managedblockchain-query (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-agreement (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-catalog (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-deployment (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-entitlement (>=1.34.0,<1.35.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-mediaconnect (>=1.34.0,<1.35.0)", "mypy-boto3-mediaconvert (>=1.34.0,<1.35.0)", "mypy-boto3-medialive (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackage (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackage-vod (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackagev2 (>=1.34.0,<1.35.0)", "mypy-boto3-mediastore (>=1.34.0,<1.35.0)", "mypy-boto3-mediastore-data (>=1.34.0,<1.35.0)", "mypy-boto3-mediatailor (>=1.34.0,<1.35.0)", "mypy-boto3-medical-imaging (>=1.34.0,<1.35.0)", "mypy-boto3-memorydb (>=1.34.0,<1.35.0)", "mypy-boto3-meteringmarketplace (>=1.34.0,<1.35.0)", "mypy-boto3-mgh (>=1.34.0,<1.35.0)", "mypy-boto3-mgn (>=1.34.0,<1.35.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhub-config (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhuborchestrator (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhubstrategy (>=1.34.0,<1.35.0)", "mypy-boto3-mobile (>=1.34.0,<1.35.0)", "mypy-boto3-mq (>=1.34.0,<1.35.0)", "mypy-boto3-mturk (>=1.34.0,<1.35.0)", "mypy-boto3-mwaa (>=1.34.0,<1.35.0)", "mypy-boto3-neptune (>=1.34.0,<1.35.0)", "mypy-boto3-neptune-graph (>=1.34.0,<1.35.0)", "mypy-boto3-neptunedata (>=1.34.0,<1.35.0)", "mypy-boto3-network-firewall (>=1.34.0,<1.35.0)", "mypy-boto3-networkmanager (>=1.34.0,<1.35.0)", "mypy-boto3-networkmonitor (>=1.34.0,<1.35.0)", "mypy-boto3-nimble (>=1.34.0,<1.35.0)", "mypy-boto3-oam (>=1.34.0,<1.35.0)", "mypy-boto3-omics (>=1.34.0,<1.35.0)", "mypy-boto3-opensearch (>=1.34.0,<1.35.0)", "mypy-boto3-opensearchserverless (>=1.34.0,<1.35.0)", "mypy-boto3-opsworks (>=1.34.0,<1.35.0)", "mypy-boto3-opsworkscm (>=1.34.0,<1.35.0)", "mypy-boto3-organizations (>=1.34.0,<1.35.0)", "mypy-boto3-osis (>=1.34.0,<1.35.0)", "mypy-boto3-outposts (>=1.34.0,<1.35.0)", "mypy-boto3-panorama (>=1.34.0,<1.35.0)", "mypy-boto3-payment-cryptography (>=1.34.0,<1.35.0)", "mypy-boto3-payment-cryptography-data (>=1.34.0,<1.35.0)", "mypy-boto3-pca-connector-ad (>=1.34.0,<1.35.0)", "mypy-boto3-personalize (>=1.34.0,<1.35.0)", "mypy-boto3-personalize-events (>=1.34.0,<1.35.0)", "mypy-boto3-personalize-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-pi (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-email (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-sms-voice (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.34.0,<1.35.0)", "mypy-boto3-pipes (>=1.34.0,<1.35.0)", "mypy-boto3-polly (>=1.34.0,<1.35.0)", "mypy-boto3-pricing (>=1.34.0,<1.35.0)", "mypy-boto3-privatenetworks (>=1.34.0,<1.35.0)", "mypy-boto3-proton (>=1.34.0,<1.35.0)", "mypy-boto3-qbusiness (>=1.34.0,<1.35.0)", "mypy-boto3-qconnect (>=1.34.0,<1.35.0)", "mypy-boto3-qldb (>=1.34.0,<1.35.0)", "mypy-boto3-qldb-session (>=1.34.0,<1.35.0)", "mypy-boto3-quicksight (>=1.34.0,<1.35.0)", "mypy-boto3-ram (>=1.34.0,<1.35.0)", "mypy-boto3-rbin (>=1.34.0,<1.35.0)", "mypy-boto3-rds (>=1.34.0,<1.35.0)", "mypy-boto3-rds-data (>=1.34.0,<1.35.0)", "mypy-boto3-redshift (>=1.34.0,<1.35.0)", "mypy-boto3-redshift-data (>=1.34.0,<1.35.0)", "mypy-boto3-redshift-serverless (>=1.34.0,<1.35.0)", "mypy-boto3-rekognition (>=1.34.0,<1.35.0)", "mypy-boto3-repostspace (>=1.34.0,<1.35.0)", 
"mypy-boto3-resiliencehub (>=1.34.0,<1.35.0)", "mypy-boto3-resource-explorer-2 (>=1.34.0,<1.35.0)", "mypy-boto3-resource-groups (>=1.34.0,<1.35.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.34.0,<1.35.0)", "mypy-boto3-robomaker (>=1.34.0,<1.35.0)", "mypy-boto3-rolesanywhere (>=1.34.0,<1.35.0)", "mypy-boto3-route53 (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-cluster (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-control-config (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-readiness (>=1.34.0,<1.35.0)", "mypy-boto3-route53domains (>=1.34.0,<1.35.0)", "mypy-boto3-route53profiles (>=1.34.0,<1.35.0)", "mypy-boto3-route53resolver (>=1.34.0,<1.35.0)", "mypy-boto3-rum (>=1.34.0,<1.35.0)", "mypy-boto3-s3 (>=1.34.0,<1.35.0)", "mypy-boto3-s3control (>=1.34.0,<1.35.0)", "mypy-boto3-s3outposts (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-edge (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-geospatial (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-metrics (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-savingsplans (>=1.34.0,<1.35.0)", "mypy-boto3-scheduler (>=1.34.0,<1.35.0)", "mypy-boto3-schemas (>=1.34.0,<1.35.0)", "mypy-boto3-sdb (>=1.34.0,<1.35.0)", "mypy-boto3-secretsmanager (>=1.34.0,<1.35.0)", "mypy-boto3-securityhub (>=1.34.0,<1.35.0)", "mypy-boto3-securitylake (>=1.34.0,<1.35.0)", "mypy-boto3-serverlessrepo (>=1.34.0,<1.35.0)", "mypy-boto3-service-quotas (>=1.34.0,<1.35.0)", "mypy-boto3-servicecatalog (>=1.34.0,<1.35.0)", "mypy-boto3-servicecatalog-appregistry (>=1.34.0,<1.35.0)", "mypy-boto3-servicediscovery (>=1.34.0,<1.35.0)", "mypy-boto3-ses (>=1.34.0,<1.35.0)", "mypy-boto3-sesv2 (>=1.34.0,<1.35.0)", "mypy-boto3-shield (>=1.34.0,<1.35.0)", "mypy-boto3-signer (>=1.34.0,<1.35.0)", "mypy-boto3-simspaceweaver (>=1.34.0,<1.35.0)", "mypy-boto3-sms (>=1.34.0,<1.35.0)", "mypy-boto3-sms-voice (>=1.34.0,<1.35.0)", "mypy-boto3-snow-device-management (>=1.34.0,<1.35.0)", "mypy-boto3-snowball (>=1.34.0,<1.35.0)", "mypy-boto3-sns (>=1.34.0,<1.35.0)", "mypy-boto3-sqs (>=1.34.0,<1.35.0)", "mypy-boto3-ssm (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-contacts (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-incidents (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-sap (>=1.34.0,<1.35.0)", "mypy-boto3-sso (>=1.34.0,<1.35.0)", "mypy-boto3-sso-admin (>=1.34.0,<1.35.0)", "mypy-boto3-sso-oidc (>=1.34.0,<1.35.0)", "mypy-boto3-stepfunctions (>=1.34.0,<1.35.0)", "mypy-boto3-storagegateway (>=1.34.0,<1.35.0)", "mypy-boto3-sts (>=1.34.0,<1.35.0)", "mypy-boto3-supplychain (>=1.34.0,<1.35.0)", "mypy-boto3-support (>=1.34.0,<1.35.0)", "mypy-boto3-support-app (>=1.34.0,<1.35.0)", "mypy-boto3-swf (>=1.34.0,<1.35.0)", "mypy-boto3-synthetics (>=1.34.0,<1.35.0)", "mypy-boto3-taxsettings (>=1.34.0,<1.35.0)", "mypy-boto3-textract (>=1.34.0,<1.35.0)", "mypy-boto3-timestream-influxdb (>=1.34.0,<1.35.0)", "mypy-boto3-timestream-query (>=1.34.0,<1.35.0)", "mypy-boto3-timestream-write (>=1.34.0,<1.35.0)", "mypy-boto3-tnb (>=1.34.0,<1.35.0)", "mypy-boto3-transcribe (>=1.34.0,<1.35.0)", "mypy-boto3-transfer (>=1.34.0,<1.35.0)", "mypy-boto3-translate (>=1.34.0,<1.35.0)", "mypy-boto3-trustedadvisor (>=1.34.0,<1.35.0)", "mypy-boto3-verifiedpermissions (>=1.34.0,<1.35.0)", "mypy-boto3-voice-id (>=1.34.0,<1.35.0)", "mypy-boto3-vpc-lattice (>=1.34.0,<1.35.0)", "mypy-boto3-waf (>=1.34.0,<1.35.0)", "mypy-boto3-waf-regional (>=1.34.0,<1.35.0)", "mypy-boto3-wafv2 
(>=1.34.0,<1.35.0)", "mypy-boto3-wellarchitected (>=1.34.0,<1.35.0)", "mypy-boto3-wisdom (>=1.34.0,<1.35.0)", "mypy-boto3-workdocs (>=1.34.0,<1.35.0)", "mypy-boto3-worklink (>=1.34.0,<1.35.0)", "mypy-boto3-workmail (>=1.34.0,<1.35.0)", "mypy-boto3-workmailmessageflow (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces-thin-client (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces-web (>=1.34.0,<1.35.0)", "mypy-boto3-xray (>=1.34.0,<1.35.0)"] +amp = ["mypy-boto3-amp (>=1.34.0,<1.35.0)"] +amplify = ["mypy-boto3-amplify (>=1.34.0,<1.35.0)"] +amplifybackend = ["mypy-boto3-amplifybackend (>=1.34.0,<1.35.0)"] +amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.34.0,<1.35.0)"] +apigateway = ["mypy-boto3-apigateway (>=1.34.0,<1.35.0)"] +apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.34.0,<1.35.0)"] +apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.34.0,<1.35.0)"] +appconfig = ["mypy-boto3-appconfig (>=1.34.0,<1.35.0)"] +appconfigdata = ["mypy-boto3-appconfigdata (>=1.34.0,<1.35.0)"] +appfabric = ["mypy-boto3-appfabric (>=1.34.0,<1.35.0)"] +appflow = ["mypy-boto3-appflow (>=1.34.0,<1.35.0)"] +appintegrations = ["mypy-boto3-appintegrations (>=1.34.0,<1.35.0)"] +application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.34.0,<1.35.0)"] +application-insights = ["mypy-boto3-application-insights (>=1.34.0,<1.35.0)"] +application-signals = ["mypy-boto3-application-signals (>=1.34.0,<1.35.0)"] +applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.34.0,<1.35.0)"] +appmesh = ["mypy-boto3-appmesh (>=1.34.0,<1.35.0)"] +apprunner = ["mypy-boto3-apprunner (>=1.34.0,<1.35.0)"] +appstream = ["mypy-boto3-appstream (>=1.34.0,<1.35.0)"] +appsync = ["mypy-boto3-appsync (>=1.34.0,<1.35.0)"] +arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.34.0,<1.35.0)"] +artifact = ["mypy-boto3-artifact (>=1.34.0,<1.35.0)"] +athena = ["mypy-boto3-athena (>=1.34.0,<1.35.0)"] +auditmanager = ["mypy-boto3-auditmanager (>=1.34.0,<1.35.0)"] +autoscaling = ["mypy-boto3-autoscaling (>=1.34.0,<1.35.0)"] +autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.34.0,<1.35.0)"] +b2bi = ["mypy-boto3-b2bi (>=1.34.0,<1.35.0)"] +backup = ["mypy-boto3-backup (>=1.34.0,<1.35.0)"] +backup-gateway = ["mypy-boto3-backup-gateway (>=1.34.0,<1.35.0)"] +backupstorage = ["mypy-boto3-backupstorage (>=1.34.0,<1.35.0)"] +batch = ["mypy-boto3-batch (>=1.34.0,<1.35.0)"] +bcm-data-exports = ["mypy-boto3-bcm-data-exports (>=1.34.0,<1.35.0)"] +bedrock = ["mypy-boto3-bedrock (>=1.34.0,<1.35.0)"] +bedrock-agent = ["mypy-boto3-bedrock-agent (>=1.34.0,<1.35.0)"] +bedrock-agent-runtime = ["mypy-boto3-bedrock-agent-runtime (>=1.34.0,<1.35.0)"] +bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.34.0,<1.35.0)"] +billingconductor = ["mypy-boto3-billingconductor (>=1.34.0,<1.35.0)"] +boto3 = ["boto3 (==1.34.123)", "botocore (==1.34.123)"] +braket = ["mypy-boto3-braket (>=1.34.0,<1.35.0)"] +budgets = ["mypy-boto3-budgets (>=1.34.0,<1.35.0)"] +ce = ["mypy-boto3-ce (>=1.34.0,<1.35.0)"] +chatbot = ["mypy-boto3-chatbot (>=1.34.0,<1.35.0)"] +chime = ["mypy-boto3-chime (>=1.34.0,<1.35.0)"] +chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.34.0,<1.35.0)"] +chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.34.0,<1.35.0)"] +chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.34.0,<1.35.0)"] +chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.34.0,<1.35.0)"] +chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.34.0,<1.35.0)"] +cleanrooms = ["mypy-boto3-cleanrooms 
(>=1.34.0,<1.35.0)"] +cleanroomsml = ["mypy-boto3-cleanroomsml (>=1.34.0,<1.35.0)"] +cloud9 = ["mypy-boto3-cloud9 (>=1.34.0,<1.35.0)"] +cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.34.0,<1.35.0)"] +clouddirectory = ["mypy-boto3-clouddirectory (>=1.34.0,<1.35.0)"] +cloudformation = ["mypy-boto3-cloudformation (>=1.34.0,<1.35.0)"] +cloudfront = ["mypy-boto3-cloudfront (>=1.34.0,<1.35.0)"] +cloudfront-keyvaluestore = ["mypy-boto3-cloudfront-keyvaluestore (>=1.34.0,<1.35.0)"] +cloudhsm = ["mypy-boto3-cloudhsm (>=1.34.0,<1.35.0)"] +cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.34.0,<1.35.0)"] +cloudsearch = ["mypy-boto3-cloudsearch (>=1.34.0,<1.35.0)"] +cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.34.0,<1.35.0)"] +cloudtrail = ["mypy-boto3-cloudtrail (>=1.34.0,<1.35.0)"] +cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.34.0,<1.35.0)"] +cloudwatch = ["mypy-boto3-cloudwatch (>=1.34.0,<1.35.0)"] +codeartifact = ["mypy-boto3-codeartifact (>=1.34.0,<1.35.0)"] +codebuild = ["mypy-boto3-codebuild (>=1.34.0,<1.35.0)"] +codecatalyst = ["mypy-boto3-codecatalyst (>=1.34.0,<1.35.0)"] +codecommit = ["mypy-boto3-codecommit (>=1.34.0,<1.35.0)"] +codeconnections = ["mypy-boto3-codeconnections (>=1.34.0,<1.35.0)"] +codedeploy = ["mypy-boto3-codedeploy (>=1.34.0,<1.35.0)"] +codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.34.0,<1.35.0)"] +codeguru-security = ["mypy-boto3-codeguru-security (>=1.34.0,<1.35.0)"] +codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.34.0,<1.35.0)"] +codepipeline = ["mypy-boto3-codepipeline (>=1.34.0,<1.35.0)"] +codestar = ["mypy-boto3-codestar (>=1.34.0,<1.35.0)"] +codestar-connections = ["mypy-boto3-codestar-connections (>=1.34.0,<1.35.0)"] +codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.34.0,<1.35.0)"] +cognito-identity = ["mypy-boto3-cognito-identity (>=1.34.0,<1.35.0)"] +cognito-idp = ["mypy-boto3-cognito-idp (>=1.34.0,<1.35.0)"] +cognito-sync = ["mypy-boto3-cognito-sync (>=1.34.0,<1.35.0)"] +comprehend = ["mypy-boto3-comprehend (>=1.34.0,<1.35.0)"] +comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.34.0,<1.35.0)"] +compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.34.0,<1.35.0)"] +config = ["mypy-boto3-config (>=1.34.0,<1.35.0)"] +connect = ["mypy-boto3-connect (>=1.34.0,<1.35.0)"] +connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.34.0,<1.35.0)"] +connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.34.0,<1.35.0)"] +connectcases = ["mypy-boto3-connectcases (>=1.34.0,<1.35.0)"] +connectparticipant = ["mypy-boto3-connectparticipant (>=1.34.0,<1.35.0)"] +controlcatalog = ["mypy-boto3-controlcatalog (>=1.34.0,<1.35.0)"] +controltower = ["mypy-boto3-controltower (>=1.34.0,<1.35.0)"] +cost-optimization-hub = ["mypy-boto3-cost-optimization-hub (>=1.34.0,<1.35.0)"] +cur = ["mypy-boto3-cur (>=1.34.0,<1.35.0)"] +customer-profiles = ["mypy-boto3-customer-profiles (>=1.34.0,<1.35.0)"] +databrew = ["mypy-boto3-databrew (>=1.34.0,<1.35.0)"] +dataexchange = ["mypy-boto3-dataexchange (>=1.34.0,<1.35.0)"] +datapipeline = ["mypy-boto3-datapipeline (>=1.34.0,<1.35.0)"] +datasync = ["mypy-boto3-datasync (>=1.34.0,<1.35.0)"] +datazone = ["mypy-boto3-datazone (>=1.34.0,<1.35.0)"] +dax = ["mypy-boto3-dax (>=1.34.0,<1.35.0)"] +deadline = ["mypy-boto3-deadline (>=1.34.0,<1.35.0)"] +detective = ["mypy-boto3-detective (>=1.34.0,<1.35.0)"] +devicefarm = ["mypy-boto3-devicefarm (>=1.34.0,<1.35.0)"] +devops-guru = ["mypy-boto3-devops-guru (>=1.34.0,<1.35.0)"] +directconnect = ["mypy-boto3-directconnect (>=1.34.0,<1.35.0)"] +discovery = 
["mypy-boto3-discovery (>=1.34.0,<1.35.0)"] +dlm = ["mypy-boto3-dlm (>=1.34.0,<1.35.0)"] +dms = ["mypy-boto3-dms (>=1.34.0,<1.35.0)"] +docdb = ["mypy-boto3-docdb (>=1.34.0,<1.35.0)"] +docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.34.0,<1.35.0)"] +drs = ["mypy-boto3-drs (>=1.34.0,<1.35.0)"] +ds = ["mypy-boto3-ds (>=1.34.0,<1.35.0)"] +dynamodb = ["mypy-boto3-dynamodb (>=1.34.0,<1.35.0)"] +dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.34.0,<1.35.0)"] +ebs = ["mypy-boto3-ebs (>=1.34.0,<1.35.0)"] +ec2 = ["mypy-boto3-ec2 (>=1.34.0,<1.35.0)"] +ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.34.0,<1.35.0)"] +ecr = ["mypy-boto3-ecr (>=1.34.0,<1.35.0)"] +ecr-public = ["mypy-boto3-ecr-public (>=1.34.0,<1.35.0)"] +ecs = ["mypy-boto3-ecs (>=1.34.0,<1.35.0)"] +efs = ["mypy-boto3-efs (>=1.34.0,<1.35.0)"] +eks = ["mypy-boto3-eks (>=1.34.0,<1.35.0)"] +eks-auth = ["mypy-boto3-eks-auth (>=1.34.0,<1.35.0)"] +elastic-inference = ["mypy-boto3-elastic-inference (>=1.34.0,<1.35.0)"] +elasticache = ["mypy-boto3-elasticache (>=1.34.0,<1.35.0)"] +elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.34.0,<1.35.0)"] +elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.34.0,<1.35.0)"] +elb = ["mypy-boto3-elb (>=1.34.0,<1.35.0)"] +elbv2 = ["mypy-boto3-elbv2 (>=1.34.0,<1.35.0)"] +emr = ["mypy-boto3-emr (>=1.34.0,<1.35.0)"] +emr-containers = ["mypy-boto3-emr-containers (>=1.34.0,<1.35.0)"] +emr-serverless = ["mypy-boto3-emr-serverless (>=1.34.0,<1.35.0)"] +entityresolution = ["mypy-boto3-entityresolution (>=1.34.0,<1.35.0)"] +es = ["mypy-boto3-es (>=1.34.0,<1.35.0)"] +essential = ["mypy-boto3-cloudformation (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodb (>=1.34.0,<1.35.0)", "mypy-boto3-ec2 (>=1.34.0,<1.35.0)", "mypy-boto3-lambda (>=1.34.0,<1.35.0)", "mypy-boto3-rds (>=1.34.0,<1.35.0)", "mypy-boto3-s3 (>=1.34.0,<1.35.0)", "mypy-boto3-sqs (>=1.34.0,<1.35.0)"] +events = ["mypy-boto3-events (>=1.34.0,<1.35.0)"] +evidently = ["mypy-boto3-evidently (>=1.34.0,<1.35.0)"] +finspace = ["mypy-boto3-finspace (>=1.34.0,<1.35.0)"] +finspace-data = ["mypy-boto3-finspace-data (>=1.34.0,<1.35.0)"] +firehose = ["mypy-boto3-firehose (>=1.34.0,<1.35.0)"] +fis = ["mypy-boto3-fis (>=1.34.0,<1.35.0)"] +fms = ["mypy-boto3-fms (>=1.34.0,<1.35.0)"] +forecast = ["mypy-boto3-forecast (>=1.34.0,<1.35.0)"] +forecastquery = ["mypy-boto3-forecastquery (>=1.34.0,<1.35.0)"] +frauddetector = ["mypy-boto3-frauddetector (>=1.34.0,<1.35.0)"] +freetier = ["mypy-boto3-freetier (>=1.34.0,<1.35.0)"] +fsx = ["mypy-boto3-fsx (>=1.34.0,<1.35.0)"] +gamelift = ["mypy-boto3-gamelift (>=1.34.0,<1.35.0)"] +glacier = ["mypy-boto3-glacier (>=1.34.0,<1.35.0)"] +globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.34.0,<1.35.0)"] +glue = ["mypy-boto3-glue (>=1.34.0,<1.35.0)"] +grafana = ["mypy-boto3-grafana (>=1.34.0,<1.35.0)"] +greengrass = ["mypy-boto3-greengrass (>=1.34.0,<1.35.0)"] +greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.34.0,<1.35.0)"] +groundstation = ["mypy-boto3-groundstation (>=1.34.0,<1.35.0)"] +guardduty = ["mypy-boto3-guardduty (>=1.34.0,<1.35.0)"] +health = ["mypy-boto3-health (>=1.34.0,<1.35.0)"] +healthlake = ["mypy-boto3-healthlake (>=1.34.0,<1.35.0)"] +iam = ["mypy-boto3-iam (>=1.34.0,<1.35.0)"] +identitystore = ["mypy-boto3-identitystore (>=1.34.0,<1.35.0)"] +imagebuilder = ["mypy-boto3-imagebuilder (>=1.34.0,<1.35.0)"] +importexport = ["mypy-boto3-importexport (>=1.34.0,<1.35.0)"] +inspector = ["mypy-boto3-inspector (>=1.34.0,<1.35.0)"] +inspector-scan = ["mypy-boto3-inspector-scan (>=1.34.0,<1.35.0)"] +inspector2 = 
["mypy-boto3-inspector2 (>=1.34.0,<1.35.0)"] +internetmonitor = ["mypy-boto3-internetmonitor (>=1.34.0,<1.35.0)"] +iot = ["mypy-boto3-iot (>=1.34.0,<1.35.0)"] +iot-data = ["mypy-boto3-iot-data (>=1.34.0,<1.35.0)"] +iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.34.0,<1.35.0)"] +iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.34.0,<1.35.0)"] +iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.34.0,<1.35.0)"] +iotanalytics = ["mypy-boto3-iotanalytics (>=1.34.0,<1.35.0)"] +iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.34.0,<1.35.0)"] +iotevents = ["mypy-boto3-iotevents (>=1.34.0,<1.35.0)"] +iotevents-data = ["mypy-boto3-iotevents-data (>=1.34.0,<1.35.0)"] +iotfleethub = ["mypy-boto3-iotfleethub (>=1.34.0,<1.35.0)"] +iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.34.0,<1.35.0)"] +iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.34.0,<1.35.0)"] +iotsitewise = ["mypy-boto3-iotsitewise (>=1.34.0,<1.35.0)"] +iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.34.0,<1.35.0)"] +iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.34.0,<1.35.0)"] +iotwireless = ["mypy-boto3-iotwireless (>=1.34.0,<1.35.0)"] +ivs = ["mypy-boto3-ivs (>=1.34.0,<1.35.0)"] +ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.34.0,<1.35.0)"] +ivschat = ["mypy-boto3-ivschat (>=1.34.0,<1.35.0)"] +kafka = ["mypy-boto3-kafka (>=1.34.0,<1.35.0)"] +kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.34.0,<1.35.0)"] +kendra = ["mypy-boto3-kendra (>=1.34.0,<1.35.0)"] +kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.34.0,<1.35.0)"] +keyspaces = ["mypy-boto3-keyspaces (>=1.34.0,<1.35.0)"] +kinesis = ["mypy-boto3-kinesis (>=1.34.0,<1.35.0)"] +kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.34.0,<1.35.0)"] +kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.34.0,<1.35.0)"] +kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.34.0,<1.35.0)"] +kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.34.0,<1.35.0)"] +kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.34.0,<1.35.0)"] +kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.34.0,<1.35.0)"] +kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.34.0,<1.35.0)"] +kms = ["mypy-boto3-kms (>=1.34.0,<1.35.0)"] +lakeformation = ["mypy-boto3-lakeformation (>=1.34.0,<1.35.0)"] +lambda = ["mypy-boto3-lambda (>=1.34.0,<1.35.0)"] +launch-wizard = ["mypy-boto3-launch-wizard (>=1.34.0,<1.35.0)"] +lex-models = ["mypy-boto3-lex-models (>=1.34.0,<1.35.0)"] +lex-runtime = ["mypy-boto3-lex-runtime (>=1.34.0,<1.35.0)"] +lexv2-models = ["mypy-boto3-lexv2-models (>=1.34.0,<1.35.0)"] +lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.34.0,<1.35.0)"] +license-manager = ["mypy-boto3-license-manager (>=1.34.0,<1.35.0)"] +license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.34.0,<1.35.0)"] +license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.34.0,<1.35.0)"] +lightsail = ["mypy-boto3-lightsail (>=1.34.0,<1.35.0)"] +location = ["mypy-boto3-location (>=1.34.0,<1.35.0)"] +logs = ["mypy-boto3-logs (>=1.34.0,<1.35.0)"] +lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.34.0,<1.35.0)"] +lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.34.0,<1.35.0)"] +lookoutvision = ["mypy-boto3-lookoutvision (>=1.34.0,<1.35.0)"] +m2 = ["mypy-boto3-m2 (>=1.34.0,<1.35.0)"] +machinelearning = ["mypy-boto3-machinelearning (>=1.34.0,<1.35.0)"] +macie2 = ["mypy-boto3-macie2 (>=1.34.0,<1.35.0)"] +mailmanager = ["mypy-boto3-mailmanager (>=1.34.0,<1.35.0)"] 
+managedblockchain = ["mypy-boto3-managedblockchain (>=1.34.0,<1.35.0)"] +managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.34.0,<1.35.0)"] +marketplace-agreement = ["mypy-boto3-marketplace-agreement (>=1.34.0,<1.35.0)"] +marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.34.0,<1.35.0)"] +marketplace-deployment = ["mypy-boto3-marketplace-deployment (>=1.34.0,<1.35.0)"] +marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.34.0,<1.35.0)"] +marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.34.0,<1.35.0)"] +mediaconnect = ["mypy-boto3-mediaconnect (>=1.34.0,<1.35.0)"] +mediaconvert = ["mypy-boto3-mediaconvert (>=1.34.0,<1.35.0)"] +medialive = ["mypy-boto3-medialive (>=1.34.0,<1.35.0)"] +mediapackage = ["mypy-boto3-mediapackage (>=1.34.0,<1.35.0)"] +mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.34.0,<1.35.0)"] +mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.34.0,<1.35.0)"] +mediastore = ["mypy-boto3-mediastore (>=1.34.0,<1.35.0)"] +mediastore-data = ["mypy-boto3-mediastore-data (>=1.34.0,<1.35.0)"] +mediatailor = ["mypy-boto3-mediatailor (>=1.34.0,<1.35.0)"] +medical-imaging = ["mypy-boto3-medical-imaging (>=1.34.0,<1.35.0)"] +memorydb = ["mypy-boto3-memorydb (>=1.34.0,<1.35.0)"] +meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.34.0,<1.35.0)"] +mgh = ["mypy-boto3-mgh (>=1.34.0,<1.35.0)"] +mgn = ["mypy-boto3-mgn (>=1.34.0,<1.35.0)"] +migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.34.0,<1.35.0)"] +migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.34.0,<1.35.0)"] +migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.34.0,<1.35.0)"] +migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.34.0,<1.35.0)"] +mobile = ["mypy-boto3-mobile (>=1.34.0,<1.35.0)"] +mq = ["mypy-boto3-mq (>=1.34.0,<1.35.0)"] +mturk = ["mypy-boto3-mturk (>=1.34.0,<1.35.0)"] +mwaa = ["mypy-boto3-mwaa (>=1.34.0,<1.35.0)"] +neptune = ["mypy-boto3-neptune (>=1.34.0,<1.35.0)"] +neptune-graph = ["mypy-boto3-neptune-graph (>=1.34.0,<1.35.0)"] +neptunedata = ["mypy-boto3-neptunedata (>=1.34.0,<1.35.0)"] +network-firewall = ["mypy-boto3-network-firewall (>=1.34.0,<1.35.0)"] +networkmanager = ["mypy-boto3-networkmanager (>=1.34.0,<1.35.0)"] +networkmonitor = ["mypy-boto3-networkmonitor (>=1.34.0,<1.35.0)"] +nimble = ["mypy-boto3-nimble (>=1.34.0,<1.35.0)"] +oam = ["mypy-boto3-oam (>=1.34.0,<1.35.0)"] +omics = ["mypy-boto3-omics (>=1.34.0,<1.35.0)"] +opensearch = ["mypy-boto3-opensearch (>=1.34.0,<1.35.0)"] +opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.34.0,<1.35.0)"] +opsworks = ["mypy-boto3-opsworks (>=1.34.0,<1.35.0)"] +opsworkscm = ["mypy-boto3-opsworkscm (>=1.34.0,<1.35.0)"] +organizations = ["mypy-boto3-organizations (>=1.34.0,<1.35.0)"] +osis = ["mypy-boto3-osis (>=1.34.0,<1.35.0)"] +outposts = ["mypy-boto3-outposts (>=1.34.0,<1.35.0)"] +panorama = ["mypy-boto3-panorama (>=1.34.0,<1.35.0)"] +payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.34.0,<1.35.0)"] +payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.34.0,<1.35.0)"] +pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.34.0,<1.35.0)"] +personalize = ["mypy-boto3-personalize (>=1.34.0,<1.35.0)"] +personalize-events = ["mypy-boto3-personalize-events (>=1.34.0,<1.35.0)"] +personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.34.0,<1.35.0)"] +pi = ["mypy-boto3-pi (>=1.34.0,<1.35.0)"] +pinpoint = ["mypy-boto3-pinpoint (>=1.34.0,<1.35.0)"] +pinpoint-email = 
["mypy-boto3-pinpoint-email (>=1.34.0,<1.35.0)"] +pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.34.0,<1.35.0)"] +pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.34.0,<1.35.0)"] +pipes = ["mypy-boto3-pipes (>=1.34.0,<1.35.0)"] +polly = ["mypy-boto3-polly (>=1.34.0,<1.35.0)"] +pricing = ["mypy-boto3-pricing (>=1.34.0,<1.35.0)"] +privatenetworks = ["mypy-boto3-privatenetworks (>=1.34.0,<1.35.0)"] +proton = ["mypy-boto3-proton (>=1.34.0,<1.35.0)"] +qbusiness = ["mypy-boto3-qbusiness (>=1.34.0,<1.35.0)"] +qconnect = ["mypy-boto3-qconnect (>=1.34.0,<1.35.0)"] +qldb = ["mypy-boto3-qldb (>=1.34.0,<1.35.0)"] +qldb-session = ["mypy-boto3-qldb-session (>=1.34.0,<1.35.0)"] +quicksight = ["mypy-boto3-quicksight (>=1.34.0,<1.35.0)"] +ram = ["mypy-boto3-ram (>=1.34.0,<1.35.0)"] +rbin = ["mypy-boto3-rbin (>=1.34.0,<1.35.0)"] +rds = ["mypy-boto3-rds (>=1.34.0,<1.35.0)"] +rds-data = ["mypy-boto3-rds-data (>=1.34.0,<1.35.0)"] +redshift = ["mypy-boto3-redshift (>=1.34.0,<1.35.0)"] +redshift-data = ["mypy-boto3-redshift-data (>=1.34.0,<1.35.0)"] +redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.34.0,<1.35.0)"] +rekognition = ["mypy-boto3-rekognition (>=1.34.0,<1.35.0)"] +repostspace = ["mypy-boto3-repostspace (>=1.34.0,<1.35.0)"] +resiliencehub = ["mypy-boto3-resiliencehub (>=1.34.0,<1.35.0)"] +resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.34.0,<1.35.0)"] +resource-groups = ["mypy-boto3-resource-groups (>=1.34.0,<1.35.0)"] +resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.34.0,<1.35.0)"] +robomaker = ["mypy-boto3-robomaker (>=1.34.0,<1.35.0)"] +rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.34.0,<1.35.0)"] +route53 = ["mypy-boto3-route53 (>=1.34.0,<1.35.0)"] +route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.34.0,<1.35.0)"] +route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.34.0,<1.35.0)"] +route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.34.0,<1.35.0)"] +route53domains = ["mypy-boto3-route53domains (>=1.34.0,<1.35.0)"] +route53profiles = ["mypy-boto3-route53profiles (>=1.34.0,<1.35.0)"] +route53resolver = ["mypy-boto3-route53resolver (>=1.34.0,<1.35.0)"] +rum = ["mypy-boto3-rum (>=1.34.0,<1.35.0)"] +s3 = ["mypy-boto3-s3 (>=1.34.0,<1.35.0)"] +s3control = ["mypy-boto3-s3control (>=1.34.0,<1.35.0)"] +s3outposts = ["mypy-boto3-s3outposts (>=1.34.0,<1.35.0)"] +sagemaker = ["mypy-boto3-sagemaker (>=1.34.0,<1.35.0)"] +sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.34.0,<1.35.0)"] +sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.34.0,<1.35.0)"] +sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.34.0,<1.35.0)"] +sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.34.0,<1.35.0)"] +sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.34.0,<1.35.0)"] +sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.34.0,<1.35.0)"] +savingsplans = ["mypy-boto3-savingsplans (>=1.34.0,<1.35.0)"] +scheduler = ["mypy-boto3-scheduler (>=1.34.0,<1.35.0)"] +schemas = ["mypy-boto3-schemas (>=1.34.0,<1.35.0)"] +sdb = ["mypy-boto3-sdb (>=1.34.0,<1.35.0)"] +secretsmanager = ["mypy-boto3-secretsmanager (>=1.34.0,<1.35.0)"] +securityhub = ["mypy-boto3-securityhub (>=1.34.0,<1.35.0)"] +securitylake = ["mypy-boto3-securitylake (>=1.34.0,<1.35.0)"] +serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.34.0,<1.35.0)"] +service-quotas = ["mypy-boto3-service-quotas (>=1.34.0,<1.35.0)"] +servicecatalog = ["mypy-boto3-servicecatalog 
(>=1.34.0,<1.35.0)"] +servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.34.0,<1.35.0)"] +servicediscovery = ["mypy-boto3-servicediscovery (>=1.34.0,<1.35.0)"] +ses = ["mypy-boto3-ses (>=1.34.0,<1.35.0)"] +sesv2 = ["mypy-boto3-sesv2 (>=1.34.0,<1.35.0)"] +shield = ["mypy-boto3-shield (>=1.34.0,<1.35.0)"] +signer = ["mypy-boto3-signer (>=1.34.0,<1.35.0)"] +simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.34.0,<1.35.0)"] +sms = ["mypy-boto3-sms (>=1.34.0,<1.35.0)"] +sms-voice = ["mypy-boto3-sms-voice (>=1.34.0,<1.35.0)"] +snow-device-management = ["mypy-boto3-snow-device-management (>=1.34.0,<1.35.0)"] +snowball = ["mypy-boto3-snowball (>=1.34.0,<1.35.0)"] +sns = ["mypy-boto3-sns (>=1.34.0,<1.35.0)"] +sqs = ["mypy-boto3-sqs (>=1.34.0,<1.35.0)"] +ssm = ["mypy-boto3-ssm (>=1.34.0,<1.35.0)"] +ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.34.0,<1.35.0)"] +ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.34.0,<1.35.0)"] +ssm-sap = ["mypy-boto3-ssm-sap (>=1.34.0,<1.35.0)"] +sso = ["mypy-boto3-sso (>=1.34.0,<1.35.0)"] +sso-admin = ["mypy-boto3-sso-admin (>=1.34.0,<1.35.0)"] +sso-oidc = ["mypy-boto3-sso-oidc (>=1.34.0,<1.35.0)"] +stepfunctions = ["mypy-boto3-stepfunctions (>=1.34.0,<1.35.0)"] +storagegateway = ["mypy-boto3-storagegateway (>=1.34.0,<1.35.0)"] +sts = ["mypy-boto3-sts (>=1.34.0,<1.35.0)"] +supplychain = ["mypy-boto3-supplychain (>=1.34.0,<1.35.0)"] +support = ["mypy-boto3-support (>=1.34.0,<1.35.0)"] +support-app = ["mypy-boto3-support-app (>=1.34.0,<1.35.0)"] +swf = ["mypy-boto3-swf (>=1.34.0,<1.35.0)"] +synthetics = ["mypy-boto3-synthetics (>=1.34.0,<1.35.0)"] +taxsettings = ["mypy-boto3-taxsettings (>=1.34.0,<1.35.0)"] +textract = ["mypy-boto3-textract (>=1.34.0,<1.35.0)"] +timestream-influxdb = ["mypy-boto3-timestream-influxdb (>=1.34.0,<1.35.0)"] +timestream-query = ["mypy-boto3-timestream-query (>=1.34.0,<1.35.0)"] +timestream-write = ["mypy-boto3-timestream-write (>=1.34.0,<1.35.0)"] +tnb = ["mypy-boto3-tnb (>=1.34.0,<1.35.0)"] +transcribe = ["mypy-boto3-transcribe (>=1.34.0,<1.35.0)"] +transfer = ["mypy-boto3-transfer (>=1.34.0,<1.35.0)"] +translate = ["mypy-boto3-translate (>=1.34.0,<1.35.0)"] +trustedadvisor = ["mypy-boto3-trustedadvisor (>=1.34.0,<1.35.0)"] +verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.34.0,<1.35.0)"] +voice-id = ["mypy-boto3-voice-id (>=1.34.0,<1.35.0)"] +vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.34.0,<1.35.0)"] +waf = ["mypy-boto3-waf (>=1.34.0,<1.35.0)"] +waf-regional = ["mypy-boto3-waf-regional (>=1.34.0,<1.35.0)"] +wafv2 = ["mypy-boto3-wafv2 (>=1.34.0,<1.35.0)"] +wellarchitected = ["mypy-boto3-wellarchitected (>=1.34.0,<1.35.0)"] +wisdom = ["mypy-boto3-wisdom (>=1.34.0,<1.35.0)"] +workdocs = ["mypy-boto3-workdocs (>=1.34.0,<1.35.0)"] +worklink = ["mypy-boto3-worklink (>=1.34.0,<1.35.0)"] +workmail = ["mypy-boto3-workmail (>=1.34.0,<1.35.0)"] +workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.34.0,<1.35.0)"] +workspaces = ["mypy-boto3-workspaces (>=1.34.0,<1.35.0)"] +workspaces-thin-client = ["mypy-boto3-workspaces-thin-client (>=1.34.0,<1.35.0)"] +workspaces-web = ["mypy-boto3-workspaces-web (>=1.34.0,<1.35.0)"] +xray = ["mypy-boto3-xray (>=1.34.0,<1.35.0)"] + [[package]] name = "botocore" version = "1.35.56" @@ -612,6 +1153,23 @@ urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version > [package.extras] crt = ["awscrt (==0.22.0)"] +[[package]] +name = "botocore-stubs" +version = "1.34.129" +description = "Type annotations and code completion for botocore" +optional = false 
+python-versions = "<4.0,>=3.8" +files = [ + {file = "botocore_stubs-1.34.129-py3-none-any.whl", hash = "sha256:abb72262ff8ecb6a39215a51df81c885d2558211138f7fc7ab3e00f56e596367"}, + {file = "botocore_stubs-1.34.129.tar.gz", hash = "sha256:51bfcd000d9b69f164b9c64c6215189afc7a8609bc263f939abd10704cda87a4"}, +] + +[package.dependencies] +types-awscrt = "*" + +[package.extras] +botocore = ["botocore"] + [[package]] name = "bottle" version = "0.13.2" @@ -786,6 +1344,29 @@ files = [ {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, ] +[[package]] +name = "cfn-lint" +version = "0.87.7" +description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" +optional = false +python-versions = "!=4.0,<=4.0,>=3.8" +files = [ + {file = "cfn_lint-0.87.7-py3-none-any.whl", hash = "sha256:ac6ac86dde1ba3d0fb0e217a4d329239a98f00af7862e4fa5ace6c416c4e056c"}, + {file = "cfn_lint-0.87.7.tar.gz", hash = "sha256:85f6b7f32cf155a74d670d53f86b39f99cfc282b02158d98fdab9fc1dba0809e"}, +] + +[package.dependencies] +aws-sam-translator = ">=1.89.0" +jschema-to-python = ">=1.2.3,<1.3.0" +jsonpatch = "*" +jsonschema = ">=3.0,<5" +junit-xml = ">=1.9,<2.0" +networkx = ">=2.4,<4" +pyyaml = ">5.4" +regex = ">=2021.7.1" +sarif-om = ">=1.0.4,<1.1.0" +sympy = ">=1.0.0" + [[package]] name = "chardet" version = "5.2.0" @@ -911,6 +1492,17 @@ files = [ {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] +[[package]] +name = "chevron" +version = "0.14.0" +description = "Mustache templating language renderer" +optional = false +python-versions = "*" +files = [ + {file = "chevron-0.14.0-py3-none-any.whl", hash = "sha256:fbf996a709f8da2e745ef763f482ce2d311aa817d287593a5b990d6d6e4f0443"}, + {file = "chevron-0.14.0.tar.gz", hash = "sha256:87613aafdf6d77b6a90ff073165a61ae5086e21ad49057aa0e53681601800ebf"}, +] + [[package]] name = "click" version = "8.1.7" @@ -1121,40 +1713,66 @@ mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.11.1)", "types-Pil test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"] +[[package]] +name = "cookiecutter" +version = "2.6.0" +description = "A command-line utility that creates projects from project templates, e.g. creating a Python package project from a Python package project template." +optional = false +python-versions = ">=3.7" +files = [ + {file = "cookiecutter-2.6.0-py3-none-any.whl", hash = "sha256:a54a8e37995e4ed963b3e82831072d1ad4b005af736bb17b99c2cbd9d41b6e2d"}, + {file = "cookiecutter-2.6.0.tar.gz", hash = "sha256:db21f8169ea4f4fdc2408d48ca44859349de2647fbe494a9d6c3edfc0542c21c"}, +] + +[package.dependencies] +arrow = "*" +binaryornot = ">=0.4.4" +click = ">=7.0,<9.0.0" +Jinja2 = ">=2.7,<4.0.0" +python-slugify = ">=4.0.0" +pyyaml = ">=5.3.1" +requests = ">=2.23.0" +rich = "*" + [[package]] name = "cryptography" -version = "43.0.3" +version = "42.0.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, - {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, - {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, - {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, - {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, - {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, - {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, - {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, - {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, - {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, - {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, - {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, - {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, - {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, - {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, - {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, - {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, - {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, - {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, - {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, - {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, - {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, - {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, - {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, - {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, - {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, - {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, + {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, + {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, + {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, + {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, + {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, + {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, ] [package.dependencies] @@ -1167,7 +1785,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -1362,6 +1980,28 @@ files = [ {file = "cython-3.0.11.tar.gz", hash = "sha256:7146dd2af8682b4ca61331851e6aebce9fe5158e75300343f80c07ca80b1faff"}, ] +[[package]] +name = "dateparser" +version = "1.2.0" +description = "Date parsing library designed to parse dates from HTML pages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dateparser-1.2.0-py2.py3-none-any.whl", hash = "sha256:0b21ad96534e562920a0083e97fd45fa959882d4162acc358705144520a35830"}, + {file = "dateparser-1.2.0.tar.gz", hash = "sha256:7975b43a4222283e0ae15be7b4999d08c9a70e2d378ac87385b1ccf2cffbbb30"}, +] + +[package.dependencies] +python-dateutil = "*" +pytz = "*" +regex = "<2019.02.19 || >2019.02.19,<2021.8.27 || >2021.8.27" +tzlocal = "*" + +[package.extras] +calendars = ["convertdate", "hijri-converter"] +fasttext = ["fasttext"] +langdetect = ["langdetect"] + [[package]] name = "decorator" version = "4.4.2" @@ -1462,6 +2102,28 @@ idna = ["idna (>=3.7)"] trio = ["trio (>=0.23)"] wmi = ["wmi (>=1.5.1)"] +[[package]] +name = "docker" +version = "7.1.0" +description = "A Python 
library for the Docker Engine API." +optional = false +python-versions = ">=3.8" +files = [ + {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, + {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, +] + +[package.dependencies] +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" + +[package.extras] +dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] +docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] +ssh = ["paramiko (>=2.4.3)"] +websockets = ["websocket-client (>=1.3.0)"] + [[package]] name = "docopt" version = "0.6.2" @@ -1747,6 +2409,28 @@ files = [ flake8 = ">=3" pydocstyle = ">=2.1" +[[package]] +name = "flask" +version = "3.0.3" +description = "A simple framework for building complex web applications." +optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, + {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + [[package]] name = "flatbuffers" version = "24.3.25" @@ -2769,6 +3453,17 @@ files = [ [package.extras] colors = ["colorama (>=0.4.6)"] +[[package]] +name = "itsdangerous" +version = "2.2.0" +description = "Safely pass data to untrusted environments and back." +optional = false +python-versions = ">=3.8" +files = [ + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, +] + [[package]] name = "jedi" version = "0.19.1" @@ -2909,6 +3604,112 @@ files = [ {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, ] +[[package]] +name = "jschema-to-python" +version = "1.2.3" +description = "Generate source code for Python classes from a JSON schema." 
+optional = false +python-versions = ">= 2.7" +files = [ + {file = "jschema_to_python-1.2.3-py3-none-any.whl", hash = "sha256:8a703ca7604d42d74b2815eecf99a33359a8dccbb80806cce386d5e2dd992b05"}, + {file = "jschema_to_python-1.2.3.tar.gz", hash = "sha256:76ff14fe5d304708ccad1284e4b11f96a658949a31ee7faed9e0995279549b91"}, +] + +[package.dependencies] +attrs = "*" +jsonpickle = "*" +pbr = "*" + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpickle" +version = "3.2.1" +description = "Python library for serializing arbitrary object graphs into JSON" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonpickle-3.2.1-py3-none-any.whl", hash = "sha256:ec291e4719674dd35d390fbdb521ac6517fbe9f541d361c8bffc8131133b1661"}, + {file = "jsonpickle-3.2.1.tar.gz", hash = "sha256:4b6d7640974199f7acf9035295365b5a1a71a91109effa15ba170fbb48cf871c"}, +] + +[package.extras] +docs = ["furo", "rst.linker (>=1.9)", "sphinx"] +packaging = ["build", "twine"] +testing = ["bson", "ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-benchmark", "pytest-benchmark[histogram]", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-ruff (>=0.2.1)", "scikit-learn", "scipy", "scipy (>=1.9.3)", "simplejson", "sqlalchemy", "ujson"] + +[[package]] +name = "jsonpointer" +version = "3.0.0" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, +] + +[[package]] +name = "jsonschema" +version = "4.22.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"}, + {file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.12.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", 
hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, +] + +[package.dependencies] +referencing = ">=0.31.0" + +[[package]] +name = "junit-xml" +version = "1.9" +description = "Creates JUnit XML test result documents that can be read by tools such as Jenkins" +optional = false +python-versions = "*" +files = [ + {file = "junit-xml-1.9.tar.gz", hash = "sha256:de16a051990d4e25a3982b2dd9e89d671067548718866416faec14d9de56db9f"}, + {file = "junit_xml-1.9-py2.py3-none-any.whl", hash = "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732"}, +] + +[package.dependencies] +six = "*" + [[package]] name = "kiwisolver" version = "1.4.7" @@ -3957,6 +4758,202 @@ files = [ {file = "murmurhash-1.0.10.tar.gz", hash = "sha256:5282aab1317804c6ebd6dd7f69f15ba9075aee671c44a34be2bde0f1b11ef88a"}, ] +[[package]] +name = "mypy-boto3-apigateway" +version = "1.34.56" +description = "Type annotations for boto3.APIGateway 1.34.56 service generated with mypy-boto3-builder 7.23.2" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-boto3-apigateway-1.34.56.tar.gz", hash = "sha256:5ab15db30d730198384d6445d50cc1fec98361b85366dd092b10e9b9f4a1a2cb"}, + {file = "mypy_boto3_apigateway-1.34.56-py3-none-any.whl", hash = "sha256:3695503bd9388b3067c1dc1dc4ec1486dcec450dbf1e9889f451120e333b92da"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-cloudformation" +version = "1.34.111" +description = "Type annotations for boto3.CloudFormation 1.34.111 service generated with mypy-boto3-builder 7.24.0" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_boto3_cloudformation-1.34.111-py3-none-any.whl", hash = "sha256:526e928c504fa2880b1774aa10629a04fe0ec70ed2864ab3d3f7772386a1a925"}, + {file = "mypy_boto3_cloudformation-1.34.111.tar.gz", hash = "sha256:a02e201d1a9d9a8fb4db5b942d5c537a4e8861c611f0d986126674ac557cb9e8"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-ecr" +version = "1.34.101" +description = "Type annotations for boto3.ECR 1.34.101 service generated with mypy-boto3-builder 7.24.0" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_boto3_ecr-1.34.101-py3-none-any.whl", hash = "sha256:723a3e6b65ec7451cc7c40232eb94e500fa51a77e360a52e028388e2b9afd161"}, + {file = "mypy_boto3_ecr-1.34.101.tar.gz", hash = "sha256:e9acb57088daa41b3fb378f8bb966ca4a4f1b62edb9a2483de582bcf9486a4fc"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-iam" +version = "1.34.83" +description = "Type annotations for boto3.IAM 1.34.83 service generated with mypy-boto3-builder 7.23.2" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-boto3-iam-1.34.83.tar.gz", hash = "sha256:7261315616757ebf7509df0e9b091d5942e470eb51c4b23c662a06873a9a8eca"}, + {file = "mypy_boto3_iam-1.34.83-py3-none-any.whl", hash = "sha256:dec66a98e29ec1e36178c24b8ff57aab6b91230df97557363bbd90ec06874768"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-kinesis" +version = "1.34.0" +description = "Type annotations for boto3.Kinesis 1.34.0 service generated with mypy-boto3-builder 7.21.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = 
"mypy-boto3-kinesis-1.34.0.tar.gz", hash = "sha256:f404e75badd5977e9f09741b769b8888854bdd411c631344686ab889efe98741"}, + {file = "mypy_boto3_kinesis-1.34.0-py3-none-any.whl", hash = "sha256:1add81c53f6e36599e0c22e142024867759aaf2f4954bcb2baa21c284c852377"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-lambda" +version = "1.34.77" +description = "Type annotations for boto3.Lambda 1.34.77 service generated with mypy-boto3-builder 7.23.2" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-boto3-lambda-1.34.77.tar.gz", hash = "sha256:7b81d2a5604fb592e92fe0b284ecd259de071703360a33b71c9b54df46d81c9c"}, + {file = "mypy_boto3_lambda-1.34.77-py3-none-any.whl", hash = "sha256:e21022d2eef12aa731af80790410afdba9412b056339823252813bae2adbf553"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-s3" +version = "1.34.120" +description = "Type annotations for boto3.S3 1.34.120 service generated with mypy-boto3-builder 7.24.0" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_boto3_s3-1.34.120-py3-none-any.whl", hash = "sha256:b123335d41882c5c955d24a09ff452ee836f24fb6dbc2f32654478580990aca1"}, + {file = "mypy_boto3_s3-1.34.120.tar.gz", hash = "sha256:d508a7bca6cc1100b2d4c8fc7dc9a0a71f3b2a275338191a0eac161c904ca7bc"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-schemas" +version = "1.34.0" +description = "Type annotations for boto3.Schemas 1.34.0 service generated with mypy-boto3-builder 7.21.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-schemas-1.34.0.tar.gz", hash = "sha256:3b25a71944192b0980c3bb5132deb7c06ee9b88580ed63f257fad97cf3bf2927"}, + {file = "mypy_boto3_schemas-1.34.0-py3-none-any.whl", hash = "sha256:28c016609dcffc606bd8425ee92894c46e943ab77033e1ae46481f00c39b7f75"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-secretsmanager" +version = "1.34.128" +description = "Type annotations for boto3.SecretsManager 1.34.128 service generated with mypy-boto3-builder 7.24.0" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_boto3_secretsmanager-1.34.128-py3-none-any.whl", hash = "sha256:7ce9815d116fa1749971691355b1e1c8f462d46e7eaa9d84133b8db96dd3515f"}, + {file = "mypy_boto3_secretsmanager-1.34.128.tar.gz", hash = "sha256:ae2b398efa1a32214c3eddb6901efa67cfc24a893b113f549a06bb70bb43b402"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-signer" +version = "1.34.95" +description = "Type annotations for boto3.Signer 1.34.95 service generated with mypy-boto3-builder 7.24.0" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_boto3_signer-1.34.95-py3-none-any.whl", hash = "sha256:2c8104508f975606d116cf5148333eeaa4e268eb8134afce2b3e41e580ebafc6"}, + {file = "mypy_boto3_signer-1.34.95.tar.gz", hash = "sha256:0118854f1664f7e27e03f5ce2fea3344cc4985fa6ed15a0d5c7a7146b4a4326f"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-sqs" +version = "1.34.121" +description = "Type annotations for boto3.SQS 1.34.121 service 
generated with mypy-boto3-builder 7.24.0" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_boto3_sqs-1.34.121-py3-none-any.whl", hash = "sha256:e92aefacfa08e7094b79002576ef261e4075f5af9c25219fc47fb8452f53fc5f"}, + {file = "mypy_boto3_sqs-1.34.121.tar.gz", hash = "sha256:bdbc623235ffc8127cb8753f49323f74a919df552247b0b2caaf85cf9bb495b8"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-stepfunctions" +version = "1.34.92" +description = "Type annotations for boto3.SFN 1.34.92 service generated with mypy-boto3-builder 7.24.0" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_boto3_stepfunctions-1.34.92-py3-none-any.whl", hash = "sha256:e92da1c245ef29b8ed784e5a70fcaef63ac754fa73de614324c30e4562bec8fd"}, + {file = "mypy_boto3_stepfunctions-1.34.92.tar.gz", hash = "sha256:499a5de2f854e5ecf45fee00a5ec20478b4130af1d9d61721e176a787970394c"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-sts" +version = "1.34.0" +description = "Type annotations for boto3.STS 1.34.0 service generated with mypy-boto3-builder 7.21.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-sts-1.34.0.tar.gz", hash = "sha256:b347e0a336d60162dd94074d9d10f614f2b09a455c9b42415850d54d676e2067"}, + {file = "mypy_boto3_sts-1.34.0-py3-none-any.whl", hash = "sha256:3ba1875c2792b4f35fd918dca957ce09ad197beb7cfbda61f42144ffa9cda05d"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-xray" +version = "1.34.0" +description = "Type annotations for boto3.XRay 1.34.0 service generated with mypy-boto3-builder 7.21.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-xray-1.34.0.tar.gz", hash = "sha256:f30785798022b7f0c114e851790af9b92cb4026ed28757e962d30fb4391af8e2"}, + {file = "mypy_boto3_xray-1.34.0-py3-none-any.whl", hash = "sha256:742de92c57efc3e14ef27d9a5bfd2f528f095acf11ff4198be2cba6bfee4c7a1"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -4460,6 +5457,17 @@ files = [ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] +[[package]] +name = "pbr" +version = "6.0.0" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-6.0.0-py2.py3-none-any.whl", hash = "sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda"}, + {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, +] + [[package]] name = "pefile" version = "2023.2.7" @@ -5778,17 +6786,17 @@ pyobjc-framework-Cocoa = ">=10.3.1" [[package]] name = "pyopenssl" -version = "24.2.1" +version = "24.1.0" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.7" files = [ - {file = "pyOpenSSL-24.2.1-py3-none-any.whl", hash = "sha256:967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d"}, - {file = "pyopenssl-24.2.1.tar.gz", hash = "sha256:4247f0dbe3748d560dcbb2ff3ea01af0f9a1a001ef5f7c4c647956ed8cbf0e95"}, + {file = "pyOpenSSL-24.1.0-py3-none-any.whl", hash = 
"sha256:17ed5be5936449c5418d1cd269a1a9e9081bc54c17aed272b45856a3d3dc86ad"}, + {file = "pyOpenSSL-24.1.0.tar.gz", hash = "sha256:cabed4bfaa5df9f1a16c0ef64a0cb65318b5cd077a7eda7d6970131ca2f41a6f"}, ] [package.dependencies] -cryptography = ">=41.0.5,<44" +cryptography = ">=41.0.5,<43" [package.extras] docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"] @@ -6153,6 +7161,23 @@ files = [ {file = "python_multipart-0.0.17.tar.gz", hash = "sha256:41330d831cae6e2f22902704ead2826ea038d0419530eadff3ea80175aec5538"}, ] +[[package]] +name = "python-slugify" +version = "8.0.4" +description = "A Python slugify application that also handles Unicode" +optional = false +python-versions = ">=3.7" +files = [ + {file = "python-slugify-8.0.4.tar.gz", hash = "sha256:59202371d1d05b54a9e7720c5e038f928f45daaffe41dd10822f3907b937c856"}, + {file = "python_slugify-8.0.4-py2.py3-none-any.whl", hash = "sha256:276540b79961052b66b7d116620b36518847f52d5fd9e3a70164fc8c50faa6b8"}, +] + +[package.dependencies] +text-unidecode = ">=1.3" + +[package.extras] +unidecode = ["Unidecode (>=1.1.1)"] + [[package]] name = "python-xlib" version = "0.33" @@ -6525,6 +7550,21 @@ PyYAML = "*" Shapely = ">=1.7.1" six = ">=1.15.0" +[[package]] +name = "referencing" +version = "0.35.1" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, + {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + [[package]] name = "regex" version = "2024.11.6" @@ -6685,23 +7725,121 @@ requests = ">=1.0.0" [[package]] name = "rich" -version = "13.9.4" +version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.7.0" files = [ - {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, - {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, ] [package.dependencies] markdown-it-py = ">=2.2.0" pygments = ">=2.13.0,<3.0.0" -typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "rpds-py" +version = "0.21.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.9" +files = [ + {file = "rpds_py-0.21.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a017f813f24b9df929674d0332a374d40d7f0162b326562daae8066b502d0590"}, + {file = "rpds_py-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20cc1ed0bcc86d8e1a7e968cce15be45178fd16e2ff656a243145e0b439bd250"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad116dda078d0bc4886cb7840e19811562acdc7a8e296ea6ec37e70326c1b41c"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:808f1ac7cf3b44f81c9475475ceb221f982ef548e44e024ad5f9e7060649540e"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de552f4a1916e520f2703ec474d2b4d3f86d41f353e7680b597512ffe7eac5d0"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efec946f331349dfc4ae9d0e034c263ddde19414fe5128580f512619abed05f1"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b80b4690bbff51a034bfde9c9f6bf9357f0a8c61f548942b80f7b66356508bf5"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:085ed25baac88953d4283e5b5bd094b155075bb40d07c29c4f073e10623f9f2e"}, + {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:daa8efac2a1273eed2354397a51216ae1e198ecbce9036fba4e7610b308b6153"}, + {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:95a5bad1ac8a5c77b4e658671642e4af3707f095d2b78a1fdd08af0dfb647624"}, + {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3e53861b29a13d5b70116ea4230b5f0f3547b2c222c5daa090eb7c9c82d7f664"}, + {file = "rpds_py-0.21.0-cp310-none-win32.whl", hash = "sha256:ea3a6ac4d74820c98fcc9da4a57847ad2cc36475a8bd9683f32ab6d47a2bd682"}, + {file = "rpds_py-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:b8f107395f2f1d151181880b69a2869c69e87ec079c49c0016ab96860b6acbe5"}, + {file = "rpds_py-0.21.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95"}, + {file = "rpds_py-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75"}, + {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f"}, + {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a"}, + {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8"}, + {file = "rpds_py-0.21.0-cp311-none-win32.whl", hash = "sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a"}, + {file = "rpds_py-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e"}, + {file = "rpds_py-0.21.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d"}, + {file = "rpds_py-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4"}, + {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca"}, + {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b"}, + {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11"}, + {file = "rpds_py-0.21.0-cp312-none-win32.whl", hash = "sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952"}, + {file = "rpds_py-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd"}, + {file = "rpds_py-0.21.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937"}, + {file = "rpds_py-0.21.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3"}, + {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a"}, + {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3"}, + {file 
= "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976"}, + {file = "rpds_py-0.21.0-cp313-none-win32.whl", hash = "sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202"}, + {file = "rpds_py-0.21.0-cp313-none-win_amd64.whl", hash = "sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e"}, + {file = "rpds_py-0.21.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2c51d99c30091f72a3c5d126fad26236c3f75716b8b5e5cf8effb18889ced928"}, + {file = "rpds_py-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbd7504a10b0955ea287114f003b7ad62330c9e65ba012c6223dba646f6ffd05"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6dcc4949be728ede49e6244eabd04064336012b37f5c2200e8ec8eb2988b209c"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f414da5c51bf350e4b7960644617c130140423882305f7574b6cf65a3081cecb"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9afe42102b40007f588666bc7de82451e10c6788f6f70984629db193849dced1"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b929c2bb6e29ab31f12a1117c39f7e6d6450419ab7464a4ea9b0b417174f044"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8404b3717da03cbf773a1d275d01fec84ea007754ed380f63dfc24fb76ce4592"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e12bb09678f38b7597b8346983d2323a6482dcd59e423d9448108c1be37cac9d"}, + {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:58a0e345be4b18e6b8501d3b0aa540dad90caeed814c515e5206bb2ec26736fd"}, + {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c3761f62fcfccf0864cc4665b6e7c3f0c626f0380b41b8bd1ce322103fa3ef87"}, + {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c2b2f71c6ad6c2e4fc9ed9401080badd1469fa9889657ec3abea42a3d6b2e1ed"}, + {file = "rpds_py-0.21.0-cp39-none-win32.whl", hash = "sha256:b21747f79f360e790525e6f6438c7569ddbfb1b3197b9e65043f25c3c9b489d8"}, + {file = "rpds_py-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:0626238a43152918f9e72ede9a3b6ccc9e299adc8ade0d67c5e142d564c9a83d"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6b4ef7725386dc0762857097f6b7266a6cdd62bfd209664da6712cb26acef035"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6bc0e697d4d79ab1aacbf20ee5f0df80359ecf55db33ff41481cf3e24f206919"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da52d62a96e61c1c444f3998c434e8b263c384f6d68aca8274d2e08d1906325c"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:98e4fe5db40db87ce1c65031463a760ec7906ab230ad2249b4572c2fc3ef1f9f"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30bdc973f10d28e0337f71d202ff29345320f8bc49a31c90e6c257e1ccef4333"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:faa5e8496c530f9c71f2b4e1c49758b06e5f4055e17144906245c99fa6d45356"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:32eb88c30b6a4f0605508023b7141d043a79b14acb3b969aa0b4f99b25bc7d4a"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a89a8ce9e4e75aeb7fa5d8ad0f3fecdee813802592f4f46a15754dcb2fd6b061"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:241e6c125568493f553c3d0fdbb38c74babf54b45cef86439d4cd97ff8feb34d"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:3b766a9f57663396e4f34f5140b3595b233a7b146e94777b97a8413a1da1be18"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:af4a644bf890f56e41e74be7d34e9511e4954894d544ec6b8efe1e21a1a8da6c"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3e30a69a706e8ea20444b98a49f386c17b26f860aa9245329bab0851ed100677"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:031819f906bb146561af051c7cef4ba2003d28cff07efacef59da973ff7969ba"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b876f2bc27ab5954e2fd88890c071bd0ed18b9c50f6ec3de3c50a5ece612f7a6"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc5695c321e518d9f03b7ea6abb5ea3af4567766f9852ad1560f501b17588c7b"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b4de1da871b5c0fd5537b26a6fc6814c3cc05cabe0c941db6e9044ffbb12f04a"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:878f6fea96621fda5303a2867887686d7a198d9e0f8a40be100a63f5d60c88c9"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8eeec67590e94189f434c6d11c426892e396ae59e4801d17a93ac96b8c02a6c"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff2eba7f6c0cb523d7e9cff0903f2fe1feff8f0b2ceb6bd71c0e20a4dcee271"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a429b99337062877d7875e4ff1a51fe788424d522bd64a8c0a20ef3021fdb6ed"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d167e4dbbdac48bd58893c7e446684ad5d425b407f9336e04ab52e8b9194e2ed"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eb2de8a147ffe0626bfdc275fc6563aa7bf4b6db59cf0d44f0ccd6ca625a24e"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e78868e98f34f34a88e23ee9ccaeeec460e4eaf6db16d51d7a9b883e5e785a5e"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4991ca61656e3160cdaca4851151fd3f4a92e9eba5c7a530ab030d6aee96ec89"}, + {file = "rpds_py-0.21.0.tar.gz", hash = "sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db"}, +] + [[package]] name = "rsa" version = "4.9" @@ -6716,6 +7854,83 @@ files = [ [package.dependencies] pyasn1 = ">=0.1.3" +[[package]] +name = "ruamel-yaml" +version = "0.18.6" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, + {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, +] + 
+[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} + +[package.extras] +docs = ["mercurial (>5.7)", "ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.8" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +optional = false +python-versions = ">=3.6" +files = [ + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, +] + [[package]] name = "s3transfer" version = "0.10.3" @@ -6733,6 +7948,21 @@ botocore = ">=1.33.2,<2.0a.0" [package.extras] crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] +[[package]] +name = "sarif-om" +version = "1.0.4" +description = "Classes implementing the SARIF 2.1.0 object model." +optional = false +python-versions = ">= 2.7" +files = [ + {file = "sarif_om-1.0.4-py3-none-any.whl", hash = "sha256:539ef47a662329b1c8502388ad92457425e95dc0aaaf995fe46f4984c4771911"}, + {file = "sarif_om-1.0.4.tar.gz", hash = "sha256:cd5f416b3083e00d402a92e449a7ff67af46f11241073eea0461802a3b5aef98"}, +] + +[package.dependencies] +attrs = "*" +pbr = "*" + [[package]] name = "scikit-image" version = "0.24.0" @@ -7842,6 +9072,17 @@ files = [ [package.extras] tests = ["pytest", "pytest-cov"] +[[package]] +name = "text-unidecode" +version = "1.3" +description = "The most basic Text::Unidecode port" +optional = false +python-versions = "*" +files = [ + {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, + {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, +] + [[package]] name = "thinc" version = "8.2.5" @@ -8099,13 +9340,13 @@ files = [ [[package]] name = "tomlkit" -version = "0.13.2" +version = "0.12.5" description = "Style preserving TOML library" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, - {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, + {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, + {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, ] [[package]] @@ -8399,6 +9640,39 @@ rich = ">=10.11.0" shellingham = ">=1.3.0" typing-extensions = ">=3.7.4.3" +[[package]] +name = "types-awscrt" +version = "0.20.12" +description = "Type annotations and code completion for awscrt" +optional = false +python-versions = "<4.0,>=3.7" +files = [ + {file = "types_awscrt-0.20.12-py3-none-any.whl", hash = 
"sha256:521ce54cc4dad9fe6480556bb0f8315a508106938ba1f2a0baccfcea7d4a4dee"}, + {file = "types_awscrt-0.20.12.tar.gz", hash = "sha256:0beabdde0205dc1da679ea464fd3f98b570ef4f0fc825b155a974fb51b21e8d9"}, +] + +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20240316" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"}, + {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"}, +] + +[[package]] +name = "types-s3transfer" +version = "0.10.1" +description = "Type annotations and code completion for s3transfer" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "types_s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:49a7c81fa609ac1532f8de3756e64b58afcecad8767933310228002ec7adff74"}, + {file = "types_s3transfer-0.10.1.tar.gz", hash = "sha256:02154cce46528287ad76ad1a0153840e0492239a0887e8833466eccf84b98da0"}, +] + [[package]] name = "typing-extensions" version = "4.12.2" @@ -8421,6 +9695,23 @@ files = [ {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] +[[package]] +name = "tzlocal" +version = "5.2" +description = "tzinfo object for the local timezone" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, + {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, +] + +[package.dependencies] +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + [[package]] name = "ultralytics" version = "8.3.28" @@ -8657,6 +9948,50 @@ files = [ [package.dependencies] colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\" and python_version >= \"3.7\""} +[[package]] +name = "watchdog" +version = "4.0.1" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.8" +files = [ + {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"}, + {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"}, + {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, + {file = 
"watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"}, + {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, + {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, + {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, + {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, + {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, + {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, + {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, + {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, + {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, + {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + [[package]] name = "watchfiles" version = "0.24.0" @@ 
-8864,6 +10199,23 @@ files = [ {file = "websockets-11.0.3.tar.gz", hash = "sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016"}, ] +[[package]] +name = "werkzeug" +version = "3.0.3" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, + {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + [[package]] name = "wheel" version = "0.43.0" @@ -9124,4 +10476,4 @@ testing = ["coverage[toml]", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "3.10.x" -content-hash = "fbb9a8c0ac03708a131f06d1d3f7086d7718dacbf03d199b70e2df76e23640dd" +content-hash = "3856f7e8c4f57246e194f0d3bfb50a3041b4784023d08fc43dfc735b7cf73649" diff --git a/pyproject.toml b/pyproject.toml index 5a5b33795..abaa3cb79 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -109,6 +109,10 @@ tokencost = "^0.1.12" numba = "^0.60.0" llvmlite = "^0.43.0" ell-ai = "^0.0.14" + +[tool.poetry.group.dev.dependencies] +aws-sam-cli = "^1.119.0" + [tool.pytest.ini_options] filterwarnings = [ # suppress warnings starting from "setuptools>=67.3" diff --git a/scripts/recording_uploader/.gitignore b/scripts/recording_uploader/.gitignore new file mode 100644 index 000000000..bbccfb121 --- /dev/null +++ b/scripts/recording_uploader/.gitignore @@ -0,0 +1,244 @@ + +# Created by https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode + +### Linux ### +*~ + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +### OSX ### +*.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### PyCharm ### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff: +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/dictionaries + +# Sensitive or high-churn files: +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.xml +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml + +# Gradle: +.idea/**/gradle.xml +.idea/**/libraries + +# CMake +cmake-build-debug/ + +# Mongo Explorer plugin: +.idea/**/mongoSettings.xml + +## File-based project format: +*.iws + +## Plugin-specific files: + +# IntelliJ +/out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# Ruby plugin and RubyMine +/.rakeTasks + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties 
+crashlytics-build.properties +fabric.properties + +### PyCharm Patch ### +# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 + +# *.iml +# modules.xml +# .idea/misc.xml +# *.ipr + +# Sonarlint plugin +.idea/sonarlint + +### Python ### +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +.pytest_cache/ +nosetests.xml +coverage.xml +*.cover +.hypothesis/ + +# Translations +*.mo +*.pot + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule.* + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +.history + +### Windows ### +# Windows thumbnail cache files +Thumbs.db +ehthumbs.db +ehthumbs_vista.db + +# Folder config file +Desktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msm +*.msp + +# Windows shortcuts +*.lnk + +# Build folder + +*/build/* + +# End of https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode diff --git a/scripts/recording_uploader/README.md b/scripts/recording_uploader/README.md new file mode 100644 index 000000000..14cb8ac20 --- /dev/null +++ b/scripts/recording_uploader/README.md @@ -0,0 +1,87 @@ +# recording-uploader + +This project contains source code and supporting files for a serverless application that you can deploy with the SAM CLI. It includes the following files and folders. + +- uploader - Code for the application's Lambda function. +- template.yaml - A template that defines the application's AWS resources. + +The application uses several AWS resources, including Lambda functions and an API Gateway API. These resources are defined in the `template.yaml` file in this project. You can update the template to add AWS resources through the same deployment process that updates your application code. + +If you prefer to use an integrated development environment (IDE) to build and test your application, you can use the AWS Toolkit. +The AWS Toolkit is an open source plug-in for popular IDEs that uses the SAM CLI to build and deploy serverless applications on AWS. The AWS Toolkit also adds a simplified step-through debugging experience for Lambda function code. See the following links to get started. 
+ +* [CLion](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [GoLand](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [IntelliJ](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [WebStorm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [Rider](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [PhpStorm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [PyCharm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [RubyMine](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [DataGrip](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [VS Code](https://docs.aws.amazon.com/toolkit-for-vscode/latest/userguide/welcome.html) +* [Visual Studio](https://docs.aws.amazon.com/toolkit-for-visual-studio/latest/user-guide/welcome.html) + +## Deploy the application + +There is a `deploy` script that creates the s3 bucket and deploys the application using the SAM CLI (included as part of the dev dependencies of this project). The bucket name is hardcoded in the script. The SAM CLI is set up to run in `guided` mode, which will prompt the user every time befor deploying, in case the user wants to change the default values. + + +You can find your API Gateway Endpoint URL in the output values displayed after deployment. + +## Use the SAM CLI to build and test locally + +Build your application with the `sam build --use-container` command. + +```bash +recording-uploader$ sam build --use-container +``` + +The SAM CLI installs dependencies defined in `uploader/requirements.txt`, creates a deployment package, and saves it in the `.aws-sam/build` folder. + +Run functions locally and invoke them with the `sam local invoke` command. + +```bash +recording-uploader$ sam local invoke RecordingUploadFunction +``` + +The SAM CLI can also emulate your application's API. Use the `sam local start-api` to run the API locally on port 3000. + +```bash +recording-uploader$ sam local start-api +recording-uploader$ curl http://localhost:3000/ +``` + +The SAM CLI reads the application template to determine the API's routes and the functions that they invoke. The `Events` property on each function's definition includes the route and method for each path. + +```yaml + Events: + RecordingUpload: + Type: Api + Properties: + Path: /upload + Method: get +``` + +## Add a resource to your application +The application template uses AWS Serverless Application Model (AWS SAM) to define application resources. AWS SAM is an extension of AWS CloudFormation with a simpler syntax for configuring common serverless application resources such as functions, triggers, and APIs. For resources not included in [the SAM specification](https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md), you can use standard [AWS CloudFormation](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html) resource types. + +## Fetch, tail, and filter Lambda function logs + +To simplify troubleshooting, SAM CLI has a command called `sam logs`. `sam logs` lets you fetch logs generated by your deployed Lambda function from the command line. In addition to printing the logs on the terminal, this command has several nifty features to help you quickly find the bug. 
+ +`NOTE`: This command works for all AWS Lambda functions; not just the ones you deploy using SAM. + +```bash +recording-uploader$ sam logs -n RecordingUploadFunction --stack-name "recording-uploader" --tail +``` + +You can find more information and examples about filtering Lambda function logs in the [SAM CLI Documentation](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-logging.html). + +## Cleanup + +To delete the sample application that you created, use the AWS CLI. Assuming you used your project name for the stack name, you can run the following: + +```bash +sam delete --stack-name "recording-uploader" +``` diff --git a/scripts/recording_uploader/__init__.py b/scripts/recording_uploader/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/scripts/recording_uploader/deploy.py b/scripts/recording_uploader/deploy.py new file mode 100644 index 000000000..2fa0adc6d --- /dev/null +++ b/scripts/recording_uploader/deploy.py @@ -0,0 +1,41 @@ +"""Entrypoint to deploy the uploader to AWS Lambda.""" + + +import pathlib +import subprocess + +import boto3 +import fire + +CURRENT_DIR = pathlib.Path(__file__).parent + + +def main(region_name: str = "us-east-1", guided: bool = True): + s3 = boto3.client( + "s3", + region_name=region_name, + endpoint_url=f"https://s3.{region_name}.amazonaws.com", + ) + bucket = "openadapt" + + # create the S3 bucket, if it doesn't already exist + try: + s3.create_bucket( + ACL="private", + Bucket=bucket, + ) + except (s3.exceptions.BucketAlreadyExists, s3.exceptions.BucketAlreadyOwnedByYou): + proceed = input(f"Bucket '{bucket}' already exists. Proceed? [y/N] ") + if proceed.lower() != "y": + return + + # deploy the code to AWS Lambda + commands = ["sam", "deploy"] + if guided: + commands.append("--guided") + subprocess.run(commands, cwd=CURRENT_DIR, check=True) + print("Lambda function deployed successfully.") + + +if __name__ == "__main__": + fire.Fire(main) diff --git a/scripts/recording_uploader/samconfig.toml b/scripts/recording_uploader/samconfig.toml new file mode 100644 index 000000000..1edc0b6d3 --- /dev/null +++ b/scripts/recording_uploader/samconfig.toml @@ -0,0 +1,34 @@ +# More information about the configuration file can be found here: +# https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html +version = 0.1 + +[default] +[default.global.parameters] +stack_name = "recording-uploader" + +[default.build.parameters] +cached = true +parallel = true + +[default.validate.parameters] +lint = true + +[default.deploy.parameters] +capabilities = "CAPABILITY_IAM" +confirm_changeset = false +resolve_s3 = true +s3_prefix = "recording-uploader" +region = "us-east-1" +image_repositories = [] + +[default.package.parameters] +resolve_s3 = true + +[default.sync.parameters] +watch = true + +[default.local_start_api.parameters] +warm_containers = "EAGER" + +[default.local_start_lambda.parameters] +warm_containers = "EAGER" diff --git a/scripts/recording_uploader/template.yaml b/scripts/recording_uploader/template.yaml new file mode 100644 index 000000000..815aacdf2 --- /dev/null +++ b/scripts/recording_uploader/template.yaml @@ -0,0 +1,48 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: > + recording-uploader + + Sample SAM Template for recording-uploader + +# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst +Globals: + Function: + Timeout: 3 + 
+Resources: + RecordingUploadFunction: + Type: AWS::Serverless::Function # More info about Function Resource: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#awsserverlessfunction + Properties: + CodeUri: uploader/ + Handler: app.lambda_handler + Runtime: python3.10 + Architectures: + - x86_64 + Events: + RecordingUpload: + Type: Api # More info about API Event Source: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#api + Properties: + Path: /upload + Method: get + Policies: + - Statement: + - Sid: S3PutObjectPolicy + Effect: Allow + Action: + - s3:PutObject + Resource: !Sub "arn:aws:s3:::openadapt/*" + +Outputs: + # ServerlessRestApi is an implicit API created out of Events key under Serverless::Function + # Find out more about other implicit resources you can reference within SAM + # https://github.com/awslabs/serverless-application-model/blob/master/docs/internals/generated_resources.rst#api + RecordingUploadApi: + Description: "API Gateway endpoint URL for Prod stage for Recording Upload function" + Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/upload/" + RecordingUploadFunction: + Description: "Recording Upload Lambda Function ARN" + Value: !GetAtt RecordingUploadFunction.Arn + RecordingUploadFunctionIamRole: + Description: "Implicit IAM Role created for Recording Upload function" + Value: !GetAtt RecordingUploadFunctionRole.Arn diff --git a/scripts/recording_uploader/uploader/__init__.py b/scripts/recording_uploader/uploader/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/scripts/recording_uploader/uploader/app.py b/scripts/recording_uploader/uploader/app.py new file mode 100644 index 000000000..7a7c920c5 --- /dev/null +++ b/scripts/recording_uploader/uploader/app.py @@ -0,0 +1,40 @@ +"""Lambda function for generating a presigned URL for uploading a recording to S3.""" + +from typing import Any +from uuid import uuid4 +import json + +from botocore.client import Config +import boto3 + + +def lambda_handler(*args: Any, **kwargs: Any): + """Main entry point for the lambda function.""" + return { + "statusCode": 200, + "body": json.dumps(get_presigned_url()), + } + + +def get_presigned_url(): + """Generate a presigned URL for uploading a recording to S3.""" + bucket = "openadapt" + region_name = "us-east-1" + s3 = boto3.client( + "s3", + config=Config(signature_version="s3v4"), + region_name=region_name, + endpoint_url=f"https://s3.{region_name}.amazonaws.com", + ) + key = f"recordings/{uuid4()}.zip" + + presigned_url = s3.generate_presigned_url( + ClientMethod="put_object", + Params={ + "Bucket": bucket, + "Key": key, + }, + ExpiresIn=3600, + ) + + return {"url": presigned_url} diff --git a/scripts/recording_uploader/uploader/requirements.txt b/scripts/recording_uploader/uploader/requirements.txt new file mode 100644 index 000000000..1b7a5c13f --- /dev/null +++ b/scripts/recording_uploader/uploader/requirements.txt @@ -0,0 +1 @@ +boto3==1.34.84 From 68a973f65820296755a2705c77e14a4d79f16756 Mon Sep 17 00:00:00 2001 From: Shohan Dutta Roy Date: Fri, 21 Jun 2024 16:28:55 +0530 Subject: [PATCH 02/13] feat: Add upload recording button in dashboard --- openadapt/app/dashboard/api/recordings.py | 9 ++++++ .../dashboard/app/recordings/detail/page.tsx | 31 +++++++++++++++++-- openadapt/config.py | 1 + openadapt/share.py | 26 ++++++++++++++++ openadapt/utils.py | 18 +++++++++++ 5 files changed, 83 insertions(+), 2 deletions(-) diff --git 
a/openadapt/app/dashboard/api/recordings.py b/openadapt/app/dashboard/api/recordings.py index 074e99f95..5066f5928 100644 --- a/openadapt/app/dashboard/api/recordings.py +++ b/openadapt/app/dashboard/api/recordings.py @@ -10,6 +10,7 @@ from openadapt.events import get_events from openadapt.models import Recording from openadapt.plotting import display_event +from openadapt.share import upload_recording_to_s3 from openadapt.utils import image2utf8, row2dict @@ -29,6 +30,9 @@ def attach_routes(self) -> APIRouter: self.app.add_api_route("/start", self.start_recording) self.app.add_api_route("/stop", self.stop_recording) self.app.add_api_route("/status", self.recording_status) + self.app.add_api_route( + "/{recording_id}/upload", self.upload_recording, methods=["POST"] + ) self.recording_detail_route() return self.app @@ -63,6 +67,11 @@ def recording_status() -> dict[str, bool]: """Get the recording status.""" return {"recording": cards.is_recording()} + def upload_recording(self, recording_id: int) -> dict[str, str]: + """Upload a recording.""" + upload_recording_to_s3(recording_id) + return {"message": "Recording uploaded"} + def recording_detail_route(self) -> None: """Add the recording detail route as a websocket.""" diff --git a/openadapt/app/dashboard/app/recordings/detail/page.tsx b/openadapt/app/dashboard/app/recordings/detail/page.tsx index ca4e23eda..d83000857 100644 --- a/openadapt/app/dashboard/app/recordings/detail/page.tsx +++ b/openadapt/app/dashboard/app/recordings/detail/page.tsx @@ -4,7 +4,8 @@ import { ActionEvents } from "@/components/ActionEvent/ActionEvents"; import { RecordingDetails } from "@/components/RecordingDetails"; import { ActionEvent as ActionEventType } from "@/types/action-event"; import { Recording as RecordingType } from "@/types/recording"; -import { Box, Loader, Progress } from "@mantine/core"; +import { Box, Button, Grid, Loader, Progress } from "@mantine/core"; +import { notifications } from "@mantine/notifications"; import { useSearchParams } from "next/navigation"; import { Suspense, useEffect, useState } from "react"; @@ -57,6 +58,25 @@ function Recording() { websocket.close(); } }, [id]); + function uploadRecording() { + fetch(`/api/recordings/${id}/upload`, { + method: "POST", + }).then(resp => { + if (resp.status === 200) { + notifications.show({ + title: "Recording uploaded", + message: "Recording has been uploaded successfully", + color: "teal", + }); + } else { + notifications.show({ + title: "Error", + message: "Failed to upload recording", + color: "red", + }); + } + }); + } if (!recordingInfo) { return ; } @@ -64,7 +84,14 @@ function Recording() { return ( - + + + + + + + + {actionEvents.length && actionEvents.length < recordingInfo.num_events && ( diff --git a/openadapt/config.py b/openadapt/config.py index ca3c01801..dfbf4ed75 100644 --- a/openadapt/config.py +++ b/openadapt/config.py @@ -29,6 +29,7 @@ VIDEO_DIR_PATH = DATA_DIR_PATH / "videos" DATABASE_FILE_PATH = (DATA_DIR_PATH / "openadapt.db").absolute() DATABASE_LOCK_FILE_PATH = DATA_DIR_PATH / "openadapt.db.lock" +RECORDING_UPLOAD_URL = "" STOP_STRS = [ "oa.stop", diff --git a/openadapt/share.py b/openadapt/share.py index 2008b9d99..4a6a7d8da 100644 --- a/openadapt/share.py +++ b/openadapt/share.py @@ -5,6 +5,7 @@ python -m openadapt.share receive """ +from threading import Thread from zipfile import ZIP_DEFLATED, ZipFile import os import re @@ -173,6 +174,31 @@ def receive_recording(wormhole_code: str) -> None: logger.info(f"deleted {zip_path=}") +def 
upload_recording_to_s3(recording_id): + """Upload a recording to an S3 bucket. + + Args: + recording_id (int): The ID of the recording to upload. + """ + + def _inner(): + try: + # Export the recording to a zip file + zip_file_path = export_recording_to_folder(recording_id) + + # Upload the zip file to the S3 bucket + utils.upload_file_to_s3(zip_file_path) + + # Delete the zip file after uploading + if os.path.exists(zip_file_path): + os.remove(zip_file_path) + logger.info(f"deleted {zip_file_path=}") + except Exception as exc: + logger.exception(exc) + + Thread(target=_inner).start() + + # Create a command-line interface using python-fire and utils.get_functions if __name__ == "__main__": fire.Fire( diff --git a/openadapt/utils.py b/openadapt/utils.py index c57763694..523540088 100644 --- a/openadapt/utils.py +++ b/openadapt/utils.py @@ -20,6 +20,7 @@ from jinja2 import Environment, FileSystemLoader from PIL import Image, ImageEnhance from posthog import Posthog +import requests from openadapt.build_utils import is_running_from_executable, redirect_stdout_stderr from openadapt.custom_logger import logger @@ -44,6 +45,7 @@ PERFORMANCE_PLOTS_DIR_PATH, POSTHOG_HOST, POSTHOG_PUBLIC_KEY, + RECORDING_UPLOAD_URL, config, ) from openadapt.custom_logger import filter_log_messages @@ -946,6 +948,22 @@ def get_posthog_instance() -> DistinctIDPosthog: return posthog +def upload_file_to_s3(file_path: str) -> None: + """Upload a file to an S3 bucket. + + Args: + file_path (str): The path to the file to upload. + """ + filename = os.path.basename(file_path) + resp = requests.get(RECORDING_UPLOAD_URL) + upload_url = resp.json()["url"] + + with open(file_path, "rb") as file: + files = {"file": (filename, file)} + resp = requests.put(upload_url, files=files) + resp.raise_for_status() + + def retry_with_exceptions(max_retries: int = 5) -> Callable: """Decorator to retry a function while keeping track of exceptions.""" From dbf76beafb1490026048e324ada78bde32a5e3cd Mon Sep 17 00:00:00 2001 From: Shohan Dutta Roy Date: Fri, 21 Jun 2024 19:28:03 +0530 Subject: [PATCH 03/13] chore: Fix flake8 lint errors --- openadapt/share.py | 4 ++-- scripts/recording_uploader/__init__.py | 1 + scripts/recording_uploader/deploy.py | 8 +++++++- scripts/recording_uploader/uploader/__init__.py | 1 + scripts/recording_uploader/uploader/app.py | 4 ++-- 5 files changed, 13 insertions(+), 5 deletions(-) diff --git a/openadapt/share.py b/openadapt/share.py index 4a6a7d8da..db7053f11 100644 --- a/openadapt/share.py +++ b/openadapt/share.py @@ -174,14 +174,14 @@ def receive_recording(wormhole_code: str) -> None: logger.info(f"deleted {zip_path=}") -def upload_recording_to_s3(recording_id): +def upload_recording_to_s3(recording_id: int) -> None: """Upload a recording to an S3 bucket. Args: recording_id (int): The ID of the recording to upload. 
""" - def _inner(): + def _inner() -> None: try: # Export the recording to a zip file zip_file_path = export_recording_to_folder(recording_id) diff --git a/scripts/recording_uploader/__init__.py b/scripts/recording_uploader/__init__.py index e69de29bb..0498cd0a6 100644 --- a/scripts/recording_uploader/__init__.py +++ b/scripts/recording_uploader/__init__.py @@ -0,0 +1 @@ +"""Init file for the recording_uploader package.""" diff --git a/scripts/recording_uploader/deploy.py b/scripts/recording_uploader/deploy.py index 2fa0adc6d..dd86efc85 100644 --- a/scripts/recording_uploader/deploy.py +++ b/scripts/recording_uploader/deploy.py @@ -10,7 +10,13 @@ CURRENT_DIR = pathlib.Path(__file__).parent -def main(region_name: str = "us-east-1", guided: bool = True): +def main(region_name: str = "us-east-1", guided: bool = True) -> None: + """Deploy the uploader to AWS Lambda. + + Args: + region_name (str): The AWS region to deploy the Lambda function to. + guided (bool): Whether to use the guided SAM deployment. + """ s3 = boto3.client( "s3", region_name=region_name, diff --git a/scripts/recording_uploader/uploader/__init__.py b/scripts/recording_uploader/uploader/__init__.py index e69de29bb..7ced81d1c 100644 --- a/scripts/recording_uploader/uploader/__init__.py +++ b/scripts/recording_uploader/uploader/__init__.py @@ -0,0 +1 @@ +"""Init file for the uploader package.""" diff --git a/scripts/recording_uploader/uploader/app.py b/scripts/recording_uploader/uploader/app.py index 7a7c920c5..7fe7e17f5 100644 --- a/scripts/recording_uploader/uploader/app.py +++ b/scripts/recording_uploader/uploader/app.py @@ -8,7 +8,7 @@ import boto3 -def lambda_handler(*args: Any, **kwargs: Any): +def lambda_handler(*args: Any, **kwargs: Any) -> dict: """Main entry point for the lambda function.""" return { "statusCode": 200, @@ -16,7 +16,7 @@ def lambda_handler(*args: Any, **kwargs: Any): } -def get_presigned_url(): +def get_presigned_url() -> dict: """Generate a presigned URL for uploading a recording to S3.""" bucket = "openadapt" region_name = "us-east-1" From 7fee87db2b7108aa44b38430cae51cc71580cd16 Mon Sep 17 00:00:00 2001 From: Shohan Dutta Roy Date: Mon, 24 Jun 2024 11:55:49 +0530 Subject: [PATCH 04/13] feat: Upload recording to user id specific folders --- openadapt/app/dashboard/api/recordings.py | 3 +- openadapt/app/dashboard/app/providers.tsx | 8 +- .../app/settings/(api_keys)/page.tsx | 10 +- .../app/settings/record_and_replay/page.tsx | 10 +- .../dashboard/app/settings/scrubbing/page.tsx | 10 +- openadapt/app/dashboard/app/utils.ts | 8 + openadapt/share.py | 10 +- openadapt/utils.py | 7 +- scripts/recording_uploader/.gitignore | 244 ------------------ scripts/recording_uploader/README.md | 2 +- scripts/recording_uploader/deploy.py | 3 +- scripts/recording_uploader/template.yaml | 2 +- scripts/recording_uploader/uploader/app.py | 36 ++- 13 files changed, 61 insertions(+), 292 deletions(-) delete mode 100644 scripts/recording_uploader/.gitignore diff --git a/openadapt/app/dashboard/api/recordings.py b/openadapt/app/dashboard/api/recordings.py index 5066f5928..48a5fd47c 100644 --- a/openadapt/app/dashboard/api/recordings.py +++ b/openadapt/app/dashboard/api/recordings.py @@ -5,6 +5,7 @@ from fastapi import APIRouter, WebSocket from openadapt.custom_logger import logger +from openadapt.config import config from openadapt.db import crud from openadapt.deprecated.app import cards from openadapt.events import get_events @@ -69,7 +70,7 @@ def recording_status() -> dict[str, bool]: def upload_recording(self, 
recording_id: int) -> dict[str, str]: """Upload a recording.""" - upload_recording_to_s3(recording_id) + upload_recording_to_s3(config.UNIQUE_USER_ID, recording_id) return {"message": "Recording uploaded"} def recording_detail_route(self) -> None: diff --git a/openadapt/app/dashboard/app/providers.tsx b/openadapt/app/dashboard/app/providers.tsx index d607f2634..784d4092c 100644 --- a/openadapt/app/dashboard/app/providers.tsx +++ b/openadapt/app/dashboard/app/providers.tsx @@ -3,6 +3,7 @@ import { get } from '@/api' import posthog from 'posthog-js' import { PostHogProvider } from 'posthog-js/react' import { useEffect } from 'react' +import { getSettings } from './utils' if (typeof window !== 'undefined') { if (process.env.NEXT_PUBLIC_MODE !== "development") { @@ -12,13 +13,6 @@ if (typeof window !== 'undefined') { } } -async function getSettings(): Promise> { - return get('/api/settings?category=general', { - cache: 'no-store', - }) -} - - export function CSPostHogProvider({ children }: { children: React.ReactNode }) { useEffect(() => { if (process.env.NEXT_PUBLIC_MODE !== "development") { diff --git a/openadapt/app/dashboard/app/settings/(api_keys)/page.tsx b/openadapt/app/dashboard/app/settings/(api_keys)/page.tsx index 3c2d15005..fea6cabbc 100644 --- a/openadapt/app/dashboard/app/settings/(api_keys)/page.tsx +++ b/openadapt/app/dashboard/app/settings/(api_keys)/page.tsx @@ -3,18 +3,12 @@ import React, { useEffect, useState } from 'react' import { get } from '@/api'; import { Form } from './form'; - - -async function getSettings(): Promise> { - return get('/api/settings?category=api_keys', { - cache: 'no-store', - }) -} +import { getSettings } from '@/app/utils'; export default function APIKeys () { const [settings, setSettings] = useState({}); useEffect(() => { - getSettings().then(setSettings); + getSettings("api_keys").then(setSettings); }, []) return ( diff --git a/openadapt/app/dashboard/app/settings/record_and_replay/page.tsx b/openadapt/app/dashboard/app/settings/record_and_replay/page.tsx index 8046d9fcc..3a216c303 100644 --- a/openadapt/app/dashboard/app/settings/record_and_replay/page.tsx +++ b/openadapt/app/dashboard/app/settings/record_and_replay/page.tsx @@ -3,18 +3,12 @@ import React, { useEffect, useState } from 'react' import { get } from '@/api'; import { Form } from './form'; - - -async function getSettings(): Promise> { - return get('/api/settings?category=record_and_replay', { - cache: 'no-store', - }) -} +import { getSettings } from '@/app/utils'; export default function APIKeys () { const [settings, setSettings] = useState({}); useEffect(() => { - getSettings().then(setSettings) + getSettings("record_and_replay").then(setSettings) }, []) return (
diff --git a/openadapt/app/dashboard/app/settings/scrubbing/page.tsx b/openadapt/app/dashboard/app/settings/scrubbing/page.tsx index 6387ad6ec..2a6b7d44f 100644 --- a/openadapt/app/dashboard/app/settings/scrubbing/page.tsx +++ b/openadapt/app/dashboard/app/settings/scrubbing/page.tsx @@ -3,18 +3,12 @@ import React, { useEffect, useState } from 'react' import { get } from '@/api'; import { Form } from './form'; - - -function getSettings(): Promise> { - return get('/api/settings?category=scrubbing', { - cache: 'no-store', - }) -} +import { getSettings } from '@/app/utils'; export default function APIKeys () { const [settings, setSettings] = useState({}); useEffect(() => { - getSettings().then(setSettings) + getSettings("scrubbing").then(setSettings) }, []) return ( diff --git a/openadapt/app/dashboard/app/utils.ts b/openadapt/app/dashboard/app/utils.ts index 712ff2d8a..f5de0289b 100644 --- a/openadapt/app/dashboard/app/utils.ts +++ b/openadapt/app/dashboard/app/utils.ts @@ -1,3 +1,4 @@ +import { get } from '@/api'; import moment from 'moment'; @@ -7,3 +8,10 @@ export const timeStampToDateString = (timeStamp: number) => { } return moment.unix(timeStamp).format('DD/MM/YYYY HH:mm:ss'); } + + +export const getSettings = (category: string = 'general'): Promise> => { + return get(`/api/settings?category=${category}`, { + cache: 'no-store', + }) +} diff --git a/openadapt/share.py b/openadapt/share.py index db7053f11..b30ac9b65 100644 --- a/openadapt/share.py +++ b/openadapt/share.py @@ -174,10 +174,11 @@ def receive_recording(wormhole_code: str) -> None: logger.info(f"deleted {zip_path=}") -def upload_recording_to_s3(recording_id: int) -> None: +def upload_recording_to_s3(user_id: str, recording_id: int) -> None: """Upload a recording to an S3 bucket. Args: + user_id (str): The ID of the user who owns the recording. recording_id (int): The ID of the recording to upload. """ @@ -187,7 +188,12 @@ def _inner() -> None: zip_file_path = export_recording_to_folder(recording_id) # Upload the zip file to the S3 bucket - utils.upload_file_to_s3(zip_file_path) + utils.upload_file_to_s3( + zip_file_path, + { + "user_id": user_id, + }, + ) # Delete the zip file after uploading if os.path.exists(zip_file_path): diff --git a/openadapt/utils.py b/openadapt/utils.py index 523540088..64654d38c 100644 --- a/openadapt/utils.py +++ b/openadapt/utils.py @@ -948,20 +948,21 @@ def get_posthog_instance() -> DistinctIDPosthog: return posthog -def upload_file_to_s3(file_path: str) -> None: +def upload_file_to_s3(file_path: str, body: dict) -> requests.Response: """Upload a file to an S3 bucket. Args: file_path (str): The path to the file to upload. + body (dict): The body of the request. 
""" filename = os.path.basename(file_path) - resp = requests.get(RECORDING_UPLOAD_URL) + resp = requests.post(RECORDING_UPLOAD_URL, json=body) upload_url = resp.json()["url"] with open(file_path, "rb") as file: files = {"file": (filename, file)} resp = requests.put(upload_url, files=files) - resp.raise_for_status() + return resp def retry_with_exceptions(max_retries: int = 5) -> Callable: diff --git a/scripts/recording_uploader/.gitignore b/scripts/recording_uploader/.gitignore deleted file mode 100644 index bbccfb121..000000000 --- a/scripts/recording_uploader/.gitignore +++ /dev/null @@ -1,244 +0,0 @@ - -# Created by https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode - -### Linux ### -*~ - -# temporary files which can be created if a process still has a handle open of a deleted file -.fuse_hidden* - -# KDE directory preferences -.directory - -# Linux trash folder which might appear on any partition or disk -.Trash-* - -# .nfs files are created when an open file is removed but is still being accessed -.nfs* - -### OSX ### -*.DS_Store -.AppleDouble -.LSOverride - -# Icon must end with two \r -Icon - -# Thumbnails -._* - -# Files that might appear in the root of a volume -.DocumentRevisions-V100 -.fseventsd -.Spotlight-V100 -.TemporaryItems -.Trashes -.VolumeIcon.icns -.com.apple.timemachine.donotpresent - -# Directories potentially created on remote AFP share -.AppleDB -.AppleDesktop -Network Trash Folder -Temporary Items -.apdisk - -### PyCharm ### -# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm -# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 - -# User-specific stuff: -.idea/**/workspace.xml -.idea/**/tasks.xml -.idea/dictionaries - -# Sensitive or high-churn files: -.idea/**/dataSources/ -.idea/**/dataSources.ids -.idea/**/dataSources.xml -.idea/**/dataSources.local.xml -.idea/**/sqlDataSources.xml -.idea/**/dynamic.xml -.idea/**/uiDesigner.xml - -# Gradle: -.idea/**/gradle.xml -.idea/**/libraries - -# CMake -cmake-build-debug/ - -# Mongo Explorer plugin: -.idea/**/mongoSettings.xml - -## File-based project format: -*.iws - -## Plugin-specific files: - -# IntelliJ -/out/ - -# mpeltonen/sbt-idea plugin -.idea_modules/ - -# JIRA plugin -atlassian-ide-plugin.xml - -# Cursive Clojure plugin -.idea/replstate.xml - -# Ruby plugin and RubyMine -/.rakeTasks - -# Crashlytics plugin (for Android Studio and IntelliJ) -com_crashlytics_export_strings.xml -crashlytics.properties -crashlytics-build.properties -fabric.properties - -### PyCharm Patch ### -# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 - -# *.iml -# modules.xml -# .idea/misc.xml -# *.ipr - -# Sonarlint plugin -.idea/sonarlint - -### Python ### -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -*.egg-info/ -.installed.cfg -*.egg - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. 
-*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.coverage -.coverage.* -.cache -.pytest_cache/ -nosetests.xml -coverage.xml -*.cover -.hypothesis/ - -# Translations -*.mo -*.pot - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# pyenv -.python-version - -# celery beat schedule file -celerybeat-schedule.* - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ - -### VisualStudioCode ### -.vscode/* -!.vscode/settings.json -!.vscode/tasks.json -!.vscode/launch.json -!.vscode/extensions.json -.history - -### Windows ### -# Windows thumbnail cache files -Thumbs.db -ehthumbs.db -ehthumbs_vista.db - -# Folder config file -Desktop.ini - -# Recycle Bin used on file shares -$RECYCLE.BIN/ - -# Windows Installer files -*.cab -*.msi -*.msm -*.msp - -# Windows shortcuts -*.lnk - -# Build folder - -*/build/* - -# End of https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode diff --git a/scripts/recording_uploader/README.md b/scripts/recording_uploader/README.md index 14cb8ac20..4871ef715 100644 --- a/scripts/recording_uploader/README.md +++ b/scripts/recording_uploader/README.md @@ -24,7 +24,7 @@ The AWS Toolkit is an open source plug-in for popular IDEs that uses the SAM CLI ## Deploy the application -There is a `deploy` script that creates the s3 bucket and deploys the application using the SAM CLI (included as part of the dev dependencies of this project). The bucket name is hardcoded in the script. The SAM CLI is set up to run in `guided` mode, which will prompt the user every time befor deploying, in case the user wants to change the default values. +There is a `deploy` script that creates the s3 bucket and deploys the application using the SAM CLI (included as part of the dev dependencies of this project). The bucket name is hardcoded in the script. The SAM CLI is set up to run in `guided` mode, which will prompt the user every time before deploying, in case the user wants to change the default values. You can find your API Gateway Endpoint URL in the output values displayed after deployment. 
diff --git a/scripts/recording_uploader/deploy.py b/scripts/recording_uploader/deploy.py index dd86efc85..78a26d17f 100644 --- a/scripts/recording_uploader/deploy.py +++ b/scripts/recording_uploader/deploy.py @@ -4,6 +4,7 @@ import pathlib import subprocess +from loguru import logger import boto3 import fire @@ -40,7 +41,7 @@ def main(region_name: str = "us-east-1", guided: bool = True) -> None: if guided: commands.append("--guided") subprocess.run(commands, cwd=CURRENT_DIR, check=True) - print("Lambda function deployed successfully.") + logger.info("Lambda function deployed successfully.") if __name__ == "__main__": diff --git a/scripts/recording_uploader/template.yaml b/scripts/recording_uploader/template.yaml index 815aacdf2..328e4cda8 100644 --- a/scripts/recording_uploader/template.yaml +++ b/scripts/recording_uploader/template.yaml @@ -24,7 +24,7 @@ Resources: Type: Api # More info about API Event Source: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#api Properties: Path: /upload - Method: get + Method: post Policies: - Statement: - Sid: S3PutObjectPolicy diff --git a/scripts/recording_uploader/uploader/app.py b/scripts/recording_uploader/uploader/app.py index 7fe7e17f5..798ae24a3 100644 --- a/scripts/recording_uploader/uploader/app.py +++ b/scripts/recording_uploader/uploader/app.py @@ -7,26 +7,46 @@ from botocore.client import Config import boto3 +DEFAULT_REGION_NAME = "us-east-1" +DEFAULT_BUCKET = "openadapt" +ONE_HOUR_IN_SECONDS = 3600 -def lambda_handler(*args: Any, **kwargs: Any) -> dict: + +def lambda_handler(event: dict, context: Any) -> dict: """Main entry point for the lambda function.""" + try: + user_id = json.loads(event["body"])["user_id"] + except Exception as e: + print(e) + return { + "statusCode": 400, + "body": json.dumps({"error": "Missing 'user_id' in request body."}), + } return { "statusCode": 200, - "body": json.dumps(get_presigned_url()), + "body": json.dumps(get_presigned_url(user_id)), } -def get_presigned_url() -> dict: - """Generate a presigned URL for uploading a recording to S3.""" - bucket = "openadapt" - region_name = "us-east-1" +def get_presigned_url( + user_id: str, bucket: str = DEFAULT_BUCKET, region_name: str = DEFAULT_REGION_NAME +) -> dict: + """Generate a presigned URL for uploading a recording to S3. + + Args: + bucket (str): The S3 bucket to upload the recording to. + region_name (str): The AWS region the bucket is in. + + Returns: + dict: A dictionary containing the presigned URL. 
+ """ s3 = boto3.client( "s3", config=Config(signature_version="s3v4"), region_name=region_name, endpoint_url=f"https://s3.{region_name}.amazonaws.com", ) - key = f"recordings/{uuid4()}.zip" + key = f"recordings/{user_id}/{uuid4()}.zip" presigned_url = s3.generate_presigned_url( ClientMethod="put_object", @@ -34,7 +54,7 @@ def get_presigned_url() -> dict: "Bucket": bucket, "Key": key, }, - ExpiresIn=3600, + ExpiresIn=ONE_HOUR_IN_SECONDS, ) return {"url": presigned_url} From cd3351017f066feeb5bcc8ed9198eb2869845261 Mon Sep 17 00:00:00 2001 From: Shohan Dutta Roy Date: Mon, 1 Jul 2024 18:13:46 +0530 Subject: [PATCH 05/13] chore: Replace package with module and remove unwanted code --- scripts/recording_uploader/deploy.py | 14 ++++---------- scripts/recording_uploader/uploader/__init__.py | 2 +- 2 files changed, 5 insertions(+), 11 deletions(-) diff --git a/scripts/recording_uploader/deploy.py b/scripts/recording_uploader/deploy.py index 78a26d17f..f17e1b87c 100644 --- a/scripts/recording_uploader/deploy.py +++ b/scripts/recording_uploader/deploy.py @@ -25,16 +25,10 @@ def main(region_name: str = "us-east-1", guided: bool = True) -> None: ) bucket = "openadapt" - # create the S3 bucket, if it doesn't already exist - try: - s3.create_bucket( - ACL="private", - Bucket=bucket, - ) - except (s3.exceptions.BucketAlreadyExists, s3.exceptions.BucketAlreadyOwnedByYou): - proceed = input(f"Bucket '{bucket}' already exists. Proceed? [y/N] ") - if proceed.lower() != "y": - return + s3.create_bucket( + ACL="private", + Bucket=bucket, + ) # deploy the code to AWS Lambda commands = ["sam", "deploy"] diff --git a/scripts/recording_uploader/uploader/__init__.py b/scripts/recording_uploader/uploader/__init__.py index 7ced81d1c..545508ded 100644 --- a/scripts/recording_uploader/uploader/__init__.py +++ b/scripts/recording_uploader/uploader/__init__.py @@ -1 +1 @@ -"""Init file for the uploader package.""" +"""Init file for the uploader module.""" From 23238a0fe985314e1efe7ca5a875ed5d8d74be80 Mon Sep 17 00:00:00 2001 From: Shohan Dutta Roy Date: Mon, 15 Jul 2024 15:33:45 +0530 Subject: [PATCH 06/13] chore: Move recording uploader to separate admin folder --- {scripts => admin}/recording_uploader/README.md | 0 {scripts => admin}/recording_uploader/__init__.py | 0 {scripts => admin}/recording_uploader/deploy.py | 0 {scripts => admin}/recording_uploader/samconfig.toml | 0 {scripts => admin}/recording_uploader/template.yaml | 0 {scripts => admin}/recording_uploader/uploader/__init__.py | 0 {scripts => admin}/recording_uploader/uploader/app.py | 0 {scripts => admin}/recording_uploader/uploader/requirements.txt | 0 8 files changed, 0 insertions(+), 0 deletions(-) rename {scripts => admin}/recording_uploader/README.md (100%) rename {scripts => admin}/recording_uploader/__init__.py (100%) rename {scripts => admin}/recording_uploader/deploy.py (100%) rename {scripts => admin}/recording_uploader/samconfig.toml (100%) rename {scripts => admin}/recording_uploader/template.yaml (100%) rename {scripts => admin}/recording_uploader/uploader/__init__.py (100%) rename {scripts => admin}/recording_uploader/uploader/app.py (100%) rename {scripts => admin}/recording_uploader/uploader/requirements.txt (100%) diff --git a/scripts/recording_uploader/README.md b/admin/recording_uploader/README.md similarity index 100% rename from scripts/recording_uploader/README.md rename to admin/recording_uploader/README.md diff --git a/scripts/recording_uploader/__init__.py b/admin/recording_uploader/__init__.py similarity index 100% rename from 
scripts/recording_uploader/__init__.py rename to admin/recording_uploader/__init__.py diff --git a/scripts/recording_uploader/deploy.py b/admin/recording_uploader/deploy.py similarity index 100% rename from scripts/recording_uploader/deploy.py rename to admin/recording_uploader/deploy.py diff --git a/scripts/recording_uploader/samconfig.toml b/admin/recording_uploader/samconfig.toml similarity index 100% rename from scripts/recording_uploader/samconfig.toml rename to admin/recording_uploader/samconfig.toml diff --git a/scripts/recording_uploader/template.yaml b/admin/recording_uploader/template.yaml similarity index 100% rename from scripts/recording_uploader/template.yaml rename to admin/recording_uploader/template.yaml diff --git a/scripts/recording_uploader/uploader/__init__.py b/admin/recording_uploader/uploader/__init__.py similarity index 100% rename from scripts/recording_uploader/uploader/__init__.py rename to admin/recording_uploader/uploader/__init__.py diff --git a/scripts/recording_uploader/uploader/app.py b/admin/recording_uploader/uploader/app.py similarity index 100% rename from scripts/recording_uploader/uploader/app.py rename to admin/recording_uploader/uploader/app.py diff --git a/scripts/recording_uploader/uploader/requirements.txt b/admin/recording_uploader/uploader/requirements.txt similarity index 100% rename from scripts/recording_uploader/uploader/requirements.txt rename to admin/recording_uploader/uploader/requirements.txt From 3554bb459808a0dbb52ac713a31d132808a01991 Mon Sep 17 00:00:00 2001 From: Shohan Dutta Roy Date: Mon, 15 Jul 2024 15:51:46 +0530 Subject: [PATCH 07/13] docs: Update README.md with details on how to deploy the recording uploader stack --- README.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/README.md b/README.md index a5e64883c..905115c3b 100644 --- a/README.md +++ b/README.md @@ -226,6 +226,18 @@ To record browser events in Google Chrome (required by the `BrowserReplayStrateg 6. Set the `RECORD_BROWSER_EVENTS` flag to `true` in `openadapt/data/config.json`. 
+### Admin features +If you want to self host the app, you should run the following scripts + +**recording_uploader** +- Ensure that you have valid AWS credentials added in your environment +- Run the following command to create a stack on AWS that you can upload recordings to +```bash +python -m admin.recording_uploader.deploy +``` +- If the script runs successfully, you should see an API Gateway URL in the output +- Copy the URL and change the value of `RECORDING_UPLOAD_URL` in `openadapt/config.py` to the URL you copied + ## Features ### State-of-the-art GUI understanding via [Segment Anything in High Quality](https://github.com/SysCV/sam-hq): From 489d28b963b0a02664afed452e3f5b630d8487f1 Mon Sep 17 00:00:00 2001 From: Shohan Dutta Roy Date: Sat, 9 Nov 2024 21:32:53 +0530 Subject: [PATCH 08/13] lint: Fix linting --- admin/recording_uploader/deploy.py | 1 - 1 file changed, 1 deletion(-) diff --git a/admin/recording_uploader/deploy.py b/admin/recording_uploader/deploy.py index f17e1b87c..fd5582aa8 100644 --- a/admin/recording_uploader/deploy.py +++ b/admin/recording_uploader/deploy.py @@ -1,6 +1,5 @@ """Entrypoint to deploy the uploader to AWS Lambda.""" - import pathlib import subprocess From da83b18b1dda2255c955e231e4ab9ba1e9053b89 Mon Sep 17 00:00:00 2001 From: Shohan Dutta Roy Date: Mon, 11 Nov 2024 01:00:01 +0530 Subject: [PATCH 09/13] feat: Add upload button to recordings page --- admin/recording_uploader/template.yaml | 3 +- admin/recording_uploader/uploader/app.py | 38 +++--- .../46d03b666cd4_add_upload_fields.py | 64 ++++++++++ openadapt/app/dashboard/api/recordings.py | 20 +++- openadapt/app/dashboard/api/settings.py | 7 +- .../app/recordings/RawRecordings.tsx | 109 +++++++++++++++--- .../app/dashboard/app/settings/layout.tsx | 19 +-- .../app/settings/recording_upload/form.tsx | 93 +++++++++++++++ .../app/settings/recording_upload/page.tsx | 22 ++++ openadapt/app/dashboard/app/settings/utils.ts | 103 +++++++++++------ openadapt/config.defaults.json | 5 + openadapt/config.py | 16 ++- openadapt/db/crud.py | 27 ++++- openadapt/db/db.py | 2 + openadapt/models.py | 16 +++ openadapt/share.py | 17 +-- openadapt/utils.py | 69 ++++++++++- 17 files changed, 535 insertions(+), 95 deletions(-) create mode 100644 openadapt/alembic/versions/46d03b666cd4_add_upload_fields.py create mode 100644 openadapt/app/dashboard/app/settings/recording_upload/form.tsx create mode 100644 openadapt/app/dashboard/app/settings/recording_upload/page.tsx diff --git a/admin/recording_uploader/template.yaml b/admin/recording_uploader/template.yaml index 328e4cda8..f96a3b61d 100644 --- a/admin/recording_uploader/template.yaml +++ b/admin/recording_uploader/template.yaml @@ -27,10 +27,11 @@ Resources: Method: post Policies: - Statement: - - Sid: S3PutObjectPolicy + - Sid: S3GetPutObjectPolicy Effect: Allow Action: - s3:PutObject + - s3:GetObject Resource: !Sub "arn:aws:s3:::openadapt/*" Outputs: diff --git a/admin/recording_uploader/uploader/app.py b/admin/recording_uploader/uploader/app.py index 798ae24a3..79fb1737e 100644 --- a/admin/recording_uploader/uploader/app.py +++ b/admin/recording_uploader/uploader/app.py @@ -14,42 +14,50 @@ def lambda_handler(event: dict, context: Any) -> dict: """Main entry point for the lambda function.""" - try: - user_id = json.loads(event["body"])["user_id"] - except Exception as e: - print(e) + data = json.loads(event["body"]) + lambda_function = data["lambda_function"] + if lambda_function == "get_presigned_url": return { - "statusCode": 400, - "body": json.dumps({"error": 
"Missing 'user_id' in request body."}), + "body": json.dumps(get_presigned_url(data)), + "statusCode": 200, } return { - "statusCode": 200, - "body": json.dumps(get_presigned_url(user_id)), + "statusCode": 400, + "body": json.dumps({"error": f"Unknown lambda function: {lambda_function}"}), } -def get_presigned_url( - user_id: str, bucket: str = DEFAULT_BUCKET, region_name: str = DEFAULT_REGION_NAME -) -> dict: +def get_presigned_url(data: dict) -> dict: """Generate a presigned URL for uploading a recording to S3. Args: - bucket (str): The S3 bucket to upload the recording to. - region_name (str): The AWS region the bucket is in. + data (dict): The data from the request. Returns: dict: A dictionary containing the presigned URL. """ + try: + key = data["key"] + client_method = data["client_method"] + except Exception as e: + print(e) + return { + "statusCode": 400, + "body": json.dumps( + {"error": "Missing 'key' or 'client_method' in request body."} + ), + } + region_name = DEFAULT_REGION_NAME + bucket = DEFAULT_BUCKET s3 = boto3.client( "s3", config=Config(signature_version="s3v4"), region_name=region_name, endpoint_url=f"https://s3.{region_name}.amazonaws.com", ) - key = f"recordings/{user_id}/{uuid4()}.zip" presigned_url = s3.generate_presigned_url( - ClientMethod="put_object", + ClientMethod=client_method, Params={ "Bucket": bucket, "Key": key, diff --git a/openadapt/alembic/versions/46d03b666cd4_add_upload_fields.py b/openadapt/alembic/versions/46d03b666cd4_add_upload_fields.py new file mode 100644 index 000000000..e793f4ebc --- /dev/null +++ b/openadapt/alembic/versions/46d03b666cd4_add_upload_fields.py @@ -0,0 +1,64 @@ +"""add_upload_fields + +Revision ID: 46d03b666cd4 +Revises: 98505a067995 +Create Date: 2024-11-10 23:14:21.187860 + +""" +from alembic import op +import sqlalchemy as sa + +import openadapt + +# revision identifiers, used by Alembic. +revision = "46d03b666cd4" +down_revision = "98505a067995" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "replay", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column( + "timestamp", + openadapt.models.ForceFloat(precision=10, scale=2, asdecimal=False), + nullable=True, + ), + sa.Column("strategy_name", sa.String(), nullable=True), + sa.Column("strategy_args", sa.JSON(), nullable=True), + sa.Column("git_hash", sa.String(), nullable=True), + sa.PrimaryKeyConstraint("id", name=op.f("pk_replay")), + ) + with op.batch_alter_table("recording", schema=None) as batch_op: + batch_op.add_column( + sa.Column( + "upload_status", + sa.Enum("NOT_UPLOADED", "UPLOADING", "UPLOADED", name="uploadstatus"), + nullable=True, + ) + ) + batch_op.add_column(sa.Column("uploaded_key", sa.String(), nullable=True)) + batch_op.add_column( + sa.Column("uploaded_to_custom_bucket", sa.Boolean(), nullable=True) + ) + + # update all recordings to not uploaded + op.execute("UPDATE recording SET upload_status = 'NOT_UPLOADED' WHERE 1=1") + # update all recordings to not uploaded to custom bucket + op.execute("UPDATE recording SET uploaded_to_custom_bucket = FALSE WHERE 1=1") + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table("recording", schema=None) as batch_op: + batch_op.drop_column("uploaded_to_custom_bucket") + batch_op.drop_column("uploaded_key") + batch_op.drop_column("upload_status") + + op.drop_table("replay") + # ### end Alembic commands ### diff --git a/openadapt/app/dashboard/api/recordings.py b/openadapt/app/dashboard/api/recordings.py index 48a5fd47c..f4ee3885c 100644 --- a/openadapt/app/dashboard/api/recordings.py +++ b/openadapt/app/dashboard/api/recordings.py @@ -3,16 +3,17 @@ import json from fastapi import APIRouter, WebSocket +from starlette.responses import RedirectResponse -from openadapt.custom_logger import logger from openadapt.config import config +from openadapt.custom_logger import logger from openadapt.db import crud from openadapt.deprecated.app import cards from openadapt.events import get_events from openadapt.models import Recording from openadapt.plotting import display_event from openadapt.share import upload_recording_to_s3 -from openadapt.utils import image2utf8, row2dict +from openadapt.utils import get_recording_url, image2utf8, row2dict class RecordingsAPI: @@ -34,6 +35,9 @@ def attach_routes(self) -> APIRouter: self.app.add_api_route( "/{recording_id}/upload", self.upload_recording, methods=["POST"] ) + self.app.add_api_route( + "/{recording_id}/view", self.view_recording, methods=["GET"] + ) self.recording_detail_route() return self.app @@ -70,9 +74,21 @@ def recording_status() -> dict[str, bool]: def upload_recording(self, recording_id: int) -> dict[str, str]: """Upload a recording.""" + with crud.get_new_session(read_and_write=True) as session: + crud.start_uploading_recording(session, recording_id) upload_recording_to_s3(config.UNIQUE_USER_ID, recording_id) return {"message": "Recording uploaded"} + @staticmethod + def view_recording(recording_id: int) -> dict[str, str]: + """View a recording.""" + session = crud.get_new_session(read_only=True) + recording = crud.get_recording_by_id(session, recording_id) + url = get_recording_url( + recording.uploaded_key, recording.uploaded_to_custom_bucket + ) + return RedirectResponse(url) + def recording_detail_route(self) -> None: """Add the recording detail route as a websocket.""" diff --git a/openadapt/app/dashboard/api/settings.py b/openadapt/app/dashboard/api/settings.py index 716203102..ebf9e3b2f 100644 --- a/openadapt/app/dashboard/api/settings.py +++ b/openadapt/app/dashboard/api/settings.py @@ -21,7 +21,12 @@ def attach_routes(self) -> APIRouter: return self.app Category = Literal[ - "api_keys", "scrubbing", "record_and_replay", "general", "onboarding" + "api_keys", + "scrubbing", + "record_and_replay", + "general", + "onboarding", + "recording_upload", ] @staticmethod diff --git a/openadapt/app/dashboard/app/recordings/RawRecordings.tsx b/openadapt/app/dashboard/app/recordings/RawRecordings.tsx index 17b6c413b..409c2419a 100644 --- a/openadapt/app/dashboard/app/recordings/RawRecordings.tsx +++ b/openadapt/app/dashboard/app/recordings/RawRecordings.tsx @@ -1,40 +1,113 @@ -import { SimpleTable } from '@/components/SimpleTable'; -import { Recording } from '@/types/recording'; +import { SimpleTable } from '@/components/SimpleTable' +import { Recording, UploadStatus } from '@/types/recording' import React, { useEffect, useState } from 'react' -import { timeStampToDateString } from '../utils'; -import { useRouter } from 'next/navigation'; +import { timeStampToDateString } from '../utils' +import { useRouter } from 'next/navigation' +import { Anchor, Button, Group, Text, Tooltip } from 
'@mantine/core' +import { IconInfoCircle } from '@tabler/icons-react' export const RawRecordings = () => { - const [recordings, setRecordings] = useState([]); - const router = useRouter(); + const [recordings, setRecordings] = useState([]) + const router = useRouter() function fetchRecordings() { - fetch('/api/recordings').then(res => { + fetch('/api/recordings').then((res) => { if (res.ok) { res.json().then((data) => { - setRecordings(data.recordings); - }); + setRecordings(data.recordings) + }) } }) } useEffect(() => { - fetchRecordings(); - }, []); + fetchRecordings() + }, []) function onClickRow(recording: Recording) { - return () => router.push(`/recordings/detail/?id=${recording.id}`); + return () => router.push(`/recordings/detail/?id=${recording.id}`) + } + function goToSettings() { + router.push('/settings/recording_upload') + } + + function uploadRecording( + e: React.MouseEvent, + recording_id: number + ) { + e.stopPropagation() + fetch(`/api/recordings/${recording_id}/upload`, { + method: 'POST', + }).then((res) => { + if (res.ok) { + fetchRecordings() + } + }) } return ( recording.video_start_time ? timeStampToDateString(recording.video_start_time) : 'N/A'}, - {name: 'Timestamp', accessor: (recording: Recording) => recording.timestamp ? timeStampToDateString(recording.timestamp) : 'N/A'}, - {name: 'Monitor Width/Height', accessor: (recording: Recording) => `${recording.monitor_width}/${recording.monitor_height}`}, - {name: 'Double Click Interval Seconds/Pixels', accessor: (recording: Recording) => `${recording.double_click_interval_seconds}/${recording.double_click_distance_pixels}`}, + { name: 'ID', accessor: 'id' }, + { name: 'Description', accessor: 'task_description' }, + { + name: 'Start time', + accessor: (recording: Recording) => + recording.video_start_time + ? timeStampToDateString(recording.video_start_time) + : 'N/A', + }, + { + name: 'Timestamp', + accessor: (recording: Recording) => + recording.timestamp + ? timeStampToDateString(recording.timestamp) + : 'N/A', + }, + { + name: 'Monitor Width/Height', + accessor: (recording: Recording) => + `${recording.monitor_width}/${recording.monitor_height}`, + }, + { + name: 'Double Click Interval Seconds/Pixels', + accessor: (recording: Recording) => + `${recording.double_click_interval_seconds}/${recording.double_click_distance_pixels}`, + }, + { + name: ( + + Upload to cloud + + + + + + + ), + accessor: (recording: Recording) => + recording.upload_status === UploadStatus.UPLOADED ? ( + e.stopPropagation()} + href={`/api/recordings/${recording.id}/view`} + target="_blank" + > + View + + ) : UploadStatus.UPLOADING === + recording.upload_status ? ( + 'Uploading...' 
+ ) : ( + + ), + }, ]} data={recordings} refreshData={fetchRecordings} diff --git a/openadapt/app/dashboard/app/settings/layout.tsx b/openadapt/app/dashboard/app/settings/layout.tsx index f75d1632b..7a471af27 100644 --- a/openadapt/app/dashboard/app/settings/layout.tsx +++ b/openadapt/app/dashboard/app/settings/layout.tsx @@ -1,11 +1,12 @@ -import { Box, Flex } from "@mantine/core" -import { Navbar } from "./navbar" -import { SettingsHeader } from "./SettingsHeader" +import { Box, Flex } from '@mantine/core' +import { Navbar } from './navbar' +import { SettingsHeader } from './SettingsHeader' const routes = [ { name: 'API Keys', path: '/settings' }, { name: 'Scrubbing', path: '/settings/scrubbing' }, { name: 'Record & Replay', path: '/settings/record_and_replay' }, + { name: 'Recording Upload', path: '/settings/recording_upload' }, ] export default function RootLayout({ @@ -16,13 +17,17 @@ export default function RootLayout({ return ( - + - - {children} - + {children} ) diff --git a/openadapt/app/dashboard/app/settings/recording_upload/form.tsx b/openadapt/app/dashboard/app/settings/recording_upload/form.tsx new file mode 100644 index 000000000..3785a761c --- /dev/null +++ b/openadapt/app/dashboard/app/settings/recording_upload/form.tsx @@ -0,0 +1,93 @@ +'use client' + +import { Button, Checkbox, Flex, Grid, PasswordInput, TextInput } from '@mantine/core' +import { useForm } from '@mantine/form' +import React, { useEffect } from 'react' +import { + RecordingUploadSettings, + saveSettings, + validateRecordingUploadSettings, +} from '../utils' + +type Props = { + settings: RecordingUploadSettings +} + +export const Form = ({ settings }: Props) => { + const form = useForm({ + initialValues: JSON.parse(JSON.stringify(settings)), + validate: (values) => { + return validateRecordingUploadSettings(values) + }, + }) + + useEffect(() => { + form.setValues(JSON.parse(JSON.stringify(settings))) + form.setInitialValues(JSON.parse(JSON.stringify(settings))) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [settings]) + + function resetForm() { + form.reset() + } + + return ( + + + + + + + + + + + + + + + + + + + + + + + + ) +} diff --git a/openadapt/app/dashboard/app/settings/recording_upload/page.tsx b/openadapt/app/dashboard/app/settings/recording_upload/page.tsx new file mode 100644 index 000000000..92f68e65f --- /dev/null +++ b/openadapt/app/dashboard/app/settings/recording_upload/page.tsx @@ -0,0 +1,22 @@ +'use client' + +import React, { useEffect, useState } from 'react' +import { getSettings } from '@/app/utils' +import { Form } from './form' +import { RecordingUploadSettings } from '../utils' + +export default function RecordingUpload() { + const [settings, setSettings] = useState({ + OVERWRITE_RECORDING_DESTINATION: false, + RECORDING_PUBLIC_KEY: '', + RECORDING_PRIVATE_KEY: '', + RECORDING_BUCKET_NAME: '', + RECORDING_BUCKET_REGION: '', + }) + useEffect(() => { + getSettings('recording_upload').then( + setSettings + ) + }, []) + return
+} diff --git a/openadapt/app/dashboard/app/settings/utils.ts b/openadapt/app/dashboard/app/settings/utils.ts index 142684ea7..ce52da808 100644 --- a/openadapt/app/dashboard/app/settings/utils.ts +++ b/openadapt/app/dashboard/app/settings/utils.ts @@ -1,11 +1,10 @@ -import { UseFormReturnType } from '@mantine/form'; -import { notifications } from '@mantine/notifications'; - +import { UseFormReturnType } from '@mantine/form' +import { notifications } from '@mantine/notifications' export function validateScrubbingSettings(settings: Record) { const errors: Record = {} if (settings.SCRUB_ENABLED) { - return errors; + return errors } if (settings.SCRUB_CHAR.length === 0) { errors.SCRUB_CHAR = 'Scrubbing character is required' @@ -17,14 +16,16 @@ export function validateScrubbingSettings(settings: Record) { errors.SCRUB_LANGUAGE = 'Scrubbing language is required' } if (settings.SCRUB_LANGUAGE.length > 2) { - errors.SCRUB_LANGUAGE = 'Scrubbing language must be a two character language code' + errors.SCRUB_LANGUAGE = + 'Scrubbing language must be a two character language code' } return errors } - -export function validateRecordAndReplaySettings(settings: Record) { +export function validateRecordAndReplaySettings( + settings: Record +) { const errors: Record = {} if (settings.VIDEO_PIXEL_FORMAT.length === 0) { errors.VIDEO_PIXEL_FORMAT = 'Video pixel format is required' @@ -32,40 +33,72 @@ export function validateRecordAndReplaySettings(settings: Record return errors } - -export function saveSettings( - form: UseFormReturnType, +export function validateRecordingUploadSettings( + settings: RecordingUploadSettings ) { - return function(values: Record) { + const errors: Record = {} + if (settings.OVERWRITE_RECORDING_DESTINATION) { + if (settings.RECORDING_PUBLIC_KEY.length === 0) { + errors.RECORDING_PUBLIC_KEY = + 'Recording destination public key is required' + } + if (settings.RECORDING_PRIVATE_KEY.length === 0) { + errors.RECORDING_PRIVATE_KEY = + 'Recording destination private key is required' + } + if (settings.RECORDING_BUCKET_NAME.length === 0) { + errors.RECORDING_BUCKET_NAME = + 'Recording destination bucket name is required' + } + if (settings.RECORDING_BUCKET_REGION.length === 0) { + errors.RECORDING_BUCKET_REGION = + 'Recording destination bucket region is required' + } + } + console.log(settings) + return errors +} + +export function saveSettings(form: UseFormReturnType) { + return function (values: Record) { fetch('/api/settings', { method: 'POST', headers: { 'Content-Type': 'application/json', }, body: JSON.stringify(values), - }).then(resp => { - if (resp.ok) { - notifications.show({ - title: 'Settings saved', - message: 'Your settings have been saved', - color: 'green', - }); - return resp.json(); - } else { - notifications.show({ - title: 'Failed to save settings', - message: 'Please try again', - color: 'red', - }) - return null; - } - - }).then((resp) => { - if (!resp) { - return; - } - form.setInitialValues(values); - form.setDirty({}); - }); + }) + .then((resp) => { + if (resp.ok) { + notifications.show({ + title: 'Settings saved', + message: 'Your settings have been saved', + color: 'green', + }) + return resp.json() + } else { + notifications.show({ + title: 'Failed to save settings', + message: 'Please try again', + color: 'red', + }) + return null + } + }) + .then((resp) => { + if (!resp) { + return + } + form.setInitialValues(values) + form.setDirty({}) + }) } } + +export type RecordingUploadSettings = { + OVERWRITE_RECORDING_DESTINATION: boolean + RECORDING_PUBLIC_KEY: 
string + RECORDING_PRIVATE_KEY: string + RECORDING_BUCKET_NAME: string + RECORDING_BUCKET_REGION: string +} diff --git a/openadapt/config.defaults.json b/openadapt/config.defaults.json index ef1d15608..a1b511333 100644 --- a/openadapt/config.defaults.json +++ b/openadapt/config.defaults.json @@ -21,6 +21,11 @@ "RECORD_BROWSER_EVENTS": false, "RECORD_FULL_VIDEO": false, "RECORD_IMAGES": false, + "OVERWRITE_RECORDING_DESTINATION": false, + "RECORDING_PUBLIC_KEY": "", + "RECORDING_PRIVATE_KEY": "", + "RECORDING_BUCKET_NAME": "openadapt", + "RECORDING_BUCKET_REGION": "us-east-1", "LOG_MEMORY": false, "STOP_SEQUENCES": [ [ diff --git a/openadapt/config.py b/openadapt/config.py index dfbf4ed75..d12019e3b 100644 --- a/openadapt/config.py +++ b/openadapt/config.py @@ -29,7 +29,6 @@ VIDEO_DIR_PATH = DATA_DIR_PATH / "videos" DATABASE_FILE_PATH = (DATA_DIR_PATH / "openadapt.db").absolute() DATABASE_LOCK_FILE_PATH = DATA_DIR_PATH / "openadapt.db.lock" -RECORDING_UPLOAD_URL = "" STOP_STRS = [ "oa.stop", @@ -180,6 +179,14 @@ class SegmentationAdapter(str, Enum): # App configurations APP_DARK_MODE: bool = False + # Upload recording configurations + RECORDING_UPLOAD_URL: ClassVar[str] = "" + OVERWRITE_RECORDING_DESTINATION: bool = False + RECORDING_PUBLIC_KEY: str = "" + RECORDING_PRIVATE_KEY: str = "" + RECORDING_BUCKET_NAME: str = "openadapt" + RECORDING_BUCKET_REGION: str = "us-east-1" + # Scrubbing configurations SCRUB_ENABLED: bool = False SCRUB_CHAR: str = "*" @@ -301,6 +308,13 @@ def __setattr__(self, key: str, value: Any) -> None: "UNIQUE_USER_ID", "REDIRECT_TO_ONBOARDING", ], + "recording_upload": [ + "OVERWRITE_RECORDING_DESTINATION", + "RECORDING_PUBLIC_KEY", + "RECORDING_PRIVATE_KEY", + "RECORDING_BUCKET_NAME", + "RECORDING_BUCKET_REGION", + ], } diff --git a/openadapt/db/crud.py b/openadapt/db/crud.py index 3ebf8a152..60ad88266 100644 --- a/openadapt/db/crud.py +++ b/openadapt/db/crud.py @@ -20,8 +20,8 @@ from openadapt.db.db import Session, get_read_only_session_maker from openadapt.models import ( ActionEvent, - BrowserEvent, AudioInfo, + BrowserEvent, MemoryStat, PerformanceStat, Recording, @@ -918,6 +918,31 @@ def mark_scrubbing_complete(session: SaSession, scrubbed_recording_id: int) -> N session.commit() +def start_uploading_recording(session: SaSession, recording_id: int) -> None: + """Mark a recording as being uploaded.""" + session.query(Recording).filter(Recording.id == recording_id).update( + {"upload_status": Recording.UploadStatus.UPLOADING} + ) + session.commit() + + +def mark_uploading_complete( + session: SaSession, + recording_id: int, + uploaded_key: str, + uploaded_to_custom_bucket: bool, +) -> None: + """Mark a recording as being uploaded.""" + session.query(Recording).filter(Recording.id == recording_id).update( + { + "upload_status": Recording.UploadStatus.UPLOADED, + "uploaded_key": uploaded_key, + "uploaded_to_custom_bucket": uploaded_to_custom_bucket, + } + ) + session.commit() + + def acquire_db_lock(timeout: int = 60) -> bool: """Check if the database is locked. 
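
Taken together, the new upload_status column, the recording_upload settings category, and the two CRUD helpers above give each recording a three-state upload lifecycle (NOT_UPLOADED, UPLOADING, UPLOADED). The sketch below is illustrative only, not part of the patch; it reuses the helper names introduced in this series and assumes a recording with id 1 already exists in the local database.

    # Illustrative sketch, assuming recording id 1 exists: how the dashboard
    # route and share.upload_recording_to_s3 are expected to drive the
    # upload lifecycle introduced in this patch.
    from openadapt.config import config
    from openadapt.db import crud
    from openadapt.share import upload_recording_to_s3

    recording_id = 1  # assumed, for illustration

    # 1. The dashboard's POST /{recording_id}/upload route marks the
    #    recording as UPLOADING before starting the upload.
    with crud.get_new_session(read_and_write=True) as session:
        crud.start_uploading_recording(session, recording_id)

    # 2. The upload exports the recording, obtains a presigned URL, and
    #    PUTs the zip to S3.
    upload_recording_to_s3(config.UNIQUE_USER_ID, recording_id)

    # 3. On success, upload_recording_to_s3 records the S3 key and flips the
    #    status to UPLOADED via crud.mark_uploading_complete(
    #        session, recording_id, uploaded_key=key,
    #        uploaded_to_custom_bucket=config.OVERWRITE_RECORDING_DESTINATION)
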
diff --git a/openadapt/db/db.py b/openadapt/db/db.py index 2fd827e05..4d14438b5 100644 --- a/openadapt/db/db.py +++ b/openadapt/db/db.py @@ -219,4 +219,6 @@ def export_recording(recording_id: int) -> str: target_engine = create_engine(target_db_url, future=True) db_file_path = copy_recording_data(engine, target_engine, recording_id) + # close the target engine + target_engine.dispose() return db_file_path diff --git a/openadapt/models.py b/openadapt/models.py index 131652f13..275061c64 100644 --- a/openadapt/models.py +++ b/openadapt/models.py @@ -5,6 +5,7 @@ from itertools import zip_longest from typing import Any, Type, Union import copy +import enum import io import sys @@ -99,6 +100,15 @@ class Recording(db.Base): "AudioInfo", back_populates="recording", cascade="all, delete-orphan" ) + class UploadStatus(enum.Enum): + NOT_UPLOADED = "not_uploaded" + UPLOADING = "uploading" + UPLOADED = "uploaded" + + upload_status = sa.Column(sa.Enum(UploadStatus), default=UploadStatus.NOT_UPLOADED) + uploaded_key = sa.Column(sa.String) + uploaded_to_custom_bucket = sa.Column(sa.Boolean, default=False) + _processed_action_events = None @property @@ -120,6 +130,12 @@ def scrub(self, scrubber: ScrubbingProvider) -> None: """ self.task_description = scrubber.scrub_text(self.task_description) + def asdict(self) -> dict: + """Get a dictionary representation of the recording.""" + ret = super().asdict() + ret["upload_status"] = ret["upload_status"].value + return ret + class ActionEvent(db.Base): """Class representing an action event in the database.""" diff --git a/openadapt/share.py b/openadapt/share.py index b30ac9b65..d7ee3f721 100644 --- a/openadapt/share.py +++ b/openadapt/share.py @@ -18,7 +18,7 @@ import fire from openadapt import db, utils -from openadapt.config import RECORDING_DIR_PATH +from openadapt.config import RECORDING_DIR_PATH, config from openadapt.db import crud from openadapt.video import get_video_file_path @@ -124,7 +124,6 @@ def send_recording(recording_id: int) -> None: recording_id (int): The ID of the recording to send. 
""" zip_file_path = export_recording_to_folder(recording_id) - print(zip_file_path) assert zip_file_path, zip_file_path try: @@ -188,17 +187,19 @@ def _inner() -> None: zip_file_path = export_recording_to_folder(recording_id) # Upload the zip file to the S3 bucket - utils.upload_file_to_s3( - zip_file_path, - { - "user_id": user_id, - }, - ) + key = utils.upload_file_to_s3(zip_file_path) # Delete the zip file after uploading if os.path.exists(zip_file_path): os.remove(zip_file_path) logger.info(f"deleted {zip_file_path=}") + with crud.get_new_session(read_and_write=True) as session: + crud.mark_uploading_complete( + session, + recording_id, + uploaded_key=key, + uploaded_to_custom_bucket=config.OVERWRITE_RECORDING_DESTINATION, + ) except Exception as exc: logger.exception(exc) diff --git a/openadapt/utils.py b/openadapt/utils.py index 64654d38c..35db0cf0f 100644 --- a/openadapt/utils.py +++ b/openadapt/utils.py @@ -7,6 +7,7 @@ from io import BytesIO from logging import StreamHandler from typing import Any, Callable +from uuid import uuid4 import ast import base64 import importlib.metadata @@ -16,10 +17,12 @@ import threading import time +from botocore.config import Config from bs4 import BeautifulSoup from jinja2 import Environment, FileSystemLoader from PIL import Image, ImageEnhance from posthog import Posthog +import boto3 import requests from openadapt.build_utils import is_running_from_executable, redirect_stdout_stderr @@ -45,7 +48,6 @@ PERFORMANCE_PLOTS_DIR_PATH, POSTHOG_HOST, POSTHOG_PUBLIC_KEY, - RECORDING_UPLOAD_URL, config, ) from openadapt.custom_logger import filter_log_messages @@ -948,21 +950,49 @@ def get_posthog_instance() -> DistinctIDPosthog: return posthog -def upload_file_to_s3(file_path: str, body: dict) -> requests.Response: +def upload_file_to_s3(file_path: str) -> requests.Response: """Upload a file to an S3 bucket. Args: file_path (str): The path to the file to upload. - body (dict): The body of the request. 
""" filename = os.path.basename(file_path) - resp = requests.post(RECORDING_UPLOAD_URL, json=body) - upload_url = resp.json()["url"] + key = f"recordings/{config.UNIQUE_USER_ID}/{uuid4()}.zip" + if not config.OVERWRITE_RECORDING_DESTINATION: + resp = requests.post( + config.RECORDING_UPLOAD_URL, + json={ + "key": key, + "lambda_function": "get_presigned_url", + "client_method": "put_object", + }, + ) + upload_url = resp.json()["url"] + else: + # create a presigned url + s3 = boto3.client( + "s3", + config=Config(signature_version="s3v4"), + region_name=config.RECORDING_BUCKET_REGION, + endpoint_url=f"https://s3.{config.RECORDING_BUCKET_REGION}.amazonaws.com", + aws_access_key_id=config.RECORDING_PUBLIC_KEY, + aws_secret_access_key=config.RECORDING_PRIVATE_KEY, + ) + + ONE_HOUR_IN_SECONDS = 3600 + upload_url = s3.generate_presigned_url( + ClientMethod="put_object", + Params={ + "Bucket": config.RECORDING_BUCKET_NAME, + "Key": key, + }, + ExpiresIn=ONE_HOUR_IN_SECONDS, + ) with open(file_path, "rb") as file: files = {"file": (filename, file)} resp = requests.put(upload_url, files=files) - return resp + return key def retry_with_exceptions(max_retries: int = 5) -> Callable: @@ -1072,5 +1102,32 @@ def __call__(self, *args: Any, **kwargs: Any) -> Any: return +def get_recording_url(uploaded_key: str, uploaded_to_custom_bucket: bool) -> str: + """Get the URL of a recording.""" + if not uploaded_key: + return None + if uploaded_to_custom_bucket: + s3 = boto3.client( + "s3", + region_name=config.RECORDING_BUCKET_REGION, + aws_access_key_id=config.RECORDING_PUBLIC_KEY, + aws_secret_access_key=config.RECORDING_PRIVATE_KEY, + ) + return s3.generate_presigned_url( + ClientMethod="get_object", + Params={"Bucket": config.RECORDING_BUCKET_NAME, "Key": uploaded_key}, + ) + else: + resp = requests.post( + config.RECORDING_UPLOAD_URL, + json={ + "key": uploaded_key, + "lambda_function": "get_presigned_url", + "client_method": "get_object", + }, + ) + return resp.json()["url"] + + if __name__ == "__main__": fire.Fire(get_functions(__name__)) From f673cb890712277078c7e888ac433200365f3b45 Mon Sep 17 00:00:00 2001 From: Shohan Dutta Roy Date: Mon, 11 Nov 2024 00:58:27 +0530 Subject: [PATCH 10/13] fix: Linting --- openadapt/app/dashboard/api.ts | 7 +- openadapt/app/dashboard/app/layout.tsx | 8 +- .../app/dashboard/app/onboarding/page.tsx | 10 +- openadapt/app/dashboard/app/providers.tsx | 30 +-- .../app/recordings/ScrubbedRecordings.tsx | 43 ++-- .../dashboard/app/recordings/detail/page.tsx | 152 +++++++------ .../app/dashboard/app/recordings/page.tsx | 57 +++-- openadapt/app/dashboard/app/routes.ts | 2 +- .../app/scrubbing/ScrubbingUpdates.tsx | 138 +++++++---- .../app/dashboard/app/scrubbing/page.tsx | 148 ++++++------ .../app/settings/(api_keys)/form.tsx | 58 +++-- .../app/settings/(api_keys)/page.tsx | 18 +- .../dashboard/app/settings/SettingsHeader.tsx | 14 +- .../app/dashboard/app/settings/navbar.tsx | 14 +- .../app/settings/record_and_replay/form.tsx | 66 ++++-- .../app/settings/record_and_replay/page.tsx | 18 +- .../dashboard/app/settings/scrubbing/form.tsx | 97 +++++--- .../dashboard/app/settings/scrubbing/page.tsx | 18 +- openadapt/app/dashboard/app/utils.ts | 12 +- .../components/ActionEvent/ActionEvent.tsx | 215 +++++++++++++----- .../components/ActionEvent/ActionEvents.tsx | 48 ++-- .../ActionEvent/RemoveActionEvent.tsx | 51 +++-- .../components/ActionEvent/Screenshots.tsx | 73 +++--- .../components/ActionEvent/index.tsx | 2 +- .../dashboard/components/Navbar/Navbar.tsx | 4 +- 
.../components/Onboarding/steps/BookACall.tsx | 26 ++- .../Onboarding/steps/RegisterForUpdates.tsx | 9 +- .../components/Onboarding/steps/Tutorial.tsx | 116 ++++++---- .../RecordingDetails/RecordingDetails.tsx | 58 +++-- .../components/RecordingDetails/index.tsx | 2 +- .../app/dashboard/components/Shell/Shell.tsx | 4 +- .../components/SimpleTable/SimpleTable.tsx | 79 ++++--- .../components/SimpleTable/index.tsx | 2 +- openadapt/app/dashboard/index.js | 20 +- openadapt/app/dashboard/next.config.js | 2 +- openadapt/app/dashboard/types/action-event.ts | 62 ++--- openadapt/app/dashboard/types/recording.ts | 42 ++-- openadapt/app/dashboard/types/scrubbing.ts | 22 +- 38 files changed, 1052 insertions(+), 695 deletions(-) diff --git a/openadapt/app/dashboard/api.ts b/openadapt/app/dashboard/api.ts index dcc6502d6..ef535703e 100644 --- a/openadapt/app/dashboard/api.ts +++ b/openadapt/app/dashboard/api.ts @@ -1,3 +1,6 @@ -export async function get(url: string, options: Partial = {}): Promise { - return fetch(url, options).then((res) => res.json()); +export async function get( + url: string, + options: Partial = {} +): Promise { + return fetch(url, options).then((res) => res.json()) } diff --git a/openadapt/app/dashboard/app/layout.tsx b/openadapt/app/dashboard/app/layout.tsx index 3cd56c206..41d62c259 100644 --- a/openadapt/app/dashboard/app/layout.tsx +++ b/openadapt/app/dashboard/app/layout.tsx @@ -1,9 +1,9 @@ import './globals.css' import { ColorSchemeScript, MantineProvider } from '@mantine/core' -import { Notifications } from '@mantine/notifications'; +import { Notifications } from '@mantine/notifications' import { Shell } from '@/components/Shell' -import { CSPostHogProvider } from './providers'; +import { CSPostHogProvider } from './providers' export const metadata = { title: 'OpenAdapt.AI', @@ -23,9 +23,7 @@ export default function RootLayout({ - - {children} - + {children} diff --git a/openadapt/app/dashboard/app/onboarding/page.tsx b/openadapt/app/dashboard/app/onboarding/page.tsx index d4a3ecdde..fe49955ff 100644 --- a/openadapt/app/dashboard/app/onboarding/page.tsx +++ b/openadapt/app/dashboard/app/onboarding/page.tsx @@ -1,7 +1,7 @@ -import { BookACall } from "@/components/Onboarding/steps/BookACall"; -import { RegisterForUpdates } from "@/components/Onboarding/steps/RegisterForUpdates"; -import { Tutorial } from "@/components/Onboarding/steps/Tutorial"; -import { Box, Divider } from "@mantine/core"; +import { BookACall } from '@/components/Onboarding/steps/BookACall' +import { RegisterForUpdates } from '@/components/Onboarding/steps/RegisterForUpdates' +import { Tutorial } from '@/components/Onboarding/steps/Tutorial' +import { Box, Divider } from '@mantine/core' export default function Onboarding() { return ( @@ -12,5 +12,5 @@ export default function Onboarding() { - ); + ) } diff --git a/openadapt/app/dashboard/app/providers.tsx b/openadapt/app/dashboard/app/providers.tsx index 784d4092c..4d42be3ec 100644 --- a/openadapt/app/dashboard/app/providers.tsx +++ b/openadapt/app/dashboard/app/providers.tsx @@ -6,23 +6,23 @@ import { useEffect } from 'react' import { getSettings } from './utils' if (typeof window !== 'undefined') { - if (process.env.NEXT_PUBLIC_MODE !== "development") { - posthog.init(process.env.NEXT_PUBLIC_POSTHOG_PUBLIC_KEY as string, { - api_host: process.env.NEXT_PUBLIC_POSTHOG_HOST, - }) - } + if (process.env.NEXT_PUBLIC_MODE !== 'development') { + posthog.init(process.env.NEXT_PUBLIC_POSTHOG_PUBLIC_KEY as string, { + api_host: process.env.NEXT_PUBLIC_POSTHOG_HOST, + 
}) + } } export function CSPostHogProvider({ children }: { children: React.ReactNode }) { - useEffect(() => { - if (process.env.NEXT_PUBLIC_MODE !== "development") { - getSettings().then((settings) => { - posthog.identify(settings['UNIQUE_USER_ID']) - }) + useEffect(() => { + if (process.env.NEXT_PUBLIC_MODE !== 'development') { + getSettings().then((settings) => { + posthog.identify(settings['UNIQUE_USER_ID']) + }) + } + }, []) + if (process.env.NEXT_PUBLIC_MODE === 'development') { + return <>{children} } - }, []) - if (process.env.NEXT_PUBLIC_MODE === "development") { - return <>{children}; - } - return {children} + return {children} } diff --git a/openadapt/app/dashboard/app/recordings/ScrubbedRecordings.tsx b/openadapt/app/dashboard/app/recordings/ScrubbedRecordings.tsx index 7558420dc..2654ea75d 100644 --- a/openadapt/app/dashboard/app/recordings/ScrubbedRecordings.tsx +++ b/openadapt/app/dashboard/app/recordings/ScrubbedRecordings.tsx @@ -1,37 +1,50 @@ -import { SimpleTable } from '@/components/SimpleTable'; -import { Recording, ScrubbedRecording } from '@/types/recording'; -import { useRouter } from 'next/navigation'; +import { SimpleTable } from '@/components/SimpleTable' +import { Recording, ScrubbedRecording } from '@/types/recording' +import { useRouter } from 'next/navigation' import React, { useEffect, useState } from 'react' export const ScrubbedRecordings = () => { - const [recordings, setRecordings] = useState([]); - const router = useRouter(); + const [recordings, setRecordings] = useState([]) + const router = useRouter() function fetchScrubbedRecordings() { - fetch('/api/recordings/scrubbed').then(res => { + fetch('/api/recordings/scrubbed').then((res) => { if (res.ok) { res.json().then((data) => { - setRecordings(data.recordings); - }); + setRecordings(data.recordings) + }) } }) } function onClickRow(recording: ScrubbedRecording) { - return () => router.push(`/recordings/detail/?id=${recording.recording_id}`); + return () => + router.push(`/recordings/detail/?id=${recording.recording_id}`) } useEffect(() => { - fetchScrubbedRecordings(); - }, []); + fetchScrubbedRecordings() + }, []) return ( recording.recording_id}, - {name: 'Description', accessor: (recording: ScrubbedRecording) => recording.recording.task_description}, - {name: 'Provider', accessor: 'provider'}, - {name: 'Original Recording', accessor: (recording: ScrubbedRecording) => recording.original_recording.task_description}, + { + name: 'ID', + accessor: (recording: ScrubbedRecording) => + recording.recording_id, + }, + { + name: 'Description', + accessor: (recording: ScrubbedRecording) => + recording.recording.task_description, + }, + { name: 'Provider', accessor: 'provider' }, + { + name: 'Original Recording', + accessor: (recording: ScrubbedRecording) => + recording.original_recording.task_description, + }, ]} data={recordings} refreshData={fetchScrubbedRecordings} diff --git a/openadapt/app/dashboard/app/recordings/detail/page.tsx b/openadapt/app/dashboard/app/recordings/detail/page.tsx index d83000857..329a282b0 100644 --- a/openadapt/app/dashboard/app/recordings/detail/page.tsx +++ b/openadapt/app/dashboard/app/recordings/detail/page.tsx @@ -1,86 +1,94 @@ -'use client'; +'use client' -import { ActionEvents } from "@/components/ActionEvent/ActionEvents"; -import { RecordingDetails } from "@/components/RecordingDetails"; -import { ActionEvent as ActionEventType } from "@/types/action-event"; -import { Recording as RecordingType } from "@/types/recording"; -import { Box, Button, Grid, Loader, Progress } 
from "@mantine/core"; -import { notifications } from "@mantine/notifications"; -import { useSearchParams } from "next/navigation"; -import { Suspense, useEffect, useState } from "react"; +import { ActionEvents } from '@/components/ActionEvent/ActionEvents' +import { RecordingDetails } from '@/components/RecordingDetails' +import { ActionEvent as ActionEventType } from '@/types/action-event' +import { Recording as RecordingType } from '@/types/recording' +import { Box, Button, Grid, Loader, Progress } from '@mantine/core' +import { notifications } from '@mantine/notifications' +import { useSearchParams } from 'next/navigation' +import { Suspense, useEffect, useState } from 'react' function Recording() { - const searchParams = useSearchParams(); - const id = searchParams.get("id"); + const searchParams = useSearchParams() + const id = searchParams.get('id') const [recordingInfo, setRecordingInfo] = useState<{ - recording: RecordingType, - action_events: ActionEventType[], - num_events: number, - }>(); + recording: RecordingType + action_events: ActionEventType[] + num_events: number + }>() useEffect(() => { if (!id) { - return; + return } - const websocket = new WebSocket(`ws://${window.location.host}/api/recordings/${id}`); + const websocket = new WebSocket( + `ws://${window.location.host}/api/recordings/${id}` + ) websocket.onmessage = (event) => { - const data = JSON.parse(event.data); - if (data.type === "recording") { - setRecordingInfo(prev => { + const data = JSON.parse(event.data) + if (data.type === 'recording') { + setRecordingInfo((prev) => { if (!prev) { return { - "recording": data.value, - "action_events": [], - "num_events": 0, + recording: data.value, + action_events: [], + num_events: 0, } } - return prev; - }); - } else if (data.type === "action_event") { - setRecordingInfo(prev => { - if (!prev) return prev; + return prev + }) + } else if (data.type === 'action_event') { + setRecordingInfo((prev) => { + if (!prev) return prev return { ...prev, - "action_events": [...prev.action_events, modifyActionEvent(data.value, prev.recording.original_recording_id === null)], + action_events: [ + ...prev.action_events, + modifyActionEvent( + data.value, + prev.recording.original_recording_id === null + ), + ], } - }); - } else if (data.type === "num_events") { - setRecordingInfo(prev => { - if (!prev) return prev; + }) + } else if (data.type === 'num_events') { + setRecordingInfo((prev) => { + if (!prev) return prev return { ...prev, - "num_events": data.value, + num_events: data.value, } - }); + }) } } return () => { - websocket.close(); + websocket.close() } - }, [id]); + }, [id]) function uploadRecording() { fetch(`/api/recordings/${id}/upload`, { - method: "POST", - }).then(resp => { + method: 'POST', + }).then((resp) => { if (resp.status === 200) { notifications.show({ - title: "Recording uploaded", - message: "Recording has been uploaded successfully", - color: "teal", - }); + title: 'Recording uploaded', + message: 'Recording has been uploaded successfully', + color: 'teal', + }) } else { notifications.show({ - title: "Error", - message: "Failed to upload recording", - color: "red", - }); + title: 'Error', + message: 'Failed to upload recording', + color: 'red', + }) } - }); + }) } if (!recordingInfo) { - return ; + return } - const actionEvents = recordingInfo.action_events; + const actionEvents = recordingInfo.action_events return ( @@ -92,31 +100,46 @@ function Recording() { - {actionEvents.length && actionEvents.length < recordingInfo.num_events && ( - - - Loading events 
{actionEvents.length}/{recordingInfo.num_events} - - - )} + {actionEvents.length && + actionEvents.length < recordingInfo.num_events && ( + + + + Loading events {actionEvents.length}/ + {recordingInfo.num_events} + + + + )} ) } -function modifyActionEvent(actionEvent: ActionEventType, isOriginal: boolean): ActionEventType { - let children = actionEvent.children; +function modifyActionEvent( + actionEvent: ActionEventType, + isOriginal: boolean +): ActionEventType { + let children = actionEvent.children if (actionEvent.children) { - children = actionEvent.children.map(child => modifyActionEvent(child, isOriginal)); + children = actionEvent.children.map((child) => + modifyActionEvent(child, isOriginal) + ) } - let id = actionEvent.id; - let isComputed = false; + let id = actionEvent.id + let isComputed = false if (!id) { // this is usually the case, when new events like 'singleclick' // or 'doubleclick' are created while merging several events together, // but they are not saved in the database - id = crypto.randomUUID(); - isComputed = true; + id = crypto.randomUUID() + isComputed = true } return { ...actionEvent, @@ -127,7 +150,6 @@ function modifyActionEvent(actionEvent: ActionEventType, isOriginal: boolean): A } } - export default function RecordingPage() { return ( diff --git a/openadapt/app/dashboard/app/recordings/page.tsx b/openadapt/app/dashboard/app/recordings/page.tsx index 901444f76..08e14b3bc 100644 --- a/openadapt/app/dashboard/app/recordings/page.tsx +++ b/openadapt/app/dashboard/app/recordings/page.tsx @@ -1,53 +1,54 @@ -'use client'; - -import { Box, Button, Tabs } from "@mantine/core"; -import { useEffect, useState } from "react"; -import { RecordingStatus } from "@/types/recording"; -import { ScrubbedRecordings } from "./ScrubbedRecordings"; -import { RawRecordings } from "./RawRecordings"; +'use client' +import { Box, Button, Tabs } from '@mantine/core' +import { useEffect, useState } from 'react' +import { RecordingStatus } from '@/types/recording' +import { ScrubbedRecordings } from './ScrubbedRecordings' +import { RawRecordings } from './RawRecordings' export default function Recordings() { - const [recordingStatus, setRecordingStatus] = useState(RecordingStatus.UNKNOWN); + const [recordingStatus, setRecordingStatus] = useState( + RecordingStatus.UNKNOWN + ) function startRecording() { if (recordingStatus === RecordingStatus.RECORDING) { - return; + return } - fetch('/api/recordings/start').then(res => { + fetch('/api/recordings/start').then((res) => { if (res.ok) { - setRecordingStatus(RecordingStatus.RECORDING); + setRecordingStatus(RecordingStatus.RECORDING) } - }); + }) } function stopRecording() { if (recordingStatus === RecordingStatus.STOPPED) { - return; + return } - setRecordingStatus(RecordingStatus.UNKNOWN); - fetch('/api/recordings/stop').then(res => { + setRecordingStatus(RecordingStatus.UNKNOWN) + fetch('/api/recordings/stop').then((res) => { if (res.ok) { - setRecordingStatus(RecordingStatus.STOPPED); + setRecordingStatus(RecordingStatus.STOPPED) } - }); + }) } function fetchRecordingStatus() { - fetch('/api/recordings/status').then(res => { + fetch('/api/recordings/status').then((res) => { if (res.ok) { res.json().then((data) => { if (data.recording) { - setRecordingStatus(RecordingStatus.RECORDING); + setRecordingStatus(RecordingStatus.RECORDING) } else { - setRecordingStatus(RecordingStatus.STOPPED); + setRecordingStatus(RecordingStatus.STOPPED) } - }); + }) } - }); + }) } useEffect(() => { - fetchRecordingStatus(); - }, []); + fetchRecordingStatus() 
+ }, []) return ( {recordingStatus === RecordingStatus.RECORDING && ( @@ -67,12 +68,8 @@ export default function Recordings() { )} - - Recordings - - - Scrubbed recordings - + Recordings + Scrubbed recordings diff --git a/openadapt/app/dashboard/app/routes.ts b/openadapt/app/dashboard/app/routes.ts index b0ef5f0d0..adaf0e422 100644 --- a/openadapt/app/dashboard/app/routes.ts +++ b/openadapt/app/dashboard/app/routes.ts @@ -19,5 +19,5 @@ export const routes: Route[] = [ { name: 'Onboarding', path: '/onboarding', - } + }, ] diff --git a/openadapt/app/dashboard/app/scrubbing/ScrubbingUpdates.tsx b/openadapt/app/dashboard/app/scrubbing/ScrubbingUpdates.tsx index 094ecc1a1..720e9b208 100644 --- a/openadapt/app/dashboard/app/scrubbing/ScrubbingUpdates.tsx +++ b/openadapt/app/dashboard/app/scrubbing/ScrubbingUpdates.tsx @@ -1,56 +1,96 @@ -import { ScrubbingUpdate } from '@/types/scrubbing'; -import { Box, Button, Container, Progress, Stack, Text } from '@mantine/core'; +import { ScrubbingUpdate } from '@/types/scrubbing' +import { Box, Button, Container, Progress, Stack, Text } from '@mantine/core' import React from 'react' type Props = { - data?: ScrubbingUpdate; - resetScrubbingStatus: () => void; + data?: ScrubbingUpdate + resetScrubbingStatus: () => void } export const ScrubbingUpdates = ({ data, resetScrubbingStatus }: Props) => { - if (!data) { - return null; - } - const isScrubbingComplete = ( - data.total_action_events > 0 && - data.total_window_events > 0 && - data.total_screenshots > 0 && - data.num_action_events_scrubbed === data.total_action_events && - data.num_window_events_scrubbed === data.total_window_events && - data.num_screenshots_scrubbed === data.total_screenshots - ) || data.error; - return ( - - - Scrubbing updates for recording {data.recording.task_description} - Provider: {data.provider} - {data.copying_recording ? ( - Copying recording (this may take a while if Spacy dependencies need to be downloaded on the first run)... - ) : data.error ? ( - {data.error} - ) : ( - - - - {data.num_action_events_scrubbed} / {data.total_action_events} action events scrubbed - - - - {data.num_window_events_scrubbed} / {data.total_window_events} window events scrubbed - - - - {data.num_screenshots_scrubbed} / {data.total_screenshots} screenshots scrubbed - - - )} - {isScrubbingComplete && ( - - Scrubbing complete! - - - )} - - - ) + if (!data) { + return null + } + const isScrubbingComplete = + (data.total_action_events > 0 && + data.total_window_events > 0 && + data.total_screenshots > 0 && + data.num_action_events_scrubbed === data.total_action_events && + data.num_window_events_scrubbed === data.total_window_events && + data.num_screenshots_scrubbed === data.total_screenshots) || + data.error + return ( + + + + Scrubbing updates for recording{' '} + {data.recording.task_description} + + Provider: {data.provider} + {data.copying_recording ? ( + + Copying recording (this may take a while if Spacy + dependencies need to be downloaded on the first run)... + + ) : data.error ? ( + {data.error} + ) : ( + + + + + {data.num_action_events_scrubbed} /{' '} + {data.total_action_events} action events + scrubbed + + + + + + {data.num_window_events_scrubbed} /{' '} + {data.total_window_events} window events + scrubbed + + + + + + {data.num_screenshots_scrubbed} /{' '} + {data.total_screenshots} screenshots scrubbed + + + + )} + {isScrubbingComplete && ( + + Scrubbing complete! 
+ + + )} + + + ) } diff --git a/openadapt/app/dashboard/app/scrubbing/page.tsx b/openadapt/app/dashboard/app/scrubbing/page.tsx index aa33bc1f0..ae230558a 100644 --- a/openadapt/app/dashboard/app/scrubbing/page.tsx +++ b/openadapt/app/dashboard/app/scrubbing/page.tsx @@ -1,20 +1,23 @@ -'use client'; - -import { Recording } from "@/types/recording"; -import { ScrubbingStatus, ScrubbingUpdate } from "@/types/scrubbing"; -import { Box, Button, Container, Grid, Select, Text } from "@mantine/core"; -import { isNotEmpty, useForm } from "@mantine/form"; -import { notifications } from "@mantine/notifications"; -import { useEffect, useState } from "react"; -import { ScrubbingUpdates } from "./ScrubbingUpdates"; -import Link from "next/link"; +'use client' +import { Recording } from '@/types/recording' +import { ScrubbingStatus, ScrubbingUpdate } from '@/types/scrubbing' +import { Box, Button, Container, Grid, Select, Text } from '@mantine/core' +import { isNotEmpty, useForm } from '@mantine/form' +import { notifications } from '@mantine/notifications' +import { useEffect, useState } from 'react' +import { ScrubbingUpdates } from './ScrubbingUpdates' +import Link from 'next/link' export default function Recordings() { const [recordings, setRecordings] = useState([]) - const [scrubbingProviders, setScrubbingProviders] = useState>({}); - const [scrubbingStatus, setScrubbingStatus] = useState(ScrubbingStatus.UNKNOWN); - const [scrubbingUpdate, setScrubbingUpdate] = useState(); + const [scrubbingProviders, setScrubbingProviders] = useState< + Record + >({}) + const [scrubbingStatus, setScrubbingStatus] = useState( + ScrubbingStatus.UNKNOWN + ) + const [scrubbingUpdate, setScrubbingUpdate] = useState() const scrubbingForm = useForm({ initialValues: { recordingId: '', @@ -23,82 +26,87 @@ export default function Recordings() { validate: { recordingId: isNotEmpty('Recording is required'), providerId: isNotEmpty('Provider is required'), - } - }); + }, + }) function fetchRecordings() { - fetch('/api/recordings').then(res => { + fetch('/api/recordings').then((res) => { if (res.ok) { res.json().then((data) => { - setRecordings(data.recordings); - }); + setRecordings(data.recordings) + }) } }) } function fetchScrubbingProviders() { - fetch('/api/scrubbing/providers').then(res => { + fetch('/api/scrubbing/providers').then((res) => { if (res.ok) { res.json().then((data) => { - setScrubbingProviders(data); - }); + setScrubbingProviders(data) + }) } - }); + }) } function fetchScrubbingStatus() { - fetch('/api/scrubbing/status').then(res => { + fetch('/api/scrubbing/status').then((res) => { if (res.ok) { res.json().then((data) => { if (data.status) { - setScrubbingStatus(ScrubbingStatus.SCRUBBING); - fetchScrubbingUpdates(); + setScrubbingStatus(ScrubbingStatus.SCRUBBING) + fetchScrubbingUpdates() } else { - setScrubbingStatus(ScrubbingStatus.STOPPED); + setScrubbingStatus(ScrubbingStatus.STOPPED) } - }); + }) } - }); + }) } - function scrubRecording(values: { recordingId: string, providerId: string}) { - setScrubbingStatus(ScrubbingStatus.UNKNOWN); - const { recordingId, providerId } = values; + function scrubRecording(values: { + recordingId: string + providerId: string + }) { + setScrubbingStatus(ScrubbingStatus.UNKNOWN) + const { recordingId, providerId } = values fetch(`/api/scrubbing/scrub/${recordingId}/${providerId}`, { method: 'POST', - }).then(res => res.json()).then((data) => { - if (data.status === "success") { - setScrubbingStatus(ScrubbingStatus.SCRUBBING); - } else { - notifications.show({ - title: 
'Error while scrubbing recording', - message: data.message, - color: 'red', - }); - } - fetchScrubbingStatus(); - }); + }) + .then((res) => res.json()) + .then((data) => { + if (data.status === 'success') { + setScrubbingStatus(ScrubbingStatus.SCRUBBING) + } else { + notifications.show({ + title: 'Error while scrubbing recording', + message: data.message, + color: 'red', + }) + } + fetchScrubbingStatus() + }) } async function fetchScrubbingUpdates() { - fetch('/api/scrubbing/updates').then(res => { - const reader = res.body?.getReader(); - const textDecoder = new TextDecoder('utf-8'); - setScrubbingStatus(ScrubbingStatus.SCRUBBING); + fetch('/api/scrubbing/updates').then((res) => { + const reader = res.body?.getReader() + const textDecoder = new TextDecoder('utf-8') + setScrubbingStatus(ScrubbingStatus.SCRUBBING) reader?.read().then(function processText({ done, value }) { if (done) { - return; + return } - const data = JSON.parse(textDecoder.decode(value)); - setScrubbingUpdate(data); - reader?.read().then(processText); - }); - }); + const data = JSON.parse(textDecoder.decode(value)) + setScrubbingUpdate(data) + reader?.read().then(processText) + }) + }) } useEffect(() => { - fetchRecordings(); - fetchScrubbingProviders(); - fetchScrubbingStatus(); - }, []); + fetchRecordings() + fetchScrubbingProviders() + fetchScrubbingStatus() + }, []) function resetScrubbingStatus() { - fetchScrubbingStatus(); + fetchScrubbingStatus() } return ( @@ -106,7 +114,10 @@ export default function Recordings() { {scrubbingStatus === ScrubbingStatus.UNKNOWN ? ( Checking scrubbing status... ) : scrubbingStatus === ScrubbingStatus.SCRUBBING ? ( - + ) : ( @@ -123,22 +134,29 @@ export default function Recordings() {
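
Stepping back from the lint-only dashboard changes in this commit: the presigned-URL contract that the earlier uploader changes establish between openadapt.utils (upload_file_to_s3, get_recording_url) and the Lambda in scripts/recording_uploader can be summarized with a short, illustrative sketch. The endpoint URL, user id, and file name below are placeholders and not values from the patch; only the JSON fields and the "url" response key are taken from the diffs above.

    # Illustrative client-side flow, with placeholder endpoint and user id.
    from uuid import uuid4
    import requests

    RECORDING_UPLOAD_URL = "https://<your-api-gateway-endpoint>/upload"  # placeholder
    user_id = "example-user-id"  # placeholder

    key = f"recordings/{user_id}/{uuid4()}.zip"

    # 1. Ask the uploader Lambda for a presigned PUT URL for this key.
    resp = requests.post(
        RECORDING_UPLOAD_URL,
        json={
            "key": key,
            "lambda_function": "get_presigned_url",
            "client_method": "put_object",
        },
    )
    upload_url = resp.json()["url"]

    # 2. PUT the zipped recording to the presigned URL.
    with open("recording.zip", "rb") as file:  # placeholder file name
        requests.put(upload_url, files={"file": ("recording.zip", file)})

    # 3. Later, request a presigned GET URL for the same key to view or
    #    download the uploaded recording.
    resp = requests.post(
        RECORDING_UPLOAD_URL,
        json={
            "key": key,
            "lambda_function": "get_presigned_url",
            "client_method": "get_object",
        },
    )
    download_url = resp.json()["url"]
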