diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml index 9b5261c8..58160004 100644 --- a/.github/workflows/docker-publish.yml +++ b/.github/workflows/docker-publish.yml @@ -9,7 +9,7 @@ on: release: types: ['published'] push: - branches: [ "dockerify", "testnet_pretask" ] + branches: [ "dockerify", "testnet_pretask", "testnet_5_pairs", "zkevm_quests", "eth_uniswapv2"] # Publish semver tags as releases. tags: [ 'v*.*.*' ] pull_request: @@ -37,6 +37,8 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v3 + with: + submodules: recursive # Install the cosign tool except on PR # https://github.com/sigstore/cosign-installer diff --git a/.gitignore b/.gitignore index 8dcc3d73..1820a719 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,4 @@ uniswapTokenData **/*aggregator.json config/*settings.json **/*.backup +.vscode/* diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..2f2c77d9 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,6 @@ +[submodule "config"] + path = config + url = https://github.com/PowerLoom/snapshotter-configs/ +[submodule "snapshotter/modules/computes"] + path = snapshotter/modules/computes + url = https://github.com/PowerLoom/snapshotter-computes/ diff --git a/README.md b/README.md index 6ca17e67..efbd93fa 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,22 @@ ## Table of Contents - [Table of Contents](#table-of-contents) - [Overview](#overview) + - [Architecture](#architecture) - [Setup](#setup) - [State transitions and data composition](#state-transitions-and-data-composition) - [Epoch Generation](#epoch-generation) - [Preloading](#preloading) - [Base Snapshot Generation](#base-snapshot-generation) + - [Bulk Mode](#bulk-mode) + - [Data source signaling](#data-source-signaling) - [Snapshot Finalization](#snapshot-finalization) + - [Epoch processing state transitions](#epoch-processing-state-transitions) + - [`EPOCH_RELEASED`](#epoch_released) + - [`PRELOAD`](#preload) + - [`SNAPSHOT_BUILD`](#snapshot_build) + - [`SNAPSHOT_SUBMIT_PAYLOAD_COMMIT`](#snapshot_submit_payload_commit) + - [`RELAYER_SEND`](#relayer_send) + - [`SNAPSHOT_FINALIZE`](#snapshot_finalize) - [Aggregation and data composition - snapshot generation of higher-order data points on base snapshots](#aggregation-and-data-composition---snapshot-generation-of-higher-order-data-points-on-base-snapshots) - [Major Components](#major-components) - [System Event Detector](#system-event-detector) @@ -16,26 +26,35 @@ - [Callback Workers](#callback-workers) - [RPC Helper](#rpc-helper) - [Core API](#core-api) -- [Development Instructions](#development-instructions) +- [Development setup and instructions](#development-setup-and-instructions) - [Configuration](#configuration) - [Monitoring and Debugging](#monitoring-and-debugging) + - [Internal Snapshotter APIs](#internal-snapshotter-apis) + - [`GET /internal/snapshotter/epochProcessingStatus`](#get-internalsnapshotterepochprocessingstatus) + - [`GET /internal/snapshotter/status`](#get-internalsnapshotterstatus) + - [`GET /internal/snapshotter/status/{project_id}`](#get-internalsnapshotterstatusproject_id) + - [`GET /internal/snapshotter/status/{project_id}?data=true`](#get-internalsnapshotterstatusproject_iddatatrue) - [For Contributors](#for-contributors) -- [Pooler: Case study and extending this implementation](#pooler-case-study-and-extending-this-implementation) -- [Extending pooler with a Uniswap v2 data point](#extending-pooler-with-a-uniswap-v2-data-point) - - [Step 1. 
Review: Base snapshot extraction logic for trade information](#step-1-review-base-snapshot-extraction-logic-for-trade-information) - - [Step 2. Review: 24 hour aggregate of trade volume snapshots over a single pair contract](#step-2-review-24-hour-aggregate-of-trade-volume-snapshots-over-a-single-pair-contract) - - [Step 3. New Datapoint: 2 hours aggregate of only swap events](#step-3-new-datapoint-2-hours-aggregate-of-only-swap-events) +- [Case Studies](#case-studies) + - [1. Pooler: Case study and extending this implementation](#1-pooler-case-study-and-extending-this-implementation) + - [Extending pooler with a Uniswap v2 data point](#extending-pooler-with-a-uniswap-v2-data-point) + - [Step 1. Review: Base snapshot extraction logic for trade information](#step-1-review-base-snapshot-extraction-logic-for-trade-information) + - [Step 2. Review: 24 hour aggregate of trade volume snapshots over a single pair contract](#step-2-review-24-hour-aggregate-of-trade-volume-snapshots-over-a-single-pair-contract) + - [Step 3. New Datapoint: 2 hours aggregate of only swap events](#step-3-new-datapoint-2-hours-aggregate-of-only-swap-events) + - [2. Zkevm Quests: A Case Study of Implementation](#2-zkevm-quests-a-case-study-of-implementation) + - [Review: Base snapshots](#review-base-snapshots) + - [`zkevm:bungee_bridge`](#zkevmbungee_bridge) - [Find us](#find-us) ## Overview ![Snapshotter workflow](snapshotter/static/docs/assets/OverallArchitecture.png) -A snapshotter peer as part of Powerloom Protocol does exactly what the name suggests: It synchronizes with other snapshotter peers over a smart contract running on the present version of the PowerLoom Protocol testnet. It follows an architecture that is driven by state transitions which makes it easy to understand and modify. +A snapshotter peer as part of Powerloom Protocol does exactly what the name suggests: It synchronizes with other snapshotter peers over a smart contract running on Powerloom Prost chain. It follows an architecture that is driven by state transitions which makes it easy to understand and modify. Because of its decentralized nature, the snapshotter specification and its implementations share some powerful features that can adapt to your specific information requirements on blockchain applications: -* each data point is calculated, updated, and synchronized with other snapshotter peers participating in the network +* Each data point is calculated, updated, and synchronized with other snapshotter peers participating in the network * synchronization of data points is defined as a function of an epoch ID(identifier) where epoch refers to an equally spaced collection of blocks on the data source blockchain (for eg, Ethereum Mainnet/Polygon Mainnet/Polygon Testnet -- Mumbai). This simplifies the building of use cases that are stateful (i.e. can be accessed according to their state at a given height of the data source chain), synchronized, and depend on reliable data. 
For example, * dashboards by offering higher-order aggregate datapoints * trading strategies and bots @@ -43,10 +62,28 @@ Because of its decentralized nature, the snapshotter specification and its imple * all the datasets are decentralized on IPFS/Filecoin * the power of these decentralized storage networks can be leveraged fully by applying the [principle of composability](#aggregation-and-data-composition---snapshot-generation-of-higher-order-datapoints-on-base-snapshots) +### Architecture + +The Snapshotter Peer is thoughtfully designed with a modular and highly configurable architecture, allowing for easy customization and seamless integration. It consists of three core components: + +1. **Main Snapshotter Codebase**: + - This foundational component defines all the essential interfaces and handles a wide range of tasks, from listening to epoch release events to distributing tasks and managing snapshot submissions. + +2. **Configuration Files**: + - Configuration files, located in the `/config` directory are linked to [snapshotter-configs](https://github.com/PowerLoom/snapshotter-configs/) repo, play a pivotal role in defining project types, specifying paths for individual compute modules, and managing various project-related settings. + +3. **Compute Modules**: + - The heart of the system resides in the `snapshotter/modules` directory are linked to [snapshotter-computes](https://github.com/PowerLoom/snapshotter-computes/), where the actual computation logic for each project type is defined. These modules drive the snapshot generation process for specific project types. + +![Snapshotter Architecture](snapshotter/static/docs/assets/SnapshotterArchitecture.png) + +The architecture has been designed to facilitate the seamless interchange of configuration and modules. To achieve this, we maintain these components in separate Git repositories, which are then integrated into the Snapshotter Peer using Git Submodules. As a result, adapting the system to different use cases is as straightforward as changing a Git branch, offering unparalleled flexibility and versatility. + +For more information on using Git Submodules, please refer to the [Git Submodules Documentation](https://git-scm.com/book/en/v2/Git-Tools-Submodules). ## Setup -The snapshotter is a distributed system with multiple moving parts. The easiest way to get started is by using the Docker-based setup from the [deploy](https://github.com/PowerLoom/deploy) repository. +The snapshotter is a distributed system with multiple moving parts. The easiest way to get started is by using the Docker-based setup according to the instructions in the section: [Development setup and instructions](#development-setup-and-instructions). If you're planning to participate as a snapshotter, refer to [these instructions](https://github.com/PowerLoom/deploy#for-snapshotters) to start snapshotting. @@ -61,7 +98,7 @@ If you're a developer, you can follow the [manual configuration steps for pooler ### Epoch Generation -An epoch denotes a range of block heights on the data source blockchain, Ethereum mainnet in the case of Uniswap v2. This makes it easier to collect state transitions and snapshots of data on equally spaced block height intervals, as well as to support future work on other lightweight anchor proof mechanisms like Merkle proofs, succinct proofs, etc. +An epoch denotes a range of block heights on the EVM-compatible data source blockchain, for eg Ethereum mainnet/Polygon PoS mainnet/testnet. 
This makes it easier to collect state transitions and snapshots of data on equally spaced block height intervals, as well as to support future work on other lightweight anchor proof mechanisms like Merkle proofs, succinct proofs, etc. The size of an epoch is configurable. Let that be referred to as `size(E)` @@ -86,9 +123,9 @@ The size of an epoch is configurable. Let that be referred to as `size(E)` Preloaders perform an important function of fetching low-level data for eg. block details, and transaction receipts so that subsequent base snapshot building can proceed without performing unnecessary redundant calls that ultimately save on access costs on RPC and other queries on the underlying node infrastructure for the source data blockchain. -Each project type within the project configuration as found in [`config/projects.json`](config/projects.example.json) can specify the preloaders that their base snapshot builds depend on. Once the dependent preloaders have completed their fetches, the [Processor Distributor](#processor-distributor) subsequently triggers the base snapshot builders for each project type. +Each project type within the project configuration as found in [`config/projects.json`](https://github.com/PowerLoom/snapshotter-configs/blob/f46cc86cd08913014decf7bced128433442c8f84/projects.example.json) can specify the preloaders that their base snapshot builds depend on. Once the dependent preloaders have completed their fetches, the [Processor Distributor](#processor-distributor) subsequently triggers the base snapshot builders for each project type. -https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/config/projects.example.json#L3-L12 +https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/config/projects.example.json#L2-L8 The preloaders implement one of the following two generic interfaces @@ -96,12 +133,12 @@ The preloaders implement one of the following two generic interfaces https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/snapshotter/utils/callback_helpers.py#L109-L126 -* `GenericDelegatorPreloader`. Such preloaders are tasked with fetching large volumes of data and utilize [delegated workers](#delegation-workers-for-preloaders) to whom they submit large workloads over a request queue and wait for the results to be returned over a response queue. +* `GenericDelegatorPreloader`. Such preloaders are tasked with fetching large volumes of data and utilize [delegated workers](#delegation-workers-for-preloaders) to which they submit large workloads over a request queue and wait for the results to be returned over a response queue. https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/snapshotter/utils/callback_helpers.py#L129-L161 -The preloaders can be found in the [`snapshotter/utils/preloaders`](snapshotter/utils/preloaders/) directory. The preloaders that are available to project configuration entries are exposed through the [`config/preloader.json`](config/preloader.json) configuration. +The preloaders can be found in the [`snapshotter/utils/preloaders`](snapshotter/utils/preloaders/) directory. The preloaders that are available to project configuration entries are exposed through the [`config/preloader.json`](https://github.com/PowerLoom/snapshotter-configs/blob/f46cc86cd08913014decf7bced128433442c8f84/preloader.json) configuration. 
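For orientation, a self-contained preloader is typically a small class with an async fetch hook and a cleanup hook, along the lines of the sketch below. This is illustrative only: the actual `GenericPreloader` interface and its exact signatures are the ones defined in `snapshotter/utils/callback_helpers.py` (linked above), and the `get_block_details` helper here is hypothetical.

```python
from abc import ABC, abstractmethod


class PreloaderSketch(ABC):
    """Illustrative stand-in for the GenericPreloader interface; the canonical
    definition lives in snapshotter/utils/callback_helpers.py."""

    @abstractmethod
    async def compute(self, epoch, redis_conn, rpc_helper):
        """Fetch and cache low-level data for the epoch's [begin, end] block range."""

    @abstractmethod
    async def cleanup(self):
        """Release any resources held during the fetch."""


async def get_block_details(rpc_helper, block_num):
    """Placeholder fetch so the sketch stays self-contained; the real RpcHelper
    exposes its own (batched) methods for this."""
    return {'number': block_num}


class BlockDetailsPreloaderSketch(PreloaderSketch):
    """Caches block details once per block so that snapshot builders do not
    repeat the same RPC calls for the epoch."""

    async def compute(self, epoch, redis_conn, rpc_helper):
        for block_num in range(epoch.begin, epoch.end + 1):
            block = await get_block_details(rpc_helper, block_num)
            # cache under a per-block key for downstream snapshot builders
            await redis_conn.set(f'blockDetail:{block_num}', str(block))

    async def cleanup(self):
        pass
```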
https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/config/preloader.json#L1-L27 @@ -114,20 +151,50 @@ More preloaders can be easily added depending on the use case user is snapshotti ### Base Snapshot Generation - Workers, as mentioned in the configuration section for [`config/projects.json`](#configuration), calculate base snapshots against this `epochId` which corresponds to collections of state observations and event logs between the blocks at height in the range `[begin, end]`. The data sources are determined according to the following specification for the `projects` key: + Workers, as mentioned in the configuration section for [`config/projects.json`](#configuration), calculate base snapshots against this `epochId` which corresponds to collections of state observations and event logs between the blocks at height in the range `[begin, end]`. + + The data sources are determined according to the following specification for the `projects` key: - * an empty array against the `projects` indicates the data sources are to be loaded from the protocol state contract on initialization + * an empty array against the `projects` indicates no specific data source is defined * an array of EVM-compatible wallet address strings can also be listed * an array of "_" strings that denote the relationship between two EVM addresses (for eg ERC20 balance of `addr2` against a token contract `addr1`) - * data sources can be dynamically added on the protocol state contract which the [processor distributor](#processor-distributor) [syncs with](https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/snapshotter/processor_distributor.py#L597) + * data sources can be dynamically added on the protocol state contract which the [processor distributor](#processor-distributor) [syncs with](https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/processor_distributor.py#L1107): The project ID is ultimately generated in the following manner: -https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/snapshotter/utils/snapshot_worker.py#L29-L38 +https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/utils/snapshot_worker.py#L51-L71 The snapshots generated by workers defined in this config are the fundamental data models on which higher-order aggregates and richer data points are built. The `SnapshotSubmitted` event generated on such base snapshots further triggers the building of sophisticated aggregates, super-aggregates, filters, and other data composites on top of them. +#### Bulk Mode + +For situations where data sources are constantly changing or numerous, making it impractical to maintain an extensive list of them, the Snapshotter Peer offers a Bulk Mode. This feature is particularly useful in scenarios where specific data sources need not be defined explicitly. + +In Bulk Mode, the system monitors all transactions and blocks without the need for predefined data sources. The Processor Distributor generates a `SnapshotProcessMessage` with bulk mode enabled for each project type. When snapshot workers receive this message, they leverage preloaded transaction receipts for entire blocks, filtering out relevant transactions to generate snapshots for all data sources that interacted with the blockchain during that epoch. Snapshot worker then generates relevant project Ids for these snapshots and submits them for further processing. 
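As a simplified illustration of that filtering step (not the actual `snapshot_worker.py` logic referenced below), a bulk-mode pass over preloaded receipts could look like the sketch that follows. `is_relevant` is a placeholder predicate, receipts are shown as plain dicts, and the project ID pattern follows the `<project_type>:<data_source>:<namespace>` examples used elsewhere in this document.

```python
def build_bulk_mode_projects(project_type, namespace, preloaded_tx_receipts, is_relevant):
    """Group relevant transactions by the data source they touched and derive
    one project ID per data source that was active during the epoch."""
    grouped = {}
    for tx_hash, receipt in preloaded_tx_receipts.items():
        if not is_relevant(receipt):
            continue
        data_source = (receipt.get('to') or '').lower()
        # Project IDs in this document follow the pattern
        # '<project_type>:<data_source>:<namespace>',
        # e.g. 'pairContract_trade_volume:0xb4e16d...:UNISWAPV2'
        project_id = f'{project_type}:{data_source}:{namespace}'
        grouped.setdefault(project_id, []).append(tx_hash)
    return grouped


# Usage: keep only transactions sent to contracts the use case cares about.
receipts = {
    '0xabc': {'to': '0xB4e16d0168e52d35CaCD2c6185b44281Ec28C9Dc', 'status': 1},
    '0xdef': {'to': '0x0000000000000000000000000000000000000000', 'status': 1},
}
watched = {'0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc'}
print(build_bulk_mode_projects(
    'pairContract_trade_volume', 'UNISWAPV2', receipts,
    lambda r: (r.get('to') or '').lower() in watched,
))
```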
+ +Bulk Mode is highly effective in situations where the project list is continually expanding or where snapshots don't need to be submitted in every epoch, perhaps because the data hasn't changed significantly. Example use cases include monitoring on-chain activities and tracking task or quest completion statuses on the blockchain. + +An important advantage of Bulk Mode is that, since all transaction receipts are preloaded, this approach can efficiently scale to accommodate a large number of project types with little to no increase in RPC (Remote Procedure Call) calls. + +https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/utils/snapshot_worker.py#L260-L299 + + ### Data source signaling + + As seen above in the section on [base snapshot generation](#base-snapshot-generation), data sources can be dynamically added to the contract according to the role of certain peers in the ecosystem known as 'signallers'. This is the most significant aspect of the Powerloom Protocol ecosystem apart from snapshotting and will soon be decentralized to factor in on-chain activity, and market forces and accommodate a demand-driven, dynamic data ecosystem. + +In the existing setup, when the `project_type` is set to an empty array (`[]`) and bulk mode is not activated, the snapshotter node attempts to retrieve data sources corresponding to the `projects` key from the protocol state contract. + +Whenever a data source is added or removed by a combination of the data source-detector and signaller, the protocol state smart contract emits a `ProjectUpdated` event, adhering to the defined data model. + +https://github.com/PowerLoom/pooler/blob/5892eeb9433d8f4b8aa677006d98a1dde0458cb7/snapshotter/utils/models/data_models.py#L102-L105 + +The snapshotting for every such dynamically added project is initiated only when the `epochId`, corresponding to the field `enableEpochId` contained within the `ProjectUpdated` event, is released. The [processor distributor](#processor-distributor) correctly triggers the snapshotting workflow for such dynamically added data sources in the following segment: + +https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/processor_distributor.py#L765-L796 + + + ### Snapshot Finalization All snapshots per project reach consensus on the protocol state contract which results in a `SnapshotFinalized` event being triggered. @@ -136,6 +203,48 @@ All snapshots per project reach consensus on the protocol state contract which r event SnapshotFinalized(uint256 indexed epochId, uint256 epochEnd, string projectId, string snapshotCid, uint256 timestamp); ``` +### Epoch processing state transitions + +The following is a sequence of states that an epoch goes through from the point epoch is released until `SnapshotFinalized` event is received by the processor distributor for the specific epoch. These state transitions can be inspected in detail as noted in the section on [internal snapshotter APIs](#internal-snapshotter-apis). + +--- + +#### `EPOCH_RELEASED` + +The state name is self explanatory. 
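For orientation, the information that accompanies an epoch release is essentially the `[begin, end]` block range plus the `epochId` described under [Epoch Generation](#epoch-generation). A minimal illustrative shape is sketched below; it is not the repository's actual data model, and the field names are assumptions.

```python
from pydantic import BaseModel


class EpochReleasedSketch(BaseModel):
    """Illustrative shape of an epoch release notification; the canonical
    models live under snapshotter/utils/models/."""
    epochId: int
    begin: int   # first block height in the epoch, inclusive
    end: int     # last block height in the epoch, inclusive
```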
+ + +#### `PRELOAD` + +For every [project type's preloader specifications](https://github.com/PowerLoom/pooler/blob/bcc245d228acce504ba803b9b50fd89c8eb05984/README.md#preloading), the status of all the preloading dependencies being satisfied is captured here: + +https://github.com/PowerLoom/pooler/blob/bcc245d228acce504ba803b9b50fd89c8eb05984/snapshotter/processor_distributor.py#L227-L251 + +#### `SNAPSHOT_BUILD` + +The snapshot builders as configured in [`projects.json`](https://github.com/PowerLoom/pooler/blob/56c3dd71b5ec0abf58db3407ef3539f3457076f5/README.md#base-snapshot-generation) are executed. Also refer to the [case study of the current implementation of Pooler](https://github.com/PowerLoom/pooler/blob/56c3dd71b5ec0abf58db3407ef3539f3457076f5/README.md#1-pooler-case-study-and-extending-this-implementation) for a detailed look at snapshot building for base as well as aggregates. + + +https://github.com/PowerLoom/pooler/blob/bcc245d228acce504ba803b9b50fd89c8eb05984/snapshotter/utils/snapshot_worker.py#L100-L120 + +#### `SNAPSHOT_SUBMIT_PAYLOAD_COMMIT` + +Captures the status of propagation of the built snapshot to the [payload commit service in Audit Protocol](https://github.com/PowerLoom/audit-protocol/blob/1d8b1ae0789ba3260ddb358231ac4b597ec8a65f/docs/Introduction.md#payload-commit-service) for further submission to the protocol state contract. + +https://github.com/PowerLoom/pooler/blob/bcc245d228acce504ba803b9b50fd89c8eb05984/snapshotter/utils/generic_worker.py#L166-L195 + + +#### `RELAYER_SEND` + +Payload commit service has sent the snapshot to a transaction relayer to submit to the protocol state contract. + + +#### `SNAPSHOT_FINALIZE` + +[Finalized snapshot](https://github.com/PowerLoom/pooler/blob/56c3dd71b5ec0abf58db3407ef3539f3457076f5/README.md#snapshot-finalization) accepted against an epoch via a `SnapshotFinalized` event. + +https://github.com/PowerLoom/pooler/blob/bcc245d228acce504ba803b9b50fd89c8eb05984/snapshotter/processor_distributor.py#L475-L482 + ### Aggregation and data composition - snapshot generation of higher-order data points on base snapshots Workers as defined in `config/aggregator.json` are triggered by the appropriate signals forwarded to [`Processor Distributor`](pooler/processor_distributor.py) corresponding to the project ID filters as explained in the [Configuration](#configuration) section. This is best seen in action in Pooler, the snapshotter implementation that serves multiple aggregated data points for Uniswap v2 trade information. 
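To make the filter matching concrete, the sketch below shows how a `SingleProject` entry from `config/aggregator.json` can be matched against the project type prefix of an incoming base snapshot's project ID. This is an illustration only, not the actual `processor_distributor.py` routing logic; the helper name is hypothetical, and the config entry mirrors the example shown in the [Configuration](#configuration) section.

```python
def matching_single_project_aggregates(submitted_project_id, aggregator_config):
    """Return the aggregate project types whose SingleProject filter matches the
    type prefix of a submitted/finalized base snapshot's project ID."""
    project_type = submitted_project_id.split(':')[0]
    return [
        entry['project_type']
        for entry in aggregator_config
        if entry.get('aggregate_on') == 'SingleProject'
        and entry.get('filters', {}).get('projectId') == project_type
    ]


config = [{
    'project_type': 'aggregate_pairContract_24h_trade_volume',
    'aggregate_on': 'SingleProject',
    'filters': {'projectId': 'pairContract_trade_volume'},
}]
# A snapshot submitted on the project ID below matches the filter and would
# trigger the corresponding aggregate worker.
print(matching_single_project_aggregates(
    'pairContract_trade_volume:0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc:UNISWAPV2',
    config,
))
```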
@@ -143,8 +252,7 @@ Workers as defined in `config/aggregator.json` are triggered by the appropriate In case of aggregation over multiple projects, their project IDs are generated with a combination of the hash of the dependee project IDs along with the namespace -https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/snapshotter/utils/aggregation_worker.py#L116-L124 - +https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/utils/aggregation_worker.py#L59-L112 ## Major Components @@ -170,21 +278,44 @@ The Processor Distributor, defined in [`processor_distributor.py`](snapshotter/p * It reads the events forwarded by the event detector to the `f'powerloom-event-detector:{settings.namespace}:{settings.instance_id}'` RabbitMQ queue bound to a topic exchange as configured in `settings.rabbitmq.setup.event_detector.exchange`([code-ref: RabbitMQ exchanges and queue setup in pooler](snapshotter/init_rabbitmq.py)) * It creates and distributes processing messages based on the preloader configuration present in `config/preloader.json`, the project configuration present in `config/projects.json` and `config/aggregator.json`, and the topic pattern used in the routing key received from the topic exchange * For [`EpochReleased` events](#epoch-generation), it forwards such messages to base snapshot builders for data source contracts as configured in `config/projects.json` for the current epoch information contained in the event. - https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/snapshotter/processor_distributor.py#L125-L141 + https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/processor_distributor.py#L694-L810 * For [`SnapshotSubmitted` events](#base-snapshot-generation), it forwards such messages to single and multi-project aggregate topic routing keys. - https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/snapshotter/processor_distributor.py#L228-L303 + https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/processor_distributor.py#L928-L1042 ### Delegation Workers for preloaders The preloaders often fetch and cache large volumes of data, for eg, all the transaction receipts for a block on the data source blockchain. In such a case, a single worker will never be enough to feasibly fetch the data for a timely base snapshot generation and subsequent aggregate snapshot generations to finally reach a consensus. -Hence such workers are defined as `delegate_tasks` in [`config/preloader.json`](config/preloader.json) and the [process hub core](#process-hub-core) launches a certain number of workers as defined in the primary settings file, `config/settings.json` under the key `callback_worker_config.num_delegate_workers`. +Hence such workers are defined as `delegate_tasks` in [`config/preloader.json`](https://github.com/PowerLoom/snapshotter-configs/blob/f46cc86cd08913014decf7bced128433442c8f84/preloader.json) and the [process hub core](#process-hub-core) launches a certain number of workers as defined in the primary settings file, `config/settings.json` under the key `callback_worker_config.num_delegate_workers`. 
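The code references that follow show the real configuration and the RabbitMQ-based implementation. Purely as an illustration of the fan-out/fan-in pattern they implement, here is a sketch that stands in in-process `asyncio` queues for the RabbitMQ request and response queues; all names are illustrative and the receipt fetch is a placeholder.

```python
import asyncio


async def delegation_worker(requests: asyncio.Queue, responses: asyncio.Queue):
    # Stand-in for a delegate worker: pull one unit of work (e.g. a tx hash),
    # "fetch" its receipt, and push the result back tagged with the request ID.
    while True:
        request_id, tx_hash = await requests.get()
        receipt = {'transactionHash': tx_hash}  # placeholder for an RPC fetch
        await responses.put((request_id, receipt))
        requests.task_done()


async def delegator_preload(tx_hashes, num_delegate_workers=4):
    # Stand-in for the delegator preloader: fan the workload out over the
    # request queue, then collect exactly one response per request.
    requests, responses = asyncio.Queue(), asyncio.Queue()
    workers = [
        asyncio.create_task(delegation_worker(requests, responses))
        for _ in range(num_delegate_workers)
    ]
    for request_id, tx_hash in enumerate(tx_hashes):
        await requests.put((request_id, tx_hash))
    results = [await responses.get() for _ in tx_hashes]
    for worker in workers:
        worker.cancel()
    return dict(results)


if __name__ == '__main__':
    print(asyncio.run(delegator_preload(['0xabc', '0xdef'])))
```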
https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/config/preloader.json#L19-L25 https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/config/settings.example.json#L86-L90 +Delegation workers operate over a simple request-response queue architecture over RabbitMQ. + +https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/init_rabbitmq.py#L90-L111 + +One of the preloaders bundled with this snapshotter peer is tasked with fetching all the transaction receipts within a given epoch's block range and because of the volume of data to be fetched it delegates this work to a bunch of delegation worker + +* The Preloader: [snapshotter/utils/preloaders/tx_receipts/preloader.py](snapshotter/utils/preloaders/tx_receipts/preloader.py). +* The Delegation Workers: [snapshotter/utils/preloaders/tx_receipts/delegated_worker/tx_receipts.py](snapshotter/utils/preloaders/tx_receipts/delegated_worker/tx_receipts.py) + +As a common functionality shared by all preloaders that utilize delegate workers, this logic is present in the generic class `DelegatorPreloaderAsyncWorker` that all such preloaders inherit. Here you can observe the workload is sent to the delegation workers + +https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/utils/generic_delegator_preloader.py#L188-L227 + +Upon sending out the workloads tagged by unique request IDs, the delegator sets up a temporary exclusive queue to which only the delegation workers meant for the task type push their responses. + +https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/utils/generic_delegator_preloader.py#L158-L186 + +The corresponding response being pushed by the delegation workers can be found here in the generic class `DelegateAsyncWorker` that all such workers should inherit from: + +https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/utils/delegate_worker.py#L74-L84 + +![Delegation worker dependent preloading architecture](snapshotter/static/docs/assets/DelegationPreloading.png) + ### Callback Workers The callback workers are the ones that build the base snapshot and aggregation snapshots and as explained above, are launched by the [process hub core](#process-hub-core) according to the configurations in `aggregator/projects.json` and `config/aggregator.json`. @@ -193,9 +324,9 @@ They listen to new messages on the RabbitMQ topic exchange as described in the f https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/config/settings.example.json#L42-L44 -https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/snapshotter/init_rabbitmq.py#L118-L140 +https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/init_rabbitmq.py#L182-L213 -Upon receiving a message from the processor distributor after preloading is complete, the workers do most of the heavy lifting along with some sanity checks and then call the actual `compute` function defined in the project configuration to transform the dependent data points as cached by the preloaders to finally generate the base snapshots. 
+Upon receiving a message from the processor distributor after preloading is complete, the workers do most of the heavy lifting along with some sanity checks and then call the `compute()` callback function on the project's configured snapshot worker class to transform the dependent data points as cached by the preloaders to finally generate the base snapshots. * [Base Snapshot builder](pooler/utils/snapshot_worker.py) * [Aggregation Snapshot builder](pooler/utils/aggregation_worker.py) @@ -215,8 +346,9 @@ Among many things, the core API allows you to **access the finalized CID as well The main endpoint implementations can be found as follows: -https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/snapshotter/core_api.py#L186-L268 -https://github.com/PowerLoom/pooler/blob/5e7cc3812074d91e8d7d85058554bb1175bf8070/snapshotter/core_api.py#L273-L324 +https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/core_api.py#L248-L339 + +https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/core_api.py#L343-L404 The first endpoint in `GET /last_finalized_epoch/{project_id}` returns the last finalized EpochId for a given project ID and the second one is `GET /data/{epoch_id}/{project_id}/` which can be used to return the actual snapshot data for a given EpochId and ProjectId. @@ -243,15 +375,15 @@ try { ``` -## Development Instructions +## Development setup and instructions -These instructions are needed if you're planning to run the system using `build-dev.sh` from [deploy](https://github.com/PowerLoom/deploy). +These instructions are needed to run the system using [`build-docker.sh`](build-docker.sh). ### Configuration Pooler needs the following config files to be present -* **`settings.json` in `pooler/auth/settings`**: Changes are trivial. Copy [`config/auth_settings.example.json`](config/auth_settings.example.json) to `config/auth_settings.json`. This enables an authentication layer over the core API exposed by the pooler snapshotter. +* **`settings.json` in `pooler/auth/settings`**: Changes are trivial. Copy [`config/auth_settings.example.json`](https://github.com/PowerLoom/snapshotter-configs/blob/f46cc86cd08913014decf7bced128433442c8f84/auth_settings.example.json) to `config/auth_settings.json`. This enables an authentication layer over the core API exposed by the pooler snapshotter. * settings files in `config/` - * **[`config/projects.json`](config/projects.example.json)**: Each entry in this configuration file defines the most fundamental unit of data representation in Powerloom Protocol, that is, a project. It is of the following schema + * **[`config/projects.json`](https://github.com/PowerLoom/snapshotter-configs/blob/f46cc86cd08913014decf7bced128433442c8f84/projects.example.json)**: Each entry in this configuration file defines the most fundamental unit of data representation in Powerloom Protocol, that is, a project. It is of the following schema ```javascript { "project_type": "snapshot_project_name_prefix_", @@ -266,11 +398,11 @@ Pooler needs the following config files to be present } } ``` - Copy over [`config/projects.example.json`](config/projects.example.json) to `config/projects.json`. For more details, read on in the [use case study](#pooler-case-study-and-extending-this-implementation) for this current implementation. 
+ Copy over [`config/projects.example.json`](https://github.com/PowerLoom/snapshotter-configs/blob/f46cc86cd08913014decf7bced128433442c8f84/projects.example.json) to `config/projects.json`. For more details, read on in the [use case study](#1-pooler-case-study-and-extending-this-implementation) for this current implementation. - * **`config/aggregator.json`** : This lists out different type of aggregation work to be performed over a span of snapshots. Copy over [`config/aggregator.example.json`](config/aggregator.example.json) to `config/aggregator.json`. The span is usually calculated as a function of the epoch size and average block time on the data source network. For eg, + * **`config/aggregator.json`** : This lists out different type of aggregation work to be performed over a span of snapshots. Copy over [`config/aggregator.example.json`](https://github.com/PowerLoom/snapshotter-configs/blob/f46cc86cd08913014decf7bced128433442c8f84/aggregator.example.json) to `config/aggregator.json`. The span is usually calculated as a function of the epoch size and average block time on the data source network. For eg, * the following configuration calculates a snapshot of total trade volume over a 24 hour time period, based on the [snapshot finalization](#snapshot-finalization) of a project ID corresponding to a pair contract. This can be seen by the `aggregate_on` key being set to `SingleProject`. - * This is specified by the `filters` key below. When a snapshot build is achieved for an epoch over a project ID [(ref:generation of project ID for snapshot building workers)](#epoch-generation). For eg, a snapshot build on `pairContract_trade_volume:0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc:UNISWAPV2` triggers the worker [`AggreagateTradeVolumeProcessor`](snapshotter/modules/pooler/uniswapv2/aggregate/single_uniswap_trade_volume_24h.py) as defined in the `processor` section of the config against the pair contract `0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc`. + * This is specified by the `filters` key below. When a snapshot build is achieved for an epoch over a project ID [(ref:generation of project ID for snapshot building workers)](#epoch-generation). For eg, a snapshot build on `pairContract_trade_volume:0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc:UNISWAPV2` triggers the worker [`AggregateTradeVolumeProcessor`](https://github.com/PowerLoom/snapshotter-computes/blob/6fb98b1bbc22be8b5aba8bdc860004d35786f4df/aggregate/single_uniswap_trade_volume_24h.py) as defined in the `processor` section of the config against the pair contract `0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc`. ```javascript { @@ -286,7 +418,7 @@ Pooler needs the following config files to be present }, "processor": { "module": "snapshotter.modules.uniswapv2.aggregate.single_uniswap_trade_volume_24h", - "class_name": "AggreagateTradeVolumeProcessor" + "class_name": "AggregateTradeVolumeProcessor" } } ] @@ -326,7 +458,7 @@ Pooler needs the following config files to be present ``` * To begin with, you can keep the workers and contracts as specified in the example files. - * **`config/settings.json`**: This is the primary configuration. We've provided a settings template in `config/settings.example.json` to help you get started. Copy over [`config/settings.example.json`](config/settings.example.json) to `config/settings.json`. There can be a lot to fine tune but the following are essential. + * **`config/settings.json`**: This is the primary configuration. 
We've provided a settings template in `config/settings.example.json` to help you get started. Copy over [`config/settings.example.json`](https://github.com/PowerLoom/snapshotter-configs/blob/f46cc86cd08913014decf7bced128433442c8f84/settings.example.json) to `config/settings.json`. There can be a lot to fine tune but the following are essential. - `instance_id`: This is the unique public key for your node to participate in consensus. It is currently registered on approval of an application (refer [deploy](https://github.com/PowerLoom/deploy) repo for more details on applying). - `namespace`, is the unique key used to identify your project namespace around which all consensus activity takes place. - RPC service URL(s) and rate limit configurations. Rate limits are service provider specific, different RPC providers have different rate limits. Example rate limit config for a node looks something like this `"100000000/day;20000/minute;2500/second"` @@ -343,6 +475,167 @@ Login to the pooler docker container using `docker exec -it deploy-boost-1 bash` - To see logs for a specific process you can run `pm2 logs ` - To see only error logs you can run `pm2 logs --err` +### Internal Snapshotter APIs + +All implementations of a snapshotter come equipped with a barebones API service that return detailed insights into its state. You can tunnel into port 8002 of an instance running the snapshotter and right away try out the internal APIs among others by visting the FastAPI generated SwaggerUI. + +``` +http://localhost:8002/docs +``` + +![Snapshotter API SwaggerUI](snapshotter/static/docs/assets/SnapshotterSwaggerUI.png) + +#### `GET /internal/snapshotter/epochProcessingStatus` + +As detailed out in the section on [epoch processing state transitions](#epoch-processing-state-transitions), this internal API endpoint offers the most detailed insight into each epoch's processing status as it passes through the snapshot builders and is sent out for consensus. + +>NOTE: The endpoint, though paginated and cached, serves a raw dump of insights into an epoch's state transitions and the payloads are significantly large enough for requests to timeout or to clog the internal API's limited resource. Hence it is advisable to query somewhere between 1 to 5 epochs. The same can be specified as the `size` query parameter. 
+ +**Sample Request:** + +```bash +curl -X 'GET' \ + 'http://localhost:8002/internal/snapshotter/epochProcessingStatus?page=1&size=3' \ + -H 'accept: application/json' +``` + +**Sample Response:** + +```json +{ + "items": [ + { + "epochId": 43523, + "transitionStatus": { + "EPOCH_RELEASED": { + "status": "success", + "error": null, + "extra": null, + "timestamp": 1692530595 + }, + "PRELOAD": { + "pairContract_pair_total_reserves": { + "status": "success", + "error": null, + "extra": null, + "timestamp": 1692530595 + }, + }, + "SNAPSHOT_BUILD": { + "aggregate_24h_stats_lite:35ee1886fa4665255a0d0486c6079c4719c82f0f62ef9e96a98f26fde2e8a106:UNISWAPV2": { + "status": "success", + "error": null, + "extra": null, + "timestamp": 1692530596 + }, + }, + "SNAPSHOT_SUBMIT_PAYLOAD_COMMIT": { + + }, + "RELAYER_SEND": { + + }, + "SNAPSHOT_FINALIZE": { + + }, + }, + } + ], + "total": 3, + "page": 1, + "size": 3, + "pages": 1 +} +``` + +`/status` +Returns the overall status of all the projects + +Response +```json +{ + "totalSuccessfulSubmissions": 10, + "totalMissedSubmissions": 5, + "totalIncorrectSubmissions": 1, + "projects":[ + { + "projectId": "projectid" + "successfulSubmissions": 3, + "missedSubmissions": 2, + "incorrectSubmissions": 1 + }, + ] +} +``` +#### `GET /internal/snapshotter/status` +Returns the overall status of all the projects + +Response +```json +{ + "totalSuccessfulSubmissions": 10, + "totalMissedSubmissions": 5, + "totalIncorrectSubmissions": 1, + "projects":[ + { + "projectId": "projectid" + "successfulSubmissions": 3, + "missedSubmissions": 2, + "incorrectSubmissions": 1 + }, + ] +} +``` + +#### `GET /internal/snapshotter/status/{project_id}` +Returns project specific detailed status report + +Response +```json +{ + "missedSubmissions": [ + { + "epochId": 10, + "finalizedSnapshotCid": "cid", + "reason": "error/exception/trace" + } + ], + "incorrectSubmissions": [ + { + "epochId": 12, + "submittedSnapshotCid": "snapshotcid", + "finalizedSnapshotCid": "finalizedsnapshotcid", + "reason": "reason for incorrect submission" + } + ] +} +``` +#### `GET /internal/snapshotter/status/{project_id}?data=true` +Returns project specific detailed status report with snapshot data + +Response +```json +{ + "missedSubmissions": [ + { + "epochId": 10, + "finalizedSnapshotCid": "cid", + "reason": "error/exception/trace" + } + ], + "incorrectSubmissions": [ + { + "epochId": 12, + "submittedSnapshotCid": "snapshotcid", + "submittedSnapshot": {} + "finalizedSnapshotCid": "finalizedsnapshotcid", + "finalizedSnapshot": {}, + "reason": "reason for incorrect submission" + } + ] +} +``` + ## For Contributors We use [pre-commit hooks](https://pre-commit.com/) to ensure our code quality is maintained over time. For this contributors need to do a one-time setup by running the following commands. * Install the required dependencies using `pip install -r dev-requirements.txt`, this will set up everything needed for pre-commit checks. @@ -350,7 +643,9 @@ We use [pre-commit hooks](https://pre-commit.com/) to ensure our code quality is Now, whenever you commit anything, it'll automatically check the files you've changed/edited for code quality issues and suggest improvements. -## Pooler: Case study and extending this implementation +## Case Studies + +### 1. Pooler: Case study and extending this implementation Pooler is a Uniswap specific implementation of what is known as a 'snapshotter' in the PowerLoom Protocol ecosystem. 
It synchronizes with other snapshotter peers over a smart contract running on the present version of the PowerLoom Protocol testnet. It follows an architecture that is driven by state transitions which makes it easy to understand and modify. This present release ultimately provide access to rich aggregates that can power a Uniswap v2 dashboard with the following data points: @@ -364,18 +659,18 @@ Pooler is a Uniswap specific implementation of what is known as a 'snapshotter' - 7 days - Transactions containing `Swap`, `Mint`, and `Burn` events -## Extending pooler with a Uniswap v2 data point +#### Extending pooler with a Uniswap v2 data point In this section, let us take a look at the data composition abilities of Pooler to build on the base snapshot being built that captures information on Uniswap trades. -### Step 1. Review: Base snapshot extraction logic for trade information +##### Step 1. Review: Base snapshot extraction logic for trade information Required reading: * [Base Snapshot Generation](#base-snapshot-generation) and * [configuring `config/projects.json`](#configuration) * [Aggregation and data composition](#aggregation-and-data-composition---snapshot-generation-of-higher-order-data-points-on-base-snapshots) -As you can notice in [`config/projects.example.json`](config/projects.example.json), each project config needs to have the following components +As you can notice in [`config/projects.example.json`](https://github.com/PowerLoom/snapshotter-configs/blob/f46cc86cd08913014decf7bced128433442c8f84/projects.example.json), each project config needs to have the following components - `project_type` (unique identifier prefix for the usecase, [used to generate project ID](#base-snapshot-generation)) - `projects` (smart contracts to extract data from, pooler can generate different snapshots from multiple sources as long as the Contract ABI is same) @@ -386,10 +681,7 @@ There's currently no limitation on the number or type of usecases you can build https://github.com/PowerLoom/pooler/blob/1452c166bef7534568a61b3a2ab0ff94535d7229/config/projects.example.json#L1-L35 -If we take a look at the `TradeVolumeProcessor` class present at [`snapshotter/modules/pooler/uniswapv2/trade_volume.py`](snapshotter/modules/pooler/uniswapv2/trade_volume.py) it implements the interface of `GenericProcessorSnapshot` defined in [`pooler/utils/callback_helpers.py`](pooler/utils/callback_helpers.py). - - -https://github.com/PowerLoom/pooler/blob/1452c166bef7534568a61b3a2ab0ff94535d7229/snapshotter/modules/pooler/uniswapv2/trade_volume.py#L13-L86 +If we take a look at the `TradeVolumeProcessor` class present at [`snapshotter/modules/computes/trade_volume.py`](https://github.com/PowerLoom/snapshotter-computes/blob/6fb98b1bbc22be8b5aba8bdc860004d35786f4df/trade_volume.py) it implements the interface of `GenericProcessorSnapshot` defined in [`pooler/utils/callback_helpers.py`](pooler/utils/callback_helpers.py). There are a couple of important concepts here necessary to write your extraction logic: @@ -399,30 +691,27 @@ There are a couple of important concepts here necessary to write your extraction - `rpc_helper` ([`RpcHelper`](pooler/utils/rpc.py) instance to help with any calls to the data source contract's chain) * `transformation_lambdas` provide an additional layer for computation on top of the generated snapshot (if needed). If `compute` function handles everything you can just set `transformation_lambdas` to `[]` otherwise pass the list of transformation function sequence. 
Each function referenced in `transformation_lambdas` must have same input interface. It should receive the following inputs - - - `sn`apshot` (the generated snapshot to apply transformation on) + - `snapshot` (the generated snapshot to apply transformation on) - `address` (contract address to extract data from) - `epoch_begin` (epoch begin block) - `epoch_end` (epoch end block) Output format can be anything depending on the usecase requirements. Although it is recommended to use proper [`pydantic`](https://pypi.org/project/pydantic/) models to define the snapshot interface. -The resultant output model in this specific example is `UniswapTradesSnapshot` as defined in the Uniswap v2 specific modules directory: [`utils/models/message_models.py`](snapshotter/modules/pooler/uniswapv2/utils/models/message_models.py). This encapsulates state information captured by `TradeVolumeProcessor` between the block heights of the epoch: `min_chain_height` and `max_chain_height`. +The resultant output model in this specific example is `UniswapTradesSnapshot` as defined in the Uniswap v2 specific modules directory: [`utils/models/message_models.py`](https://github.com/PowerLoom/snapshotter-computes/blob/6fb98b1bbc22be8b5aba8bdc860004d35786f4df/utils/models/message_models.py#L47-L54). This encapsulates state information captured by `TradeVolumeProcessor` between the block heights of the epoch: `min_chain_height` and `max_chain_height`. -https://github.com/PowerLoom/pooler/blob/1452c166bef7534568a61b3a2ab0ff94535d7229/snapshotter/modules/pooler/uniswapv2/utils/models/message_models.py#L37-L44 - -### Step 2. Review: 24 hour aggregate of trade volume snapshots over a single pair contract +##### Step 2. Review: 24 hour aggregate of trade volume snapshots over a single pair contract * As demonstrated in the previous section, the `TradeVolumeProcessor` logic takes care of capturing a snapshot of information regarding Uniswap v2 trades between the block heights of `min_chain_height` and `max_chain_height`. * The epoch size as described in the prior section on [epoch generation](#epoch-generation) can be considered to be constant for this specific implementation of the Uniswap v2 use case on PowerLoom Protocol, and by extension, the time duration captured within the epoch. -* As shown in the section on [dependency graph of data composition](#aggregation-and-data-composition---snapshot-generation), every aggregate is calculated relative to the `epochId` at which the dependee [`SnapshotFinalized` event](#snapshot-finalization) is receieved. +* As shown in the section on [dependency graph of data composition](#aggregation-and-data-composition---snapshot-generation-of-higher-order-data-points-on-base-snapshots), every aggregate is calculated relative to the `epochId` at which the dependee [`SnapshotFinalized` event](#snapshot-finalization) is receieved. * The finalized state and data CID corresponding to each epoch can be accessed on the smart contract on the anchor chain that holds the protocol state. 
The corresponding helpers for that can be found in `get_project_epoch_snapshot()` in [`pooler/utils/data_utils`](pooler/utils/data_utils.py) -https://github.com/PowerLoom/pooler/blob/1452c166bef7534568a61b3a2ab0ff94535d7229/pooler/utils/data_utils.py#L183-L191 - +https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/utils/data_utils.py#L273-L295 * Considering the incoming `epochId` to be the head of the span, the quickest formula to arrive at the tail of the span of 24 hours worth of snapshots and trade information becomes, @@ -431,18 +720,26 @@ time_in_seconds = 86400 tail_epoch_id = current_epoch_id - int(time_in_seconds / (source_chain_epoch_size * source_chain_block_time)) ``` -https://github.com/PowerLoom/pooler/blob/1452c166bef7534568a61b3a2ab0ff94535d7229/pooler/utils/data_utils.py#L263-L290 +https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/utils/data_utils.py#L507-L547 * The worker class for such aggregation is defined in `config/aggregator.json` in the following manner -https://github.com/PowerLoom/pooler/blob/1452c166bef7534568a61b3a2ab0ff94535d7229/config/aggregator.example.json#L3-L10 - +```javascript + { + "project_type": "aggregate_pairContract_24h_trade_volume", + "aggregate_on": "SingleProject", + "filters": { + "projectId": "pairContract_trade_volume" + }, + "processor": { + "module": "snapshotter.modules.computes.aggregate.single_uniswap_trade_volume_24h", + "class_name": "AggregateTradeVolumeProcessor" + } + } +``` * Each finalized `epochId` is registered with a snapshot commit against the aggregated data set generated by running summations on trade volumes on all the base snapshots contained within the span calculated above. -https://github.com/PowerLoom/pooler/blob/1452c166bef7534568a61b3a2ab0ff94535d7229/snapshotter/modules/pooler/uniswapv2/aggregate/single_uniswap_trade_volume_24h.py#L84-L157 - - -### Step 3. New Datapoint: 2 hours aggregate of only swap events +##### Step 3. New Datapoint: 2 hours aggregate of only swap events From the information provided above, the following is left as an exercise for the reader to generate aggregate datasets at every `epochId` finalization for a pair contract, spanning 2 hours worth of snapshots and containing only `Swap` event logs and the trade volume generated from them as a result. @@ -450,13 +747,68 @@ From the information provided above, the following is left as an exercise for th * Add a new configuration entry in `config/aggregator.json` for this new aggregation worker class -* Define a new data model in [`utils/message_models.py`](snapshotter/modules/pooler/uniswapv2/utils/models/message_models.py) referring to +* Define a new data model in [`utils/message_models.py`](https://github.com/PowerLoom/snapshotter-computes/blob/6fb98b1bbc22be8b5aba8bdc860004d35786f4df/aggregate/single_uniswap_trade_volume_24h.py) referring to * `UniswapTradesAggregateSnapshot` as used in above example * `UniswapTradesSnapshot` used to capture each epoch's trade snapshots which includes the raw event logs as well -* Follow the example of the aggregator worker [as implemented for 24 hours aggregation calculation](snapshotter/modules/pooler/uniswapv2/aggregate/single_uniswap_trade_volume_24h.py) , and work on calculating an `epochId` span of 2 hours and filtering out only the `Swap` events and the trade volume contained within. 
+* Follow the example of the aggregator worker [as implemented for 24 hours aggregation calculation](https://github.com/PowerLoom/snapshotter-computes/blob/6fb98b1bbc22be8b5aba8bdc860004d35786f4df/aggregate/single_uniswap_trade_volume_24h.py) , and work on calculating an `epochId` span of 2 hours and filtering out only the `Swap` events and the trade volume contained within. + + +### 2. Zkevm Quests: A Case Study of Implementation + +Phase 2 quests form a crucial part of the Powerloom testnet program, where we leverage Snapshotter Peers to monitor on-chain activities of testnet participants across various chains and protocols. These quests predominantly operate in [Bulk Mode](#bulk-mode) due to their one-time nature and the highly dynamic set of participants involved. + +In this particular implementation of the peer, known as 'Snapshotter' in the Powerloom Protocol, we have successfully harnessed its capabilities to provide accurate metrics, verified through consensus, pertaining to fundamental data points. These metrics allow us to determine if and when a quest is completed by a testnet participant. + +This case study serves as a testament to the effectiveness and versatility of the Snapshotter Peer in real-world scenarios, highlighting its ability to support complex use cases with precision and reliability. + +#### Review: Base snapshots + +The snapshot builders can be found under the snapshotter-specific implementation directory: [`snapshotter/modules/computes`](https://github.com/PowerLoom/snapshotter-computes/tree/1e145c7f458ce48b8cd2ac860c2ae4a78fad7ea9). Every snapshot builder must implement the interface of [`GenericProcessorSnapshot`](snapshotter/utils/callback_helpers.py) + +https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/utils/callback_helpers.py#L179-L197 + + +* `compute()` is the callback where the snapshot extraction and generation logic needs to be written. It receives the following inputs: + * `epoch` (current epoch details) + * `redis` (async redis connection) + * `rpc_helper` ([`RpcHelper`](pooler/utils/rpc.py) instance to help with any calls to the data source contract's chain) + +* `transformation_lambdas` provide an additional layer for computation on top of the generated snapshot (if needed). If the `compute()` callback handles everything you can just set `transformation_lambdas` to `[]` otherwise pass the list of transformation function sequences. Each function referenced in `transformation_lambdas` must have the same input interface. It should receive the following inputs - + * `snapshot` (the generated snapshot to apply the transformation on) + * `address` (contract address to extract data from) + * `epoch_begin` (epoch begin block) + * `epoch_end` (epoch end block) + +`compute()` should return an instance of a Pydantic model which is in turn uploaded to IPFS by the payload commit service helper method. 
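A minimal sketch of what such a builder can look like is shown below. It does not inherit the actual `GenericProcessorSnapshot` base class, the snapshot model is hypothetical, and the cache-reading helper is a placeholder; it only illustrates the shape of a `compute()` callback that consumes preloaded receipts and returns a Pydantic model instance.

```python
from pydantic import BaseModel


class WalletActivitySnapshot(BaseModel):
    """Hypothetical snapshot model, for illustration only."""
    txCountByWallet: dict
    epochBegin: int
    epochEnd: int


async def fetch_preloaded_receipts(redis, begin, end):
    """Placeholder for reading the receipts cached by the block_transactions
    preloader; the real cache-access helpers live in the snapshotter utils."""
    return []


class WalletActivityProcessorSketch:
    """Sketch of a compute() callback in the spirit of the quest snapshot
    builders: read preloaded receipts, filter/aggregate, return a snapshot."""

    transformation_lambdas = []

    async def compute(self, epoch, redis, rpc_helper):
        receipts = await fetch_preloaded_receipts(redis, epoch.begin, epoch.end)
        counts = {}
        for receipt in receipts:
            sender = receipt['from'].lower()
            counts[sender] = counts.get(sender, 0) + 1
        # One Pydantic model instance is returned and later committed/uploaded
        # by the generic worker and payload commit service.
        return WalletActivitySnapshot(
            txCountByWallet=counts,
            epochBegin=epoch.begin,
            epochEnd=epoch.end,
        )
```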
+ +https://github.com/PowerLoom/pooler/blob/d8b7be32ad329e8dcf0a7e5c1b27862894bc990a/snapshotter/utils/generic_worker.py#L179-L191 + +Looking at the pre-supplied [example configuration of `config/projects.json`](https://github.com/PowerLoom/snapshotter-configs/blob/544f3f3355f0b25b99bac7fe8288cec1a4aea3f3/projects.example.json), we can find the following snapshots being generated + +#### `zkevm:bungee_bridge` + +Snapshot builder: [snapshotter/modules/computes/bungee_bridge.py](https://github.com/PowerLoom/snapshotter-computes/blob/29199feab449ad0361b5867efcaae9854992966f/bungee_bridge.py) + +```javascript + { + "project_type": "zkevm:bungee_bridge", + "projects":[ + ], + "preload_tasks":[ + "block_transactions" + ], + "processor":{ + "module": "snapshotter.modules.boost.bungee_bridge", + "class_name": "BungeeBridgeProcessor" + } + }, +``` +Its preloader dependency is [`block_transactions`](snapshotter/utils/preloaders/tx_receipts/preloader.py) as seen in the [preloader configuration](#preloading). +The snapshot builder then goes through all preloaded block transactions, filters out, and then generates relevant snapshots for wallet address that received funds from the Bungee Bridge refuel contract during that epoch. +https://github.com/PowerLoom/snapshotter-computes/blob/29199feab449ad0361b5867efcaae9854992966f/bungee_bridge.py#L40-L92 ## Find us @@ -464,4 +816,5 @@ From the information provided above, the following is left as an exercise for th * [Twitter](https://twitter.com/PowerLoomHQ) * [Github](https://github.com/PowerLoom) * [Careers](https://wellfound.com/company/powerloom/jobs) +* [Blog](https://blog.powerloom.io/) * [Medium Engineering Blog](https://medium.com/powerloom) diff --git a/config b/config new file mode 160000 index 00000000..6e34c5b6 --- /dev/null +++ b/config @@ -0,0 +1 @@ +Subproject commit 6e34c5b68fa3fba7cad3b140f8676dcbdab687c5 diff --git a/config/aggregator.example.json b/config/aggregator.example.json deleted file mode 100644 index da31ba46..00000000 --- a/config/aggregator.example.json +++ /dev/null @@ -1,114 +0,0 @@ -{ - "config": [{ - "project_type": "aggregate_pairContract_24h_trade_volume", - "aggregate_on": "SingleProject", - "filters": { - "projectId": "pairContract_trade_volume" - }, - "processor": { - "module": "snapshotter.modules.pooler.uniswapv2.aggregate.single_uniswap_trade_volume_24h", - "class_name": "AggreagateTradeVolumeProcessor" - } - }, - { - "project_type": "aggregate_pairContract_7d_trade_volume", - "aggregate_on": "SingleProject", - "filters": { - "projectId": "aggregate_pairContract_24h_trade_volume" - }, - "processor": { - "module": "snapshotter.modules.pooler.uniswapv2.aggregate.single_uniswap_trade_volume_7d", - "class_name": "AggreagateTradeVolumeProcessor" - } - }, - { - "project_type": "aggregate_24h_top_pairs_lite", - "aggregate_on": "MultiProject", - "projects_to_wait_for": [ - "aggregate_pairContract_24h_trade_volume:0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc:UNISWAPV2", - "pairContract_pair_total_reserves:0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0xae461ca67b15dc8dc81ce7615e0320da1a9ab8d5:UNISWAPV2", - "pairContract_pair_total_reserves:0xae461ca67b15dc8dc81ce7615e0320da1a9ab8d5:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0x0d4a11d5eeaac28ec3f61d100daf4d40471f1852:UNISWAPV2", - "pairContract_pair_total_reserves:0x0d4a11d5eeaac28ec3f61d100daf4d40471f1852:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0x3041cbd36888becc7bbcbc0045e3b1f144466f5f:UNISWAPV2", - 
"pairContract_pair_total_reserves:0x3041cbd36888becc7bbcbc0045e3b1f144466f5f:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0xd3d2e2692501a5c9ca623199d38826e513033a17:UNISWAPV2", - "pairContract_pair_total_reserves:0xd3d2e2692501a5c9ca623199d38826e513033a17:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0xbb2b8038a1640196fbe3e38816f3e67cba72d940:UNISWAPV2", - "pairContract_pair_total_reserves:0xbb2b8038a1640196fbe3e38816f3e67cba72d940:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0xa478c2975ab1ea89e8196811f51a7b7ade33eb11:UNISWAPV2", - "pairContract_pair_total_reserves:0xa478c2975ab1ea89e8196811f51a7b7ade33eb11:UNISWAPV2" - ], - "processor": { - "module": "snapshotter.modules.pooler.uniswapv2.aggregate.multi_uniswap_top_pairs_24h", - "class_name": "AggreagateTopPairsProcessor" - } - }, - { - "project_type": "aggregate_24h_top_tokens_lite", - "aggregate_on": "MultiProject", - "projects_to_wait_for": [ - "aggregate_pairContract_24h_trade_volume:0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc:UNISWAPV2", - "pairContract_pair_total_reserves:0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0xae461ca67b15dc8dc81ce7615e0320da1a9ab8d5:UNISWAPV2", - "pairContract_pair_total_reserves:0xae461ca67b15dc8dc81ce7615e0320da1a9ab8d5:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0x0d4a11d5eeaac28ec3f61d100daf4d40471f1852:UNISWAPV2", - "pairContract_pair_total_reserves:0x0d4a11d5eeaac28ec3f61d100daf4d40471f1852:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0x3041cbd36888becc7bbcbc0045e3b1f144466f5f:UNISWAPV2", - "pairContract_pair_total_reserves:0x3041cbd36888becc7bbcbc0045e3b1f144466f5f:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0xd3d2e2692501a5c9ca623199d38826e513033a17:UNISWAPV2", - "pairContract_pair_total_reserves:0xd3d2e2692501a5c9ca623199d38826e513033a17:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0xbb2b8038a1640196fbe3e38816f3e67cba72d940:UNISWAPV2", - "pairContract_pair_total_reserves:0xbb2b8038a1640196fbe3e38816f3e67cba72d940:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0xa478c2975ab1ea89e8196811f51a7b7ade33eb11:UNISWAPV2", - "pairContract_pair_total_reserves:0xa478c2975ab1ea89e8196811f51a7b7ade33eb11:UNISWAPV2" - ], - "processor": { - "module": "snapshotter.modules.pooler.uniswapv2.aggregate.multi_uniswap_top_tokens", - "class_name": "AggreagateTopTokensProcessor" - } - }, - { - "project_type": "aggregate_7d_top_pairs_lite", - "aggregate_on": "MultiProject", - "projects_to_wait_for": [ - "aggregate_pairContract_7d_trade_volume:0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc:UNISWAPV2", - "aggregate_pairContract_7d_trade_volume:0xae461ca67b15dc8dc81ce7615e0320da1a9ab8d5:UNISWAPV2", - "aggregate_pairContract_7d_trade_volume:0x0d4a11d5eeaac28ec3f61d100daf4d40471f1852:UNISWAPV2", - "aggregate_pairContract_7d_trade_volume:0x3041cbd36888becc7bbcbc0045e3b1f144466f5f:UNISWAPV2", - "aggregate_pairContract_7d_trade_volume:0xd3d2e2692501a5c9ca623199d38826e513033a17:UNISWAPV2", - "aggregate_pairContract_7d_trade_volume:0xbb2b8038a1640196fbe3e38816f3e67cba72d940:UNISWAPV2", - "aggregate_pairContract_7d_trade_volume:0xa478c2975ab1ea89e8196811f51a7b7ade33eb11:UNISWAPV2" - ], - "processor": { - "module": "snapshotter.modules.pooler.uniswapv2.aggregate.multi_uniswap_top_pairs_7d", - "class_name": "AggreagateTopPairsProcessor" - } - }, - { - "project_type": "aggregate_24h_stats_lite", - "aggregate_on": "MultiProject", - "projects_to_wait_for": [ - 
"aggregate_pairContract_24h_trade_volume:0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc:UNISWAPV2", - "pairContract_pair_total_reserves:0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0xae461ca67b15dc8dc81ce7615e0320da1a9ab8d5:UNISWAPV2", - "pairContract_pair_total_reserves:0xae461ca67b15dc8dc81ce7615e0320da1a9ab8d5:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0x0d4a11d5eeaac28ec3f61d100daf4d40471f1852:UNISWAPV2", - "pairContract_pair_total_reserves:0x0d4a11d5eeaac28ec3f61d100daf4d40471f1852:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0x3041cbd36888becc7bbcbc0045e3b1f144466f5f:UNISWAPV2", - "pairContract_pair_total_reserves:0x3041cbd36888becc7bbcbc0045e3b1f144466f5f:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0xd3d2e2692501a5c9ca623199d38826e513033a17:UNISWAPV2", - "pairContract_pair_total_reserves:0xd3d2e2692501a5c9ca623199d38826e513033a17:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0xbb2b8038a1640196fbe3e38816f3e67cba72d940:UNISWAPV2", - "pairContract_pair_total_reserves:0xbb2b8038a1640196fbe3e38816f3e67cba72d940:UNISWAPV2", - "aggregate_pairContract_24h_trade_volume:0xa478c2975ab1ea89e8196811f51a7b7ade33eb11:UNISWAPV2", - "pairContract_pair_total_reserves:0xa478c2975ab1ea89e8196811f51a7b7ade33eb11:UNISWAPV2" - ], - "processor": { - "module": "snapshotter.modules.pooler.uniswapv2.aggregate.multi_uniswap_stats", - "class_name": "AggreagateStatsProcessor" - } - } - ] -} diff --git a/config/auth_settings.example.json b/config/auth_settings.example.json deleted file mode 100644 index b6d0e8f3..00000000 --- a/config/auth_settings.example.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "redis": { - "host": "redis", - "port": 6379, - "db": 0, - "password": null - }, - "bind": { - "host": "0.0.0.0", - "port": 8555 - } -} diff --git a/config/preloader.json b/config/preloader.json deleted file mode 100644 index 24cd4b79..00000000 --- a/config/preloader.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "preloaders": [ - { - "task_type": "block_transactions", - "module": "snapshotter.utils.preloaders.tx_receipts.preloader", - "class_name": "TxPreloadWorker" - }, - { - "task_type": "block_details", - "module": "snapshotter.utils.preloaders.block_details.preloader", - "class_name": "BlockDetailsPreloader" - }, - { - "task_type": "eth_price", - "module": "snapshotter.utils.preloaders.eth_price.preloader", - "class_name": "EthPricePreloader" - } - ], - "delegate_tasks": [ - { - "task_type": "txreceipt", - "module": "snapshotter.utils.preloaders.tx_receipts.delegated_worker.tx_receipts", - "class_name": "TxReceiptProcessor" - } - ], - "timeout": 60 -} diff --git a/config/projects.example.json b/config/projects.example.json deleted file mode 100644 index 71444425..00000000 --- a/config/projects.example.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "config": [{ - "project_type": "pairContract_pair_total_reserves", - "preload_tasks":[ - "eth_price", - "block_details" - ], - "projects":[ - "0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc", - "0xae461ca67b15dc8dc81ce7615e0320da1a9ab8d5", - "0x0d4a11d5eeaac28ec3f61d100daf4d40471f1852", - "0x3041cbd36888becc7bbcbc0045e3b1f144466f5f", - "0xd3d2e2692501a5c9ca623199d38826e513033a17", - "0xbb2b8038a1640196fbe3e38816f3e67cba72d940", - "0xa478c2975ab1ea89e8196811f51a7b7ade33eb11" - ], - "processor":{ - "module": "snapshotter.modules.pooler.uniswapv2.pair_total_reserves", - "class_name": "PairTotalReservesProcessor" - } - }, - { - "project_type": "pairContract_trade_volume", - "preload_tasks":[ - 
"eth_price", - "block_details" - ], - "projects":[ - "0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc", - "0xae461ca67b15dc8dc81ce7615e0320da1a9ab8d5", - "0x0d4a11d5eeaac28ec3f61d100daf4d40471f1852", - "0x3041cbd36888becc7bbcbc0045e3b1f144466f5f", - "0xd3d2e2692501a5c9ca623199d38826e513033a17", - "0xbb2b8038a1640196fbe3e38816f3e67cba72d940", - "0xa478c2975ab1ea89e8196811f51a7b7ade33eb11" - ], - "processor":{ - "module": "snapshotter.modules.pooler.uniswapv2.trade_volume", - "class_name": "TradeVolumeProcessor" - } - } - ] -} diff --git a/config/settings.example.json b/config/settings.example.json deleted file mode 100644 index 6cdb9c3d..00000000 --- a/config/settings.example.json +++ /dev/null @@ -1,131 +0,0 @@ -{ - "namespace": "relevant-namespace", - "core_api": { - "host": "0.0.0.0", - "port": 8002, - "auth": { - "enabled": false, - "header_key": "X-API-KEY" - }, - "public_rate_limit": "20000/day;300/minute;10/second" - }, - "instance_id": "account-address", - "rpc": { - "full_nodes": [{ - "url": "https://rpc-url", - "rate_limit": "10000000/day;2000/minute;100/second" - }], - "archive_nodes": [], - "force_archive_blocks": 100, - "retry": 5, - "request_time_out": 5, - "skip_epoch_threshold_blocks": 30, - "polling_interval": 15, - "connection_limits":{ - "max_connections": 100, - "max_keepalive_connections": 50, - "keepalive_expiry": 300 - } - }, - "rlimit": { - "file_descriptors": 2048 - }, - "rabbitmq": { - "user": "guest", - "password": "guest", - "host": "rabbitmq", - "port": 5672, - "setup": { - "core": { - "exchange": "powerloom-backend" - }, - "callbacks": { - "exchange": "powerloom-backend-callbacks" - }, - "commit_payload": { - "exchange": "powerloom-backend-commit-payload" - }, - "event_detector": { - "exchange": "powerloom-backend-event-detector" - }, - "delegated_worker": { - "exchange": "powerloom-backend-delegated-worker" - } - } - }, - "reporting": { - "slack_url": "https://slack-reporting-url", - "service_url": "https://powerloom-reporting-url" - }, - "redis": { - "host": "redis", - "port": 6379, - "db": 0, - "password": null, - "ssl": false, - "cluster_mode": false - }, - "redis_reader": { - "host": "redis", - "port": 6379, - "db": 0, - "password": null - }, - "logs": { - "trace_enabled": false, - "write_to_files": true - }, - "projects_config_path": "config/projects.json", - "aggregator_config_path": "config/aggregator.json", - "preloader_config_path": "config/preloader.json", - "pair_contract_abi": "snapshotter/static/abis/UniswapV2Pair.json", - "protocol_state": { - "abi": "snapshotter/static/abis/ProtocolContract.json", - "address": "protocol-state-contract" - }, - "callback_worker_config": { - "num_delegate_workers": 8, - "num_snapshot_workers": 2, - "num_aggregation_workers": 4 - }, - "ipfs": { - "url": "ipfs-writer-url", - "url_auth": { - "apiKey": "ipfs-writer-key", - "apiSecret": "ipfs-writer-secret" - }, - "reader_url": "ipfs-reader-url", - "reader_url_auth": { - "apiKey": "ipfs-reader-key", - "apiSecret": "ipfs-reader-secret" - }, - "write_rate_limit": { - "req_per_sec": 10, - "burst": 10 - }, - "timeout": 60, - "local_cache_path": "/home/ubuntu/local_cache/", - "connection_limits":{ - "max_connections": 100, - "max_keepalive_connections": 50, - "keepalive_expiry": 300 - } - }, - "web3storage":{ - "upload_snapshots": false, - "upload_aggregates": true - }, - "anchor_chain_rpc": { - "full_nodes": [{ - "url": "https://prost-rpc-url", - "rate_limit": "100000000/day;18000/minute;300/second" - }], - "retry": 5, - "request_time_out": 5, - "connection_limits":{ - 
"max_connections": 100, - "max_keepalive_connections": 50, - "keepalive_expiry": 300 - } - } -} diff --git a/out/snapshotter/modules/pooler/flow/flow.png b/out/snapshotter/modules/pooler/flow/flow.png deleted file mode 100644 index 752791be..00000000 Binary files a/out/snapshotter/modules/pooler/flow/flow.png and /dev/null differ diff --git a/poetry.lock b/poetry.lock index 61094356..05d46c22 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.0 and should not be changed by hand. [[package]] name = "aio-pika" -version = "9.2.0" +version = "9.3.0" description = "Wrapper around the aiormq for asyncio and humans" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "aio_pika-9.2.0-py3-none-any.whl", hash = "sha256:65e114d6896ec6dbe364be365dc41a6260b81accf062523b280cd33660a4e808"}, - {file = "aio_pika-9.2.0.tar.gz", hash = "sha256:00df6c27555189fd59188730daefe4fcdcc42fa7168eb8a4d69d83f6bc32b120"}, + {file = "aio_pika-9.3.0-py3-none-any.whl", hash = "sha256:3aeb60410403bb61c0c0483f6487a471bfcf1f2cc7b738d6f3f466b18641a8f0"}, + {file = "aio_pika-9.3.0.tar.gz", hash = "sha256:230c1087e089e62a590fae95b77ccda053331d3c745bca067d274d76ffceda27"}, ] [package.dependencies] @@ -17,111 +17,99 @@ yarl = "*" [[package]] name = "aiohttp" -version = "3.8.5" +version = "3.9.0" description = "Async http client/server framework (asyncio)" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"}, - {file = 
"aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"}, - {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"}, - {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"}, - {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"}, - {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"}, - {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"}, - {file = 
"aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"}, - {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"}, - {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"}, - {file = 
"aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"}, - {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"}, - {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"}, - {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"}, + {file = "aiohttp-3.9.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6896b8416be9ada4d22cd359d7cb98955576ce863eadad5596b7cdfbf3e17c6c"}, + {file = "aiohttp-3.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1736d87dad8ef46a8ec9cddd349fa9f7bd3a064c47dd6469c0d6763d3d49a4fc"}, + {file = "aiohttp-3.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c9e5f4d7208cda1a2bb600e29069eecf857e6980d0ccc922ccf9d1372c16f4b"}, + {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8488519aa05e636c5997719fe543c8daf19f538f4fa044f3ce94bee608817cff"}, + {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ab16c254e2312efeb799bc3c06897f65a133b38b69682bf75d1f1ee1a9c43a9"}, + {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a94bde005a8f926d0fa38b88092a03dea4b4875a61fbcd9ac6f4351df1b57cd"}, + {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b777c9286b6c6a94f50ddb3a6e730deec327e9e2256cb08b5530db0f7d40fd8"}, + {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:571760ad7736b34d05597a1fd38cbc7d47f7b65deb722cb8e86fd827404d1f6b"}, + {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:deac0a32aec29608eb25d730f4bc5a261a65b6c48ded1ed861d2a1852577c932"}, + {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4ee1b4152bc3190cc40ddd6a14715e3004944263ea208229ab4c297712aa3075"}, + {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:3607375053df58ed6f23903aa10cf3112b1240e8c799d243bbad0f7be0666986"}, + {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:65b0a70a25456d329a5e1426702dde67be0fb7a4ead718005ba2ca582d023a94"}, + {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a2eb5311a37fe105aa35f62f75a078537e1a9e4e1d78c86ec9893a3c97d7a30"}, + {file = "aiohttp-3.9.0-cp310-cp310-win32.whl", hash = "sha256:2cbc14a13fb6b42d344e4f27746a4b03a2cb0c1c3c5b932b0d6ad8881aa390e3"}, + {file = "aiohttp-3.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ac9669990e2016d644ba8ae4758688534aabde8dbbc81f9af129c3f5f01ca9cd"}, + {file = "aiohttp-3.9.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f8e05f5163528962ce1d1806fce763ab893b1c5b7ace0a3538cd81a90622f844"}, + {file = "aiohttp-3.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4afa8f71dba3a5a2e1e1282a51cba7341ae76585345c43d8f0e624882b622218"}, + {file = "aiohttp-3.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:f929f4c9b9a00f3e6cc0587abb95ab9c05681f8b14e0fe1daecfa83ea90f8318"}, + {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28185e36a78d247c55e9fbea2332d16aefa14c5276a582ce7a896231c6b1c208"}, + {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a486ddf57ab98b6d19ad36458b9f09e6022de0381674fe00228ca7b741aacb2f"}, + {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70e851f596c00f40a2f00a46126c95c2e04e146015af05a9da3e4867cfc55911"}, + {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5b7bf8fe4d39886adc34311a233a2e01bc10eb4e842220235ed1de57541a896"}, + {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c67a51ea415192c2e53e4e048c78bab82d21955b4281d297f517707dc836bf3d"}, + {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:694df243f394629bcae2d8ed94c589a181e8ba8604159e6e45e7b22e58291113"}, + {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3dd8119752dd30dd7bca7d4bc2a92a59be6a003e4e5c2cf7e248b89751b8f4b7"}, + {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:eb6dfd52063186ac97b4caa25764cdbcdb4b10d97f5c5f66b0fa95052e744eb7"}, + {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d97c3e286d0ac9af6223bc132dc4bad6540b37c8d6c0a15fe1e70fb34f9ec411"}, + {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:816f4db40555026e4cdda604a1088577c1fb957d02f3f1292e0221353403f192"}, + {file = "aiohttp-3.9.0-cp311-cp311-win32.whl", hash = "sha256:3abf0551874fecf95f93b58f25ef4fc9a250669a2257753f38f8f592db85ddea"}, + {file = "aiohttp-3.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:e18d92c3e9e22553a73e33784fcb0ed484c9874e9a3e96c16a8d6a1e74a0217b"}, + {file = "aiohttp-3.9.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:99ae01fb13a618b9942376df77a1f50c20a281390dad3c56a6ec2942e266220d"}, + {file = "aiohttp-3.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:05857848da443c8c12110d99285d499b4e84d59918a21132e45c3f0804876994"}, + {file = "aiohttp-3.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:317719d7f824eba55857fe0729363af58e27c066c731bc62cd97bc9c3d9c7ea4"}, + {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1e3b3c107ccb0e537f309f719994a55621acd2c8fdf6d5ce5152aed788fb940"}, + {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45820ddbb276113ead8d4907a7802adb77548087ff5465d5c554f9aa3928ae7d"}, + {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a183f1978802588711aed0dea31e697d760ce9055292db9dc1604daa9a8ded"}, + {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a4cd44788ea0b5e6bb8fa704597af3a30be75503a7ed1098bc5b8ffdf6c982"}, + {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:673343fbc0c1ac44d0d2640addc56e97a052504beacd7ade0dc5e76d3a4c16e8"}, + {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e8a3b79b6d186a9c99761fd4a5e8dd575a48d96021f220ac5b5fa856e5dd029"}, + {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:6777a390e41e78e7c45dab43a4a0196c55c3b8c30eebe017b152939372a83253"}, + {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7ae5f99a32c53731c93ac3075abd3e1e5cfbe72fc3eaac4c27c9dd64ba3b19fe"}, + {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:f1e4f254e9c35d8965d377e065c4a8a55d396fe87c8e7e8429bcfdeeb229bfb3"}, + {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11ca808f9a6b63485059f5f6e164ef7ec826483c1212a44f268b3653c91237d8"}, + {file = "aiohttp-3.9.0-cp312-cp312-win32.whl", hash = "sha256:de3cc86f4ea8b4c34a6e43a7306c40c1275e52bfa9748d869c6b7d54aa6dad80"}, + {file = "aiohttp-3.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca4fddf84ac7d8a7d0866664936f93318ff01ee33e32381a115b19fb5a4d1202"}, + {file = "aiohttp-3.9.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f09960b5bb1017d16c0f9e9f7fc42160a5a49fa1e87a175fd4a2b1a1833ea0af"}, + {file = "aiohttp-3.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8303531e2c17b1a494ffaeba48f2da655fe932c4e9a2626c8718403c83e5dd2b"}, + {file = "aiohttp-3.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4790e44f46a4aa07b64504089def5744d3b6780468c4ec3a1a36eb7f2cae9814"}, + {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1d7edf74a36de0e5ca50787e83a77cf352f5504eb0ffa3f07000a911ba353fb"}, + {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94697c7293199c2a2551e3e3e18438b4cba293e79c6bc2319f5fd652fccb7456"}, + {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a1b66dbb8a7d5f50e9e2ea3804b01e766308331d0cac76eb30c563ac89c95985"}, + {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9623cfd9e85b76b83ef88519d98326d4731f8d71869867e47a0b979ffec61c73"}, + {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f32c86dc967ab8c719fd229ce71917caad13cc1e8356ee997bf02c5b368799bf"}, + {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f50b4663c3e0262c3a361faf440761fbef60ccdde5fe8545689a4b3a3c149fb4"}, + {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dcf71c55ec853826cd70eadb2b6ac62ec577416442ca1e0a97ad875a1b3a0305"}, + {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:42fe4fd9f0dfcc7be4248c162d8056f1d51a04c60e53366b0098d1267c4c9da8"}, + {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76a86a9989ebf82ee61e06e2bab408aec4ea367dc6da35145c3352b60a112d11"}, + {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f9e09a1c83521d770d170b3801eea19b89f41ccaa61d53026ed111cb6f088887"}, + {file = "aiohttp-3.9.0-cp38-cp38-win32.whl", hash = "sha256:a00ce44c21612d185c5275c5cba4bab8d7c1590f248638b667ed8a782fa8cd6f"}, + {file = "aiohttp-3.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:d5b9345ab92ebe6003ae11d8092ce822a0242146e6fa270889b9ba965457ca40"}, + {file = "aiohttp-3.9.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98d21092bf2637c5fa724a428a69e8f5955f2182bff61f8036827cf6ce1157bf"}, + {file = "aiohttp-3.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:35a68cd63ca6aaef5707888f17a70c36efe62b099a4e853d33dc2e9872125be8"}, + {file = "aiohttp-3.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d7f6235c7475658acfc1769d968e07ab585c79f6ca438ddfecaa9a08006aee2"}, + {file = 
"aiohttp-3.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db04d1de548f7a62d1dd7e7cdf7c22893ee168e22701895067a28a8ed51b3735"}, + {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:536b01513d67d10baf6f71c72decdf492fb7433c5f2f133e9a9087379d4b6f31"}, + {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c8b0a6487e8109427ccf638580865b54e2e3db4a6e0e11c02639231b41fc0f"}, + {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7276fe0017664414fdc3618fca411630405f1aaf0cc3be69def650eb50441787"}, + {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23170247ef89ffa842a02bbfdc425028574d9e010611659abeb24d890bc53bb8"}, + {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b1a2ea8252cacc7fd51df5a56d7a2bb1986ed39be9397b51a08015727dfb69bd"}, + {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2d71abc15ff7047412ef26bf812dfc8d0d1020d664617f4913df2df469f26b76"}, + {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:2d820162c8c2bdbe97d328cd4f417c955ca370027dce593345e437b2e9ffdc4d"}, + {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:2779f5e7c70f7b421915fd47db332c81de365678180a9f3ab404088f87ba5ff9"}, + {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:366bc870d7ac61726f32a489fbe3d1d8876e87506870be66b01aeb84389e967e"}, + {file = "aiohttp-3.9.0-cp39-cp39-win32.whl", hash = "sha256:1df43596b826022b14998f0460926ce261544fedefe0d2f653e1b20f49e96454"}, + {file = "aiohttp-3.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:9c196b30f1b1aa3363a69dd69079ae9bec96c2965c4707eaa6914ba099fb7d4f"}, + {file = "aiohttp-3.9.0.tar.gz", hash = "sha256:09f23292d29135025e19e8ff4f0a68df078fe4ee013bca0105b2e803989de92d"}, ] [package.dependencies] aiosignal = ">=1.1.2" -async-timeout = ">=4.0.0a3,<5.0" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" -charset-normalizer = ">=2.0,<4.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "cchardet"] +speedups = ["Brotli", "aiodns", "brotlicffi"] [[package]] name = "aiormq" @@ -165,24 +153,24 @@ frozenlist = ">=1.1.0" [[package]] name = "anyio" -version = "3.7.1" +version = "4.0.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, - {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, + {file = "anyio-4.0.0-py3-none-any.whl", hash = "sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f"}, + {file = "anyio-4.0.0.tar.gz", hash = "sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a"}, ] [package.dependencies] -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] -test = ["anyio[trio]", "coverage[toml] (>=4.5)", 
"hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (<0.22)"] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.22)"] [[package]] name = "appdirs" @@ -215,13 +203,13 @@ resolved_reference = "2596e3d073de25918627b4f20319923b08018fb0" [[package]] name = "async-timeout" -version = "4.0.2" +version = "4.0.3" description = "Timeout context manager for asyncio programs" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, ] [[package]] @@ -258,219 +246,254 @@ tests = ["PyHamcrest (>=2.0.2)", "mypy", "pytest (>=4.6)", "pytest-benchmark", " [[package]] name = "bitarray" -version = "2.8.1" +version = "2.8.3" description = "efficient arrays of booleans -- C extension" optional = false python-versions = "*" files = [ - {file = "bitarray-2.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6be965028785413a6163dd55a639b898b22f67f9b6ed554081c23e94a602031e"}, - {file = "bitarray-2.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29e19cb80a69f6d1a64097bfbe1766c418e1a785d901b583ef0328ea10a30399"}, - {file = "bitarray-2.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0f6d705860f59721d7282496a4d29b5fd78690e1c1473503832c983e762b01b"}, - {file = "bitarray-2.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6df04efdba4e1bf9d93a1735e42005f8fcf812caf40c03934d9322412d563499"}, - {file = "bitarray-2.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:18530ed3ddd71e9ff95440afce531efc3df7a3e0657f1c201c2c3cb41dd65869"}, - {file = "bitarray-2.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4cd81ffd2d58ef68c22c825aff89f4a47bd721e2ada0a3a96793169f370ae21"}, - {file = "bitarray-2.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8367768ab797105eb97dfbd4577fcde281618de4d8d3b16ad62c477bb065f347"}, - {file = "bitarray-2.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:848af80518d0ed2aee782018588c7c88805f51b01271935df5b256c8d81c726e"}, - {file = "bitarray-2.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c54b0af16be45de534af9d77e8a180126cd059f72db8b6550f62dda233868942"}, - {file = "bitarray-2.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f30cdce22af3dc7c73e70af391bfd87c4574cc40c74d651919e20efc26e014b5"}, - {file = "bitarray-2.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bc03bb358ae3917247d257207c79162e666d407ac473718d1b95316dac94162b"}, - {file = "bitarray-2.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:cf38871ed4cd89df9db7c70f729b948fa3e2848a07c69f78e4ddfbe4f23db63c"}, - {file = "bitarray-2.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:4a637bcd199c1366c65b98f18884f0d0b87403f04676b21e4635831660d722a7"}, - {file = "bitarray-2.8.1-cp310-cp310-win32.whl", hash = "sha256:904719fb7304d4115228b63c178f0cc725ad3b73e285c4b328e45a99a8e3fad6"}, - {file = "bitarray-2.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:1e859c664500d57526fe07140889a3b58dca54ff3b16ac6dc6d534a65c933084"}, - {file = "bitarray-2.8.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2d3f28a80f2e6bb96e9360a4baf3fbacb696b5aba06a14c18a15488d4b6f398f"}, - {file = "bitarray-2.8.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4677477a406f2a9e064920463f69172b865e4d69117e1f2160064d3f5912b0bd"}, - {file = "bitarray-2.8.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9061c0a50216f24c97fb2325de84200e5ad5555f25c854ddcb3ceb6f12136055"}, - {file = "bitarray-2.8.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:843af12991161b358b6379a8dc5f6636798f3dacdae182d30995b6a2df3b263e"}, - {file = "bitarray-2.8.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9336300fd0acf07ede92e424930176dc4b43ef1b298489e93ba9a1695e8ea752"}, - {file = "bitarray-2.8.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0af01e1f61fe627f63648c0c6f52de8eac56710a2ef1dbce4851d867084cc7e"}, - {file = "bitarray-2.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ab81c74a1805fe74330859b38e70d7525cdd80953461b59c06660046afaffcf"}, - {file = "bitarray-2.8.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2015a9dd718393e814ff7b9e80c58190eb1cef7980f86a97a33e8440e158ce2"}, - {file = "bitarray-2.8.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b0493ab66c6b8e17e9fde74c646b39ee09c236cf28a787cb8cbd3a83c05bff7"}, - {file = "bitarray-2.8.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:81e83ed7e0b1c09c5a33b97712da89e7a21fd3e5598eff3975c39540f5619792"}, - {file = "bitarray-2.8.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:741c3a2c0997c8f8878edfc65a4a8f7aa72eede337c9bc0b7bd8a45cf6e70dbc"}, - {file = "bitarray-2.8.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:57aeab27120a8a50917845bb81b0976e33d4759f2156b01359e2b43d445f5127"}, - {file = "bitarray-2.8.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:17c32ba584e8fb9322419390e0e248769ed7d59de3ffa7432562a4c0ec4f1f82"}, - {file = "bitarray-2.8.1-cp311-cp311-win32.whl", hash = "sha256:b67733a240a96f09b7597af97ac4d60c59140cfcfd180f11a7221863b82f023a"}, - {file = "bitarray-2.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:7b29d4bf3d3da1847f2be9e30105bf51caaf5922e94dc827653e250ed33f4e8a"}, - {file = "bitarray-2.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:5f6175c1cf07dadad3213d60075704cf2e2f1232975cfd4ac8328c24a05e8f78"}, - {file = "bitarray-2.8.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cc066c7290151600b8872865708d2d00fb785c5db8a0df20d70d518e02f172b"}, - {file = "bitarray-2.8.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ce2ef9291a193a0e0cd5e23970bf3b682cc8b95220561d05b775b8d616d665f"}, - {file = "bitarray-2.8.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5582dd7d906e6f9ec1704f99d56d812f7d395d28c02262bc8b50834d51250c3"}, - {file = "bitarray-2.8.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2aa2267eb6d2b88ef7d139e79a6daaa84cd54d241b9797478f10dcb95a9cd620"}, - {file = 
"bitarray-2.8.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a04d4851e83730f03c4a6aac568c7d8b42f78f0f9cc8231d6db66192b030ce1e"}, - {file = "bitarray-2.8.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f7d2ec2174d503cbb092f8353527842633c530b4e03b9922411640ac9c018a19"}, - {file = "bitarray-2.8.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:b65a04b2e029b0694b52d60786732afd15b1ec6517de61a36afbb7808a2ffac1"}, - {file = "bitarray-2.8.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:55020d6fb9b72bd3606969f5431386c592ed3666133bd475af945aa0fa9e84ec"}, - {file = "bitarray-2.8.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:797de3465f5f6c6be9a412b4e99eb6e8cdb86b83b6756655c4d83a65d0b9a376"}, - {file = "bitarray-2.8.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:f9a66745682e175e143a180524a63e692acb2b8c86941073f6dd4ee906e69608"}, - {file = "bitarray-2.8.1-cp36-cp36m-win32.whl", hash = "sha256:443726af4bd60515e4e41ea36c5dbadb29a59bc799bcbf431011d1c6fd4363e3"}, - {file = "bitarray-2.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:2b0f754a5791635b8239abdcc0258378111b8ee7a8eb3e2bbc24bcc48a0f0b08"}, - {file = "bitarray-2.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d175e16419a52d54c0ac44c93309ba76dc2cfd33ee9d20624f1a5eb86b8e162e"}, - {file = "bitarray-2.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3128234bde3629ab301a501950587e847d30031a9cbf04d95f35cbf44469a9e"}, - {file = "bitarray-2.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75104c3076676708c1ac2484ebf5c26464fb3850312de33a5b5bf61bfa7dbec5"}, - {file = "bitarray-2.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82bfb6ab9b1b5451a5483c9a2ae2a8f83799d7503b384b54f6ab56ea74abb305"}, - {file = "bitarray-2.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dc064a63445366f6b26eaf77230d326b9463e903ba59d6ff5efde0c5ec1ea0e"}, - {file = "bitarray-2.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cbe54685cf6b17b3e15faf6c4b76773bc1c484bc447020737d2550a9dde5f6e6"}, - {file = "bitarray-2.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9fed8aba8d1b09cf641b50f1e6dd079c31677106ea4b63ec29f4c49adfabd63f"}, - {file = "bitarray-2.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7c17dd8fb146c2c680bf1cb28b358f9e52a14076e44141c5442148863ee95d7d"}, - {file = "bitarray-2.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c9efcee311d9ba0c619743060585af9a9b81496e97b945843d5e954c67722a75"}, - {file = "bitarray-2.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dc7acffee09822b334d1b46cd384e969804abdf18f892c82c05c2328066cd2ae"}, - {file = "bitarray-2.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ea71e0a50060f96ad0821e0ac785e91e44807f8b69555970979d81934961d5bd"}, - {file = "bitarray-2.8.1-cp37-cp37m-win32.whl", hash = "sha256:69ab51d551d50e4d6ca35abc95c9d04b33ad28418019bb5481ab09bdbc0df15c"}, - {file = "bitarray-2.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3024ab4c4906c3681408ca17c35833237d18813ebb9f24ae9f9e3157a4a66939"}, - {file = "bitarray-2.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:46fdd27c8fa4186d8b290bf74a28cbd91b94127b1b6a35c265a002e394fa9324"}, - {file = "bitarray-2.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d32ccd2c0d906eae103ef84015f0545a395052b0b6eb0e02e9023ca0132557f6"}, - {file = 
"bitarray-2.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9186cf8135ca170cd907d8c4df408a87747570d192d89ec4ff23805611c702a0"}, - {file = "bitarray-2.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8d6e5ff385fea25caf26fd58b43f087deb763dcaddd18d3df2895235cf1b484"}, - {file = "bitarray-2.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d6a9c72354327c7aa9890ff87904cbe86830cb1fb58c39750a0afac8df5e051"}, - {file = "bitarray-2.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2f13b7d0694ce2024c82fc595e6ccc3918e7f069747c3de41b1ce72a9a1e346"}, - {file = "bitarray-2.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d38ceca90ed538706e3f111513073590f723f90659a7af0b992b29776a6e816"}, - {file = "bitarray-2.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b977c39e3734e73540a2e3a71501c2c6261c70c6ce59d427bb7c4ecf6331c7e"}, - {file = "bitarray-2.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:214c05a7642040f6174e29f3e099549d3c40ac44616405081bf230dcafb38767"}, - {file = "bitarray-2.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ad440c17ef2ff42e94286186b5bcf82bf87c4026f91822675239102ebe1f7035"}, - {file = "bitarray-2.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:28dee92edd0d21655e56e1870c22468d0dabe557df18aa69f6d06b1543614180"}, - {file = "bitarray-2.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:df9d8a9a46c46950f306394705512553c552b633f8bf3c11359c4204289f11e3"}, - {file = "bitarray-2.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1a0d27aad02d8abcb1d3b7d85f463877c4937e71adf9b6adb9367f2cdad91a52"}, - {file = "bitarray-2.8.1-cp38-cp38-win32.whl", hash = "sha256:6033303431a7c85a535b3f1b0ec28abc2ebc2167c263f244993b56ccb87cae6b"}, - {file = "bitarray-2.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:9b65d487451e0e287565c8436cf4da45260f958f911299f6122a20d7ec76525c"}, - {file = "bitarray-2.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9aad7b4670f090734b272c072c9db375c63bd503512be9a9393e657dcacfc7e2"}, - {file = "bitarray-2.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bf80804014e3736515b84044c2be0e70080616b4ceddd4e38d85f3167aeb8165"}, - {file = "bitarray-2.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7f7231ef349e8f4955d9b39561f4683a418a73443cfce797a4eddbee1ba9664"}, - {file = "bitarray-2.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67e8fb18df51e649adbc81359e1db0f202d72708fba61b06f5ac8db47c08d107"}, - {file = "bitarray-2.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d5df3d6358425c9dfb6bdbd4f576563ec4173d24693a9042d05aadcb23c0b98"}, - {file = "bitarray-2.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ea51ba4204d086d5b76e84c31d2acbb355ed1b075ded54eb9b7070b0b95415d"}, - {file = "bitarray-2.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1414582b3b7516d2282433f0914dd9846389b051b2aea592ae7cc165806c24ac"}, - {file = "bitarray-2.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5934e3a623a1d485e1dcfc1990246e3c32c6fc6e7f0fd894750800d35fdb5794"}, - {file = "bitarray-2.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa08a9b03888c768b9b2383949a942804d50d8164683b39fe62f0bfbfd9b4204"}, - {file = "bitarray-2.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:00ff372dfaced7dd6cc2dffd052fafc118053cf81a442992b9a23367479d77d7"}, - {file = "bitarray-2.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:dd76bbf5a4b2ab84b8ffa229f5648e80038ba76bf8d7acc5de9dd06031b38117"}, - {file = "bitarray-2.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e88a706f92ad1e0e1e66f6811d10b6155d5f18f0de9356ee899a7966a4e41992"}, - {file = "bitarray-2.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b2560475c5a1ff96fcab01fae7cf6b9a6da590f02659556b7fccc7991e401884"}, - {file = "bitarray-2.8.1-cp39-cp39-win32.whl", hash = "sha256:74cd1725d08325b6669e6e9a5d09cec29e7c41f7d58e082286af5387414d046d"}, - {file = "bitarray-2.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:e48c45ea7944225bcee026c457a70eaea61db3659d9603f07fc8a643ab7e633b"}, - {file = "bitarray-2.8.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c2426dc7a0d92d8254def20ab7a231626397ce5b6fb3d4f44be74cc1370a60c3"}, - {file = "bitarray-2.8.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d34790a919f165b6f537935280ef5224957d9ce8ab11d339f5e6d0319a683ccc"}, - {file = "bitarray-2.8.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c26a923080bc211cab8f5a5e242e3657b32951fec8980db0616e9239aade482"}, - {file = "bitarray-2.8.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0de1bc5f971aba46de88a4eb0dbb5779e30bbd7514f4dcbff743c209e0c02667"}, - {file = "bitarray-2.8.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:3bb5f2954dd897b0bac13b5449e5c977534595b688120c8af054657a08b01f46"}, - {file = "bitarray-2.8.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:62ac31059a3c510ef64ed93d930581b262fd4592e6d95ede79fca91e8d3d3ef6"}, - {file = "bitarray-2.8.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae32ac7217e83646b9f64d7090bf7b737afaa569665621f110a05d9738ca841a"}, - {file = "bitarray-2.8.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3994f7dc48d21af40c0d69fca57d8040b02953f4c7c3652c2341d8947e9cbedf"}, - {file = "bitarray-2.8.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c361201e1c3ee6d6b2266f8b7a645389880bccab1b29e22e7a6b7b6e7831ad5"}, - {file = "bitarray-2.8.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:861850d6a58e7b6a7096d0b0efed9c6d993a6ab8b9d01e781df1f4d80cc00efa"}, - {file = "bitarray-2.8.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ee772c20dcb56b03d666a4e4383d0b5b942b0ccc27815e42fe0737b34cba2082"}, - {file = "bitarray-2.8.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63fa75e87ad8c57d5722cc87902ca148ef8bbbba12b5c5b3c3730a1bc9ac2886"}, - {file = "bitarray-2.8.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b999fb66980f885961d197d97d7ff5a13b7ab524ccf45ccb4704f4b82ce02e3"}, - {file = "bitarray-2.8.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3243e4b8279ff2fe4c6e7869f0e6930c17799ee9f8d07317f68d44a66b46281e"}, - {file = "bitarray-2.8.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:542358b178b025dcc95e7fb83389e9954f701c41d312cbb66bdd763cbe5414b5"}, - {file = "bitarray-2.8.1.tar.gz", hash = "sha256:e68ceef35a88625d16169550768fcc8d3894913e363c24ecbf6b8c07eb02c8f3"}, + {file = "bitarray-2.8.3-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:be7c6343a7f24293a988e5a27c1e2f44f028476e35192e73663c4acec5c4766e"}, + {file = "bitarray-2.8.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:38233e5793e107575be656908419d2bceab359c78c28affc386c7b88b8882b8f"}, + {file = "bitarray-2.8.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:acf24bc6aedd0a490af71591b99401867d4445d64db09a7bfe0bde3e8498cc8d"}, + {file = "bitarray-2.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04fcb292637012a1551e55c00796e31b5c66d1692ca25a5ac83d23779c23cd29"}, + {file = "bitarray-2.8.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:015908355354d42973ad41ba4eca697b4b55690b3ece6d9629118273e7a9e380"}, + {file = "bitarray-2.8.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48a89c2112420ebeb163a3c273c244d542cf9315c9ce5a875d305f91adcdac24"}, + {file = "bitarray-2.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb530a9fb7ed13a1a49bda81db2def4c73b7fef0fd1bb969b1d7605121869230"}, + {file = "bitarray-2.8.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c87146e9c2c196c012e97273f82215e2239b9bffcbb6c7802bbbedac87be2358"}, + {file = "bitarray-2.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:84a2628a5377971d73c95014e540a51327eb27ffdfbab81e43eac494eced3dc2"}, + {file = "bitarray-2.8.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6bcbe2ea34c88cf736f157cf3d713c1af112f0d7a9eec390d69a9e042b7d76d4"}, + {file = "bitarray-2.8.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:67ee9d71af3db621aa637f96520a8df8534fcc64e881360d3ed3a07f7e47ed1b"}, + {file = "bitarray-2.8.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ba3f27d82b45543a7d1488d151594915a6e67fb28bd4f21eb0901df2ba4ede86"}, + {file = "bitarray-2.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:095923f084d2271f28d7430798e698f6d0b304c58b072b4f2eb0bc132321323b"}, + {file = "bitarray-2.8.3-cp310-cp310-win32.whl", hash = "sha256:de91007504b475a93d8b0949db9dec86d39c0306de9914f7b9087daeb3d9fbaf"}, + {file = "bitarray-2.8.3-cp310-cp310-win_amd64.whl", hash = "sha256:09c140daa13d2515609d5a2dbfd289eada200e96222671194dc72eae89bc3c7b"}, + {file = "bitarray-2.8.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2bfd32ce49d23584333087262fb367b371c74cf531f6b0c16759d59f47c847d7"}, + {file = "bitarray-2.8.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:12035756896d71e82edf6a6fb46d3ca299eadbec25140c12505d4b32f561b0da"}, + {file = "bitarray-2.8.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:73fa449d9e551a063ff5c68b5d2cc0caaede5b59366d37457261ae3080f61fca"}, + {file = "bitarray-2.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18707458f6467072a9c3322835a299fa86df8fb3962f51afac2b50c6a4babf82"}, + {file = "bitarray-2.8.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f142476b3bb80f6887b5a3a08d69bbd526093aee5a00973c26458cc16dd5e47"}, + {file = "bitarray-2.8.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:47400fa421b8a3947f6676981f8d9b8581239831533dff374477ef2b86fda42f"}, + {file = "bitarray-2.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56f51107bb5406bfa4889064c01d5f9e7a545b3e2b53f159626c72c910fe8f07"}, + {file = "bitarray-2.8.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9a3741359cbb1a9eb50188e8faa0ced96ca658eb85061786b7f686efa94c3604"}, + {file = "bitarray-2.8.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c65080bbba08ce07b136490b4df3d0907ec3dd76c3c5d47fda011002420f6d31"}, + {file = "bitarray-2.8.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:117a6f409dabc15320f3212d05d878cc33436c1e118e8746bf3775da2509bb7d"}, + {file = "bitarray-2.8.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:782ff781ae3c4956c15764aefc06ceb8c1c348794f09dfc8ebf62ff35166da1f"}, + {file = "bitarray-2.8.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a7b839e5c038111fd2fbd09e83ca945da357d690e49cfa269c09aed239db9c2b"}, + {file = "bitarray-2.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ab7e9b1846cc62739d9d293a94f704949b588afb9ed72db00e26b7fcdb4661a3"}, + {file = "bitarray-2.8.3-cp311-cp311-win32.whl", hash = "sha256:20cc6573ac21627e0fde854d4e0450d4c97706213bac986c0d38d252452da155"}, + {file = "bitarray-2.8.3-cp311-cp311-win_amd64.whl", hash = "sha256:8011a63692e9e32cdc3fac3dfd0beceece926e8b53fb91750037fc386917f90b"}, + {file = "bitarray-2.8.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da61c6d7b6288d29db5be77048176f41f7320316997fced28b5415e1f939448e"}, + {file = "bitarray-2.8.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:60774f73151dbcabefb5acb6d97ac09a51c999f9a903ac6f8db3d8368d338969"}, + {file = "bitarray-2.8.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c815a7ca72a5eebcd85caaeb4d32b71af1c795e38b3dff5dcb5b6b1f3ba0b4f"}, + {file = "bitarray-2.8.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a102cd1fafee8919a069fed9ea40c1ffe4d6037fd5b0a7f47326c2f75f24f70f"}, + {file = "bitarray-2.8.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b2816afe82feeb7948e58ca0be31c254e23307953e56d3313f293f79279fbe7"}, + {file = "bitarray-2.8.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98fe712a82f65de536b65fa9af7601df4e8231f14e3b0b14ef22e16e30d2fbea"}, + {file = "bitarray-2.8.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8defbf10a731b44892001daa6903b2f2f7ad8c623a7b4d9ae6bd674592b1763e"}, + {file = "bitarray-2.8.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e98a7b510aaaf0d7368b7cb983d3106aecd28abdfa4b4593b80e7f4ab5af0a97"}, + {file = "bitarray-2.8.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a5e24317b0768789c52586a31284dec8ccafa2f6c128df2f2d79656142f1e794"}, + {file = "bitarray-2.8.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:c30dbbe2f49056d4bd97a94c07a7fc0118ecc85661fdbaada36dfa9b14dc5962"}, + {file = "bitarray-2.8.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:2adb2ba1e7196f62587f4011b213b3609a717f92698a398904192e201ec3e29e"}, + {file = "bitarray-2.8.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:3aa1bd71236e07f0e7ab859a130fc57645301fd1ffd64be9a9750bce51446acb"}, + {file = "bitarray-2.8.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:63e595ca8dab2b77104e618782764bc3b172a0e9c6f97734d5fdd299063feac0"}, + {file = "bitarray-2.8.3-cp312-cp312-win32.whl", hash = "sha256:0c3de6517df7bbac18632046e722ca9000a4aeb76da68e545437fee1e61e2bbc"}, + {file = "bitarray-2.8.3-cp312-cp312-win_amd64.whl", hash = "sha256:4a6a4e83ecab1fd1fc171c57334663b24c5d286b66421efac2428b7e105c5d62"}, + {file = "bitarray-2.8.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:993438edd54350133f7569a8691074a90aa2297def69ec0e7af34de3d175cd00"}, + {file = "bitarray-2.8.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06770f6f7d238c2e2d251e9f5346358653ea8f3dbbedc83d18598f6c044f16b4"}, + {file = "bitarray-2.8.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44e3944ebccbc38ebdb7bd3c37a9b6ff91d87db2dad4bf3910e2b01fbd36831b"}, + {file = "bitarray-2.8.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a86c308018b59b999cf3d5a16889d3a347b48a2d08f34fbb4e29d5dc05fa198a"}, + {file = "bitarray-2.8.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b92c17b15bd5536c3e067051c67531adc81fcb6c1a699a760600ccd03dfcfba"}, + {file = "bitarray-2.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3d80bc6722652c847e5f503c2ce94a641b016059ec45bde4e1f13454b33e904"}, + {file = "bitarray-2.8.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fbc7ac38de41052599f1e27edf4f33c02d5aea6810ee299825a81863a32e26a0"}, + {file = "bitarray-2.8.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bbca4c4bc9854e3166474e471f3230989fd2baf32c915e363c32f91dc6ebb704"}, + {file = "bitarray-2.8.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:74efd69ac9d06ce9f43a1f513cee8a82c314f85aa0bd74664abe9e608fb59ffd"}, + {file = "bitarray-2.8.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:c3f7a6c6b78edd81fca0035fb7a156a79f25919e1b0598afb483c26513d562f1"}, + {file = "bitarray-2.8.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b0cefac8fedb3dbbf97542dc0c6fdd8bf09a210bf6fa5799083b7309fd97b1b2"}, + {file = "bitarray-2.8.3-cp36-cp36m-win32.whl", hash = "sha256:67e366efaea6e0b5971593a83d062cb7e4e09e03d29f8d5b825effdf5f516ad3"}, + {file = "bitarray-2.8.3-cp36-cp36m-win_amd64.whl", hash = "sha256:621d5658b890b99b3f8b1a678b0afed10e096d53baa767ecbcf428fce1f48415"}, + {file = "bitarray-2.8.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ac5451951ce1e0616385e77de49afc7bd90bdf9d0aa99c0fd7b0bd23400db890"}, + {file = "bitarray-2.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff6b6b47da38223803aa3e7aab356f84e0636ecdbd43fa4bd11dbc00a923d474"}, + {file = "bitarray-2.8.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:154082c814e4007bf15d8dfc576ebd4e79e9ed3626017cd53810961cee7e65d8"}, + {file = "bitarray-2.8.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9f4f29c0338e5862ebc3b88091d29ff28d44ab80381f238da08aabb054777c2"}, + {file = "bitarray-2.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b153b846a6ac4b6eca71bb5f84d3dba51f3cd159f4322f5d67b2c41cf15973ad"}, + {file = "bitarray-2.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a2c8e06c3463746181255e03f07535c136f5346fb9c4a90eec2da27695102533"}, + {file = "bitarray-2.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f16a2247c27f4db3f8d01665ee97d46eaf0240b7a9feae16c17e906a3bb9a794"}, + {file = "bitarray-2.8.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:57f1fc3a089d9907859e940c6a4db3f5358013c75bba3b15156d93a58bca868e"}, + {file = "bitarray-2.8.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c42fcddc955d84164667d899e8d4bbb763f4bc029fe72642a65df7382c46fe94"}, + {file = "bitarray-2.8.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:e60254ac626790c8c95415b095c6831056ca57a5d31839564210530c3278f170"}, + {file = "bitarray-2.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a0bb2e5c0c9f964bf43a09a1cf37233ff96b3318c9a50b1b7c3d74a875b32072"}, + {file = "bitarray-2.8.3-cp37-cp37m-win32.whl", hash = "sha256:edddd6d885c7195ba7734936bc1efc8a37de18ec886a8be44a484980da87947e"}, + {file = "bitarray-2.8.3-cp37-cp37m-win_amd64.whl", hash = "sha256:44ee266b71cd6bd7c99f937b30ac3b7627cad04777f2c12894cd0f820cb79ada"}, + {file = "bitarray-2.8.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a836a988ada812776af9ea6e88edf1e2eaaf38ebd545bbbcd500b2db0ced3a4f"}, + {file = "bitarray-2.8.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:089a4658706ec63293c153ffb1472cea1bbefb39ccfb214f52f0c1f5d10bf28e"}, + {file = "bitarray-2.8.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f8c492d90b41c510d799cc37c27892b149be77e225df6446854ce0b164e243a3"}, + {file = "bitarray-2.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b661052a4762825790a728469f897c341558392342cb68a6c54708d4e5198254"}, + {file = "bitarray-2.8.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4fd5e8a2e1b898ebc91faf6e1938bde38a4d20ee8ea49835e9adadd9b87c97c"}, + {file = "bitarray-2.8.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d4f3e78a8c1c5bf625632488a4bdd78fe87c4603ea10443cb8f207c2a846efe"}, + {file = "bitarray-2.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5797552e849079ff963936a037087367f20b41d5a612b07a1ba032259a2b86c8"}, + {file = "bitarray-2.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:adfc210df3d85017f5d2ef82db94d46b585ecbbd7357a6ee1c3bc125cc2658e2"}, + {file = "bitarray-2.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:252bdf94c74192b10f7fdb42683adf1403892acdce39e3e3524e8b070793b1c7"}, + {file = "bitarray-2.8.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:879bb9f11bad60a5588f5efb4e60f42844e4787ce7d5bb0f8eb8b87a835e914f"}, + {file = "bitarray-2.8.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7a6413b5f53d44e134276d5a3747b71d17cbc25177a50445458921424a760dcd"}, + {file = "bitarray-2.8.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3d0daf70de198dcde459451c534333c0f59ab847649be013c9b88d24f0e49767"}, + {file = "bitarray-2.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:09244fa4e39ca263820dd8eca83a0175a98fb8f9bd353b4285a9ef2928b7fb41"}, + {file = "bitarray-2.8.3-cp38-cp38-win32.whl", hash = "sha256:7ad527ff1d398a703eba71ac270625087691e62efab8d0e331c53affe0628030"}, + {file = "bitarray-2.8.3-cp38-cp38-win_amd64.whl", hash = "sha256:2fcaf220e53518762dae0701082cb70d620656eaaecf5512695a6afafa885ea6"}, + {file = "bitarray-2.8.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e19756480bff2703155060d1849d37138a1d2242287563de112fb5bdd3217d"}, + {file = "bitarray-2.8.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:123333df4b22f12f4fc13fa4821b8ca075df59161bd41f5f189ffc791aaac10b"}, + {file = "bitarray-2.8.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ff62c1c174ceae7ef0456702f9eff1f3d76590c075b9c984c459d734f73fc766"}, + {file = "bitarray-2.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7554518934364b30d8da085f7a759ee3838c9ae4265b48beb82072f942b2816e"}, + {file = "bitarray-2.8.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b8f0306dbc6605dd7f9e2dada33a3916c0c28f37128464de7153df7d8cf7a959"}, + {file = "bitarray-2.8.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2aeae0f2dacf546256f8720a1e8233b6735a3bf76778be701a1736d26fe4ecec"}, + {file = "bitarray-2.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c02d24051d7070b8f3b52fa9c8984fd8eb035115545f7c4be44c9825e8b58c8"}, + {file = "bitarray-2.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82fe0a774204159383d1be993191d51500cb44adbd3e9287da801e4657c0d4b2"}, + {file = "bitarray-2.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa4513a7393055faef630dcfb4d10a339c47eeb943487c0e9063ba763b66cb73"}, + {file = "bitarray-2.8.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36f9752b654e18f99130a2bf84f54b1e6b8fad4f5f768f4390eb9b769a64a59c"}, + {file = "bitarray-2.8.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a4212b66f9ae2e28ca1aa0307167ebfcdb2ca263a56b786cc572699e8a717f91"}, + {file = "bitarray-2.8.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cadccf651900e3858e55dfd762d5de0786aec853f1fb26183905ddee233183b4"}, + {file = "bitarray-2.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f756d159099f154a21d73932f13c8ce27f45a1c892d9b19c66a1a2c50c18474"}, + {file = "bitarray-2.8.3-cp39-cp39-win32.whl", hash = "sha256:c2ffed55994f5c73d34371474946767f936b0b83237f800be0f27a3e783baadb"}, + {file = "bitarray-2.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:f69cacb3d983200114e48ec0c894e28690926f166b71202f75e976d5cd588be9"}, + {file = "bitarray-2.8.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d6a8a1da9205de97eea14aaa731c657fa8decd2d6878ee3d2d4bf33291960216"}, + {file = "bitarray-2.8.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8562dd32b4d9810a0b9c04fe3d1ed8078f27d74e3738063162c677b253216666"}, + {file = "bitarray-2.8.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed974048a4ced6e7b5d1cfcb83c046e70bf31b8a28eacfee3afa62f8690dee69"}, + {file = "bitarray-2.8.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2448d8f5ce6d8a840a5dff1b41f5124445141530724af7ba82ec7967eabd290a"}, + {file = "bitarray-2.8.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:64d867953b530b3dde93663d4c4708b533216e9dca3f3b4489698261cd80fcef"}, + {file = "bitarray-2.8.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:72bba6b388ba7c48a882bd58c86972aab73a30c3fb5b3341f28eb5bdc17365f8"}, + {file = "bitarray-2.8.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f62ee2eae65b72e034a24ac2bacd78d48845193168b54407e93bccd3772b247f"}, + {file = "bitarray-2.8.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07ed46857ed73765f2316e08f2d5108b7e694b44f4293e30fb526f3123c829d4"}, + {file = "bitarray-2.8.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:136bd205384a3089bc22c02a365a152e61b1e8d06ec664185c90e3ab8967260c"}, + {file = "bitarray-2.8.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:42d2d0123b1e68b387f4b2fd288e1a8f0dfb991cf1d2fbc56d948c3f4a113d8d"}, + {file = "bitarray-2.8.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5f35d5ff7334610b42632b30c27332b30db3680dd0174f86e382c3e150dfea2c"}, + {file = 
"bitarray-2.8.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7618abbac8999cd942be278130b88ac6ed364ba3446222f1db0faf4de7a052cf"}, + {file = "bitarray-2.8.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50923d862e01a546f942272193612f386ec1f90cc4528b10561854902bd8aab0"}, + {file = "bitarray-2.8.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c99838782dbec7f0c5cba1a6d4faa8e2da2b522423aa36a7f383a2265ac0ae3f"}, + {file = "bitarray-2.8.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e76735a285e834fc9db560de11e086453128c1177950a15c3404fe16c7d76f5e"}, + {file = "bitarray-2.8.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ffa74d8601e26570f1d0e3042fda6eb26b64ba8d8dfe9b96d0bf90a6f0d81582"}, + {file = "bitarray-2.8.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6993e46c81702d0bb39aad83ceb228cec087bc321782fbd2c6ddff7c653dcc8"}, + {file = "bitarray-2.8.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d9ec6a214563d2edd46d1a553583782379a2cb1016e8cc6c524e011905433b1"}, + {file = "bitarray-2.8.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34ceedbeed9aefde10c273d44801971db8f7505f80933fbb936969ee2343b8a3"}, + {file = "bitarray-2.8.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cc178297951343c8d8cd8a391999abf0024ca319671418f98dea0d7e71354126"}, + {file = "bitarray-2.8.3.tar.gz", hash = "sha256:e15587b2bdf18d32eb3ba25f5f5a51bedd0dc06b3112a4c53dab5e7753bc6588"}, ] [[package]] name = "certifi" -version = "2023.7.22" +version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, ] [[package]] name = "charset-normalizer" -version = "3.2.0" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = 
"charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = 
"charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + 
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] name = "click" -version = "8.1.6" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -798,13 +821,13 @@ test = ["hypothesis (>=4.43.0,<5.0.0)", "pytest (==5.4.1)", "pytest-xdist", "tox [[package]] name = "exceptiongroup" -version = "1.1.2" +version = "1.1.3" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = 
"sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] @@ -833,21 +856,22 @@ test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==23.1.0)", "coverage[toml] (>=6 [[package]] name = "fastapi-pagination" -version = "0.12.8" +version = "0.12.12" description = "FastAPI pagination" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "fastapi_pagination-0.12.8-py3-none-any.whl", hash = "sha256:d162fe05d5725301229d47b773eff62f578bdd4db9a97b521151a656e5d0b489"}, - {file = "fastapi_pagination-0.12.8.tar.gz", hash = "sha256:7f740346e3b40038de13eb59941c47d7fbd1ea8f99346b2f571d5c7759400aa2"}, + {file = "fastapi_pagination-0.12.12-py3-none-any.whl", hash = "sha256:a711d007daf1c98ad6f9e4f8269fa95812e16fa146c4efe5180b07997a39005e"}, + {file = "fastapi_pagination-0.12.12.tar.gz", hash = "sha256:7000fd7deccc2b1cc6ed777f9953587ea15c199a50c6b2c77a54f1dad9295e68"}, ] [package.dependencies] fastapi = ">=0.93.0" pydantic = ">=1.9.1" +typing-extensions = ">=4.8.0,<5.0.0" [package.extras] -all = ["SQLAlchemy (>=1.3.20)", "asyncpg (>=0.24.0)", "beanie (>=1.11.9,<2.0.0)", "bunnet (>=1.1.0,<2.0.0)", "databases (>=0.6.0)", "django (<5.0.0)", "mongoengine (>=0.23.1,<0.28.0)", "motor (>=2.5.1,<4.0.0)", "orm (>=0.3.1)", "ormar (>=0.11.2)", "piccolo (>=0.89,<0.120)", "pony (>=0.7.16,<0.8.0)", "scylla-driver (>=3.25.6,<4.0.0)", "sqlakeyset (>=2.0.1680321678,<3.0.0)", "sqlmodel (>=0.0.8,<0.0.9)", "tortoise-orm (>=0.16.18,<0.20.0)"] +all = ["SQLAlchemy (>=1.3.20)", "asyncpg (>=0.24.0)", "beanie (>=1.11.9,<2.0.0)", "bunnet (>=1.1.0,<2.0.0)", "databases (>=0.6.0)", "django (<5.0.0)", "mongoengine (>=0.23.1,<0.28.0)", "motor (>=2.5.1,<4.0.0)", "orm (>=0.3.1)", "ormar (>=0.11.2)", "piccolo (>=0.89,<0.122)", "pony (>=0.7.16,<0.8.0)", "scylla-driver (>=3.25.6,<4.0.0)", "sqlakeyset (>=2.0.1680321678,<3.0.0)", "sqlmodel (>=0.0.8,<0.0.12)", "tortoise-orm (>=0.16.18,<0.21.0)"] asyncpg = ["SQLAlchemy (>=1.3.20)", "asyncpg (>=0.24.0)"] beanie = ["beanie (>=1.11.9,<2.0.0)"] bunnet = ["bunnet (>=1.1.0,<2.0.0)"] @@ -857,11 +881,11 @@ mongoengine = ["mongoengine (>=0.23.1,<0.28.0)"] motor = ["motor (>=2.5.1,<4.0.0)"] orm = ["databases (>=0.6.0)", "orm (>=0.3.1)"] ormar = ["ormar (>=0.11.2)"] -piccolo = ["piccolo (>=0.89,<0.120)"] +piccolo = ["piccolo (>=0.89,<0.122)"] scylla-driver = ["scylla-driver (>=3.25.6,<4.0.0)"] sqlalchemy = ["SQLAlchemy (>=1.3.20)", "sqlakeyset (>=2.0.1680321678,<3.0.0)"] -sqlmodel = ["sqlakeyset (>=2.0.1680321678,<3.0.0)", "sqlmodel (>=0.0.8,<0.0.9)"] -tortoise = ["tortoise-orm (>=0.16.18,<0.20.0)"] +sqlmodel = ["sqlakeyset (>=2.0.1680321678,<3.0.0)", "sqlmodel (>=0.0.8,<0.0.12)"] +tortoise = ["tortoise-orm (>=0.16.18,<0.21.0)"] [[package]] name = "frozendict" @@ -1107,7 +1131,7 @@ validators = "^0.20.0" type = "git" url = "https://git@github.com/PowerLoom/py-ipfs-client.git" reference = "HEAD" -resolved_reference = "11bb5fd25f67a0132238056b3f7840b37f63e27e" +resolved_reference = "0edf32e2d07f09efbca4e213492ef2c687bdf778" [[package]] name = "importlib" @@ -1121,21 +1145,21 @@ files = [ [[package]] name = "importlib-resources" -version = "6.0.1" +version = "6.1.1" description = "Read 
resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.0.1-py3-none-any.whl", hash = "sha256:134832a506243891221b88b4ae1213327eea96ceb4e407a00d790bb0626f45cf"}, - {file = "importlib_resources-6.0.1.tar.gz", hash = "sha256:4359457e42708462b9626a04657c6208ad799ceb41e5c58c57ffa0e6a098a5d4"}, + {file = "importlib_resources-6.1.1-py3-none-any.whl", hash = "sha256:e8bf90d8213b486f428c9c39714b920041cb02c184686a3dee24905aaa8105d6"}, + {file = "importlib_resources-6.1.1.tar.gz", hash = "sha256:3893a00122eafde6894c59914446a512f728a0c1a45f9bb9b63721b6bacf0b4a"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] [[package]] name = "ipfshttpclient" @@ -1154,13 +1178,13 @@ requests = ">=2.11" [[package]] name = "jsonschema" -version = "4.19.0" +version = "4.20.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.19.0-py3-none-any.whl", hash = "sha256:043dc26a3845ff09d20e4420d6012a9c91c9aa8999fa184e7efcfeccb41e32cb"}, - {file = "jsonschema-4.19.0.tar.gz", hash = "sha256:6e1e7569ac13be8139b2dd2c21a55d350066ee3f80df06c608b398cdc6f30e8f"}, + {file = "jsonschema-4.20.0-py3-none-any.whl", hash = "sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3"}, + {file = "jsonschema-4.20.0.tar.gz", hash = "sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa"}, ] [package.dependencies] @@ -1177,18 +1201,18 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2023.7.1" +version = "2023.11.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, - {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, + {file = "jsonschema_specifications-2023.11.1-py3-none-any.whl", hash = "sha256:f596778ab612b3fd29f72ea0d990393d0540a5aab18bf0407a46632eab540779"}, + {file = "jsonschema_specifications-2023.11.1.tar.gz", hash = "sha256:c9b234904ffe02f079bf91b14d79987faa685fd4b39c377a0996954c0090b9ca"}, ] [package.dependencies] importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} -referencing = ">=0.28.0" +referencing = ">=0.31.0" [[package]] name = "limits" @@ -1219,13 +1243,13 @@ rediscluster = ["redis (>=4.2.0)"] [[package]] name = "loguru" -version = "0.7.0" +version = "0.7.2" description = "Python logging made (stupidly) simple" optional = false python-versions = ">=3.5" files = [ - {file = "loguru-0.7.0-py3-none-any.whl", hash = 
"sha256:b93aa30099fa6860d4727f1b81f8718e965bb96253fa190fab2077aaad6d15d3"}, - {file = "loguru-0.7.0.tar.gz", hash = "sha256:1612053ced6ae84d7959dd7d5e431a0532642237ec21f7fd83ac73fe539e03e1"}, + {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, + {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, ] [package.dependencies] @@ -1233,97 +1257,96 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] -dev = ["Sphinx (==5.3.0)", "colorama (==0.4.5)", "colorama (==0.4.6)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v0.990)", "pre-commit (==3.2.1)", "pytest (==6.1.2)", "pytest (==7.2.1)", "pytest-cov (==2.12.1)", "pytest-cov (==4.0.0)", "pytest-mypy-plugins (==1.10.1)", "pytest-mypy-plugins (==1.9.3)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.2.0)", "tox (==3.27.1)", "tox (==4.4.6)"] +dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] [[package]] name = "lru-dict" -version = "1.2.0" +version = "1.3.0" description = "An Dict like LRU container." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "lru-dict-1.2.0.tar.gz", hash = "sha256:13c56782f19d68ddf4d8db0170041192859616514c706b126d0df2ec72a11bd7"}, - {file = "lru_dict-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:de906e5486b5c053d15b7731583c25e3c9147c288ac8152a6d1f9bccdec72641"}, - {file = "lru_dict-1.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604d07c7604b20b3130405d137cae61579578b0e8377daae4125098feebcb970"}, - {file = "lru_dict-1.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:203b3e78d03d88f491fa134f85a42919020686b6e6f2d09759b2f5517260c651"}, - {file = "lru_dict-1.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:020b93870f8c7195774cbd94f033b96c14f51c57537969965c3af300331724fe"}, - {file = "lru_dict-1.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1184d91cfebd5d1e659d47f17a60185bbf621635ca56dcdc46c6a1745d25df5c"}, - {file = "lru_dict-1.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fc42882b554a86e564e0b662da47b8a4b32fa966920bd165e27bb8079a323bc1"}, - {file = "lru_dict-1.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:18ee88ada65bd2ffd483023be0fa1c0a6a051ef666d1cd89e921dcce134149f2"}, - {file = "lru_dict-1.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:756230c22257597b7557eaef7f90484c489e9ba78e5bb6ab5a5bcfb6b03cb075"}, - {file = "lru_dict-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4da599af36618881748b5db457d937955bb2b4800db891647d46767d636c408"}, - {file = "lru_dict-1.2.0-cp310-cp310-win32.whl", hash = "sha256:35a142a7d1a4fd5d5799cc4f8ab2fff50a598d8cee1d1c611f50722b3e27874f"}, - {file = 
"lru_dict-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:6da5b8099766c4da3bf1ed6e7d7f5eff1681aff6b5987d1258a13bd2ed54f0c9"}, - {file = "lru_dict-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b20b7c9beb481e92e07368ebfaa363ed7ef61e65ffe6e0edbdbaceb33e134124"}, - {file = "lru_dict-1.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22147367b296be31cc858bf167c448af02435cac44806b228c9be8117f1bfce4"}, - {file = "lru_dict-1.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34a3091abeb95e707f381a8b5b7dc8e4ee016316c659c49b726857b0d6d1bd7a"}, - {file = "lru_dict-1.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:877801a20f05c467126b55338a4e9fa30e2a141eb7b0b740794571b7d619ee11"}, - {file = "lru_dict-1.2.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d3336e901acec897bcd318c42c2b93d5f1d038e67688f497045fc6bad2c0be7"}, - {file = "lru_dict-1.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8dafc481d2defb381f19b22cc51837e8a42631e98e34b9e0892245cc96593deb"}, - {file = "lru_dict-1.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:87bbad3f5c3de8897b8c1263a9af73bbb6469fb90e7b57225dad89b8ef62cd8d"}, - {file = "lru_dict-1.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:25f9e0bc2fe8f41c2711ccefd2871f8a5f50a39e6293b68c3dec576112937aad"}, - {file = "lru_dict-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ae301c282a499dc1968dd633cfef8771dd84228ae9d40002a3ea990e4ff0c469"}, - {file = "lru_dict-1.2.0-cp311-cp311-win32.whl", hash = "sha256:c9617583173a29048e11397f165501edc5ae223504a404b2532a212a71ecc9ed"}, - {file = "lru_dict-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6b7a031e47421d4b7aa626b8c91c180a9f037f89e5d0a71c4bb7afcf4036c774"}, - {file = "lru_dict-1.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ea2ac3f7a7a2f32f194c84d82a034e66780057fd908b421becd2f173504d040e"}, - {file = "lru_dict-1.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd46c94966f631a81ffe33eee928db58e9fbee15baba5923d284aeadc0e0fa76"}, - {file = "lru_dict-1.2.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:086ce993414f0b28530ded7e004c77dc57c5748fa6da488602aa6e7f79e6210e"}, - {file = "lru_dict-1.2.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df25a426446197488a6702954dcc1de511deee20c9db730499a2aa83fddf0df1"}, - {file = "lru_dict-1.2.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c53b12b89bd7a6c79f0536ff0d0a84fdf4ab5f6252d94b24b9b753bd9ada2ddf"}, - {file = "lru_dict-1.2.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f9484016e6765bd295708cccc9def49f708ce07ac003808f69efa386633affb9"}, - {file = "lru_dict-1.2.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d0f7ec902a0097ac39f1922c89be9eaccf00eb87751e28915320b4f72912d057"}, - {file = "lru_dict-1.2.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:981ef3edc82da38d39eb60eae225b88a538d47b90cce2e5808846fd2cf64384b"}, - {file = "lru_dict-1.2.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e25b2e90a032dc248213af7f3f3e975e1934b204f3b16aeeaeaff27a3b65e128"}, - {file = "lru_dict-1.2.0-cp36-cp36m-win32.whl", hash = "sha256:59f3df78e94e07959f17764e7fa7ca6b54e9296953d2626a112eab08e1beb2db"}, - {file = 
"lru_dict-1.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:de24b47159e07833aeab517d9cb1c3c5c2d6445cc378b1c2f1d8d15fb4841d63"}, - {file = "lru_dict-1.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d0dd4cd58220351233002f910e35cc01d30337696b55c6578f71318b137770f9"}, - {file = "lru_dict-1.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a87bdc291718bbdf9ea4be12ae7af26cbf0706fa62c2ac332748e3116c5510a7"}, - {file = "lru_dict-1.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05fb8744f91f58479cbe07ed80ada6696ec7df21ea1740891d4107a8dd99a970"}, - {file = "lru_dict-1.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00f6e8a3fc91481b40395316a14c94daa0f0a5de62e7e01a7d589f8d29224052"}, - {file = "lru_dict-1.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b172fce0a0ffc0fa6d282c14256d5a68b5db1e64719c2915e69084c4b6bf555"}, - {file = "lru_dict-1.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e707d93bae8f0a14e6df1ae8b0f076532b35f00e691995f33132d806a88e5c18"}, - {file = "lru_dict-1.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b9ec7a4a0d6b8297102aa56758434fb1fca276a82ed7362e37817407185c3abb"}, - {file = "lru_dict-1.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f404dcc8172da1f28da9b1f0087009578e608a4899b96d244925c4f463201f2a"}, - {file = "lru_dict-1.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1171ad3bff32aa8086778be4a3bdff595cc2692e78685bcce9cb06b96b22dcc2"}, - {file = "lru_dict-1.2.0-cp37-cp37m-win32.whl", hash = "sha256:0c316dfa3897fabaa1fe08aae89352a3b109e5f88b25529bc01e98ac029bf878"}, - {file = "lru_dict-1.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5919dd04446bc1ee8d6ecda2187deeebfff5903538ae71083e069bc678599446"}, - {file = "lru_dict-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbf36c5a220a85187cacc1fcb7dd87070e04b5fc28df7a43f6842f7c8224a388"}, - {file = "lru_dict-1.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:712e71b64da181e1c0a2eaa76cd860265980cd15cb0e0498602b8aa35d5db9f8"}, - {file = "lru_dict-1.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f54908bf91280a9b8fa6a8c8f3c2f65850ce6acae2852bbe292391628ebca42f"}, - {file = "lru_dict-1.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3838e33710935da2ade1dd404a8b936d571e29268a70ff4ca5ba758abb3850df"}, - {file = "lru_dict-1.2.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5d5a5f976b39af73324f2b793862859902ccb9542621856d51a5993064f25e4"}, - {file = "lru_dict-1.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8bda3a9afd241ee0181661decaae25e5336ce513ac268ab57da737eacaa7871f"}, - {file = "lru_dict-1.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bd2cd1b998ea4c8c1dad829fc4fa88aeed4dee555b5e03c132fc618e6123f168"}, - {file = "lru_dict-1.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b55753ee23028ba8644fd22e50de7b8f85fa60b562a0fafaad788701d6131ff8"}, - {file = "lru_dict-1.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e51fa6a203fa91d415f3b2900e5748ec8e06ad75777c98cc3aeb3983ca416d7"}, - {file = "lru_dict-1.2.0-cp38-cp38-win32.whl", hash = "sha256:cd6806313606559e6c7adfa0dbeb30fc5ab625f00958c3d93f84831e7a32b71e"}, - {file = "lru_dict-1.2.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:5d90a70c53b0566084447c3ef9374cc5a9be886e867b36f89495f211baabd322"}, - {file = "lru_dict-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3ea7571b6bf2090a85ff037e6593bbafe1a8598d5c3b4560eb56187bcccb4dc"}, - {file = "lru_dict-1.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:287c2115a59c1c9ed0d5d8ae7671e594b1206c36ea9df2fca6b17b86c468ff99"}, - {file = "lru_dict-1.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5ccfd2291c93746a286c87c3f895165b697399969d24c54804ec3ec559d4e43"}, - {file = "lru_dict-1.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b710f0f4d7ec4f9fa89dfde7002f80bcd77de8024017e70706b0911ea086e2ef"}, - {file = "lru_dict-1.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5345bf50e127bd2767e9fd42393635bbc0146eac01f6baf6ef12c332d1a6a329"}, - {file = "lru_dict-1.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:291d13f85224551913a78fe695cde04cbca9dcb1d84c540167c443eb913603c9"}, - {file = "lru_dict-1.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d5bb41bc74b321789803d45b124fc2145c1b3353b4ad43296d9d1d242574969b"}, - {file = "lru_dict-1.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0facf49b053bf4926d92d8d5a46fe07eecd2af0441add0182c7432d53d6da667"}, - {file = "lru_dict-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:987b73a06bcf5a95d7dc296241c6b1f9bc6cda42586948c9dabf386dc2bef1cd"}, - {file = "lru_dict-1.2.0-cp39-cp39-win32.whl", hash = "sha256:231d7608f029dda42f9610e5723614a35b1fff035a8060cf7d2be19f1711ace8"}, - {file = "lru_dict-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:71da89e134747e20ed5b8ad5b4ee93fc5b31022c2b71e8176e73c5a44699061b"}, - {file = "lru_dict-1.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:21b3090928c7b6cec509e755cc3ab742154b33660a9b433923bd12c37c448e3e"}, - {file = "lru_dict-1.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaecd7085212d0aa4cd855f38b9d61803d6509731138bf798a9594745953245b"}, - {file = "lru_dict-1.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead83ac59a29d6439ddff46e205ce32f8b7f71a6bd8062347f77e232825e3d0a"}, - {file = "lru_dict-1.2.0-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:312b6b2a30188586fe71358f0f33e4bac882d33f5e5019b26f084363f42f986f"}, - {file = "lru_dict-1.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b30122e098c80e36d0117810d46459a46313421ce3298709170b687dc1240b02"}, - {file = "lru_dict-1.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f010cfad3ab10676e44dc72a813c968cd586f37b466d27cde73d1f7f1ba158c2"}, - {file = "lru_dict-1.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20f5f411f7751ad9a2c02e80287cedf69ae032edd321fe696e310d32dd30a1f8"}, - {file = "lru_dict-1.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afdadd73304c9befaed02eb42f5f09fdc16288de0a08b32b8080f0f0f6350aa6"}, - {file = "lru_dict-1.2.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7ab0c10c4fa99dc9e26b04e6b62ac32d2bcaea3aad9b81ec8ce9a7aa32b7b1b"}, - {file = "lru_dict-1.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:edad398d5d402c43d2adada390dd83c74e46e020945ff4df801166047013617e"}, - {file = "lru_dict-1.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:91d577a11b84387013815b1ad0bb6e604558d646003b44c92b3ddf886ad0f879"}, - {file = "lru_dict-1.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb12f19cdf9c4f2d9aa259562e19b188ff34afab28dd9509ff32a3f1c2c29326"}, - {file = "lru_dict-1.2.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e4c85aa8844bdca3c8abac3b7f78da1531c74e9f8b3e4890c6e6d86a5a3f6c0"}, - {file = "lru_dict-1.2.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c6acbd097b15bead4de8e83e8a1030bb4d8257723669097eac643a301a952f0"}, - {file = "lru_dict-1.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b6613daa851745dd22b860651de930275be9d3e9373283a2164992abacb75b62"}, + {file = "lru-dict-1.3.0.tar.gz", hash = "sha256:54fd1966d6bd1fcde781596cb86068214edeebff1db13a2cea11079e3fd07b6b"}, + {file = "lru_dict-1.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4073333894db9840f066226d50e6f914a2240711c87d60885d8c940b69a6673f"}, + {file = "lru_dict-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0ad6361e4dd63b47b2fc8eab344198f37387e1da3dcfacfee19bafac3ec9f1eb"}, + {file = "lru_dict-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c637ab54b8cd9802fe19b260261e38820d748adf7606e34045d3c799b6dde813"}, + {file = "lru_dict-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fce5f95489ca1fc158cc9fe0f4866db9cec82c2be0470926a9080570392beaf"}, + {file = "lru_dict-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2bf2e24cf5f19c3ff69bf639306e83dced273e6fa775b04e190d7f5cd16f794"}, + {file = "lru_dict-1.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e90059f7701bef3c4da073d6e0434a9c7dc551d5adce30e6b99ef86b186f4b4a"}, + {file = "lru_dict-1.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ecb7ae557239c64077e9b26a142eb88e63cddb104111a5122de7bebbbd00098"}, + {file = "lru_dict-1.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6af36166d22dba851e06a13e35bbf33845d3dd88872e6aebbc8e3e7db70f4682"}, + {file = "lru_dict-1.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ee38d420c77eed548df47b7d74b5169a98e71c9e975596e31ab808e76d11f09"}, + {file = "lru_dict-1.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0e1845024c31e6ff246c9eb5e6f6f1a8bb564c06f8a7d6d031220044c081090b"}, + {file = "lru_dict-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3ca5474b1649555d014be1104e5558a92497509021a5ba5ea6e9b492303eb66b"}, + {file = "lru_dict-1.3.0-cp310-cp310-win32.whl", hash = "sha256:ebb03a9bd50c2ed86d4f72a54e0aae156d35a14075485b2127c4b01a3f4a63fa"}, + {file = "lru_dict-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:04cda617f4e4c27009005d0a8185ef02829b14b776d2791f5c994cc9d668bc24"}, + {file = "lru_dict-1.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:20c595764695d20bdc3ab9b582e0cc99814da183544afb83783a36d6741a0dac"}, + {file = "lru_dict-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d9b30a8f50c3fa72a494eca6be5810a1b5c89e4f0fda89374f0d1c5ad8d37d51"}, + {file = "lru_dict-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:9710737584650a4251b9a566cbb1a86f83437adb209c9ba43a4e756d12faf0d7"}, + {file = "lru_dict-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b84c321ae34f2f40aae80e18b6fa08b31c90095792ab64bb99d2e385143effaa"}, + {file = "lru_dict-1.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eed24272b4121b7c22f234daed99899817d81d671b3ed030c876ac88bc9dc890"}, + {file = "lru_dict-1.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd13af06dab7c6ee92284fd02ed9a5613a07d5c1b41948dc8886e7207f86dfd"}, + {file = "lru_dict-1.3.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1efc59bfba6aac33684d87b9e02813b0e2445b2f1c444dae2a0b396ad0ed60c"}, + {file = "lru_dict-1.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfaf75ac574447afcf8ad998789071af11d2bcf6f947643231f692948839bd98"}, + {file = "lru_dict-1.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c95f8751e2abd6f778da0399c8e0239321d560dbc58cb063827123137d213242"}, + {file = "lru_dict-1.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:abd0c284b26b5c4ee806ca4f33ab5e16b4bf4d5ec9e093e75a6f6287acdde78e"}, + {file = "lru_dict-1.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a47740652b25900ac5ce52667b2eade28d8b5fdca0ccd3323459df710e8210a"}, + {file = "lru_dict-1.3.0-cp311-cp311-win32.whl", hash = "sha256:a690c23fc353681ed8042d9fe8f48f0fb79a57b9a45daea2f0be1eef8a1a4aa4"}, + {file = "lru_dict-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:efd3f4e0385d18f20f7ea6b08af2574c1bfaa5cb590102ef1bee781bdfba84bc"}, + {file = "lru_dict-1.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c279068f68af3b46a5d649855e1fb87f5705fe1f744a529d82b2885c0e1fc69d"}, + {file = "lru_dict-1.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:350e2233cfee9f326a0d7a08e309372d87186565e43a691b120006285a0ac549"}, + {file = "lru_dict-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4eafb188a84483b3231259bf19030859f070321b00326dcb8e8c6cbf7db4b12f"}, + {file = "lru_dict-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73593791047e36b37fdc0b67b76aeed439fcea80959c7d46201240f9ec3b2563"}, + {file = "lru_dict-1.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1958cb70b9542773d6241974646e5410e41ef32e5c9e437d44040d59bd80daf2"}, + {file = "lru_dict-1.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc1cd3ed2cee78a47f11f3b70be053903bda197a873fd146e25c60c8e5a32cd6"}, + {file = "lru_dict-1.3.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82eb230d48eaebd6977a92ddaa6d788f14cf4f4bcf5bbffa4ddfd60d051aa9d4"}, + {file = "lru_dict-1.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5ad659cbc349d0c9ba8e536b5f40f96a70c360f43323c29f4257f340d891531c"}, + {file = "lru_dict-1.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ba490b8972531d153ac0d4e421f60d793d71a2f4adbe2f7740b3c55dce0a12f1"}, + {file = "lru_dict-1.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:c0131351b8a7226c69f1eba5814cbc9d1d8daaf0fdec1ae3f30508e3de5262d4"}, + {file = "lru_dict-1.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0e88dba16695f17f41701269fa046197a3fd7b34a8dba744c8749303ddaa18df"}, + {file = "lru_dict-1.3.0-cp312-cp312-win32.whl", hash = 
"sha256:6ffaf595e625b388babc8e7d79b40f26c7485f61f16efe76764e32dce9ea17fc"}, + {file = "lru_dict-1.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf9da32ef2582434842ab6ba6e67290debfae72771255a8e8ab16f3e006de0aa"}, + {file = "lru_dict-1.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c265f16c936a8ff3bb4b8a4bda0be94c15ec28b63e99fdb1439c1ffe4cd437db"}, + {file = "lru_dict-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:784ca9d3b0730b3ec199c0a58f66264c63dd5d438119c739c349a6a9be8e5f6e"}, + {file = "lru_dict-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e13b2f58f647178470adaa14603bb64cc02eeed32601772ccea30e198252883c"}, + {file = "lru_dict-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ffbce5c2e80f57937679553c8f27e61ec327c962bf7ea0b15f1d74277fd5363"}, + {file = "lru_dict-1.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7969cb034b3ccc707aff877c73c225c32d7e2a7981baa8f92f5dd4d468fe8c33"}, + {file = "lru_dict-1.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca9ab676609cce85dd65d91c275e47da676d13d77faa72de286fbea30fbaa596"}, + {file = "lru_dict-1.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27c078b5d75989952acbf9b77e14c3dadc468a4aafe85174d548afbc5efc38b"}, + {file = "lru_dict-1.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6123aefe97762ad74215d05320a7f389f196f0594c8813534284d4eafeca1a96"}, + {file = "lru_dict-1.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cd869cadba9a63e1e7fe2dced4a5747d735135b86016b0a63e8c9e324ab629ac"}, + {file = "lru_dict-1.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:40a8daddc29c7edb09dfe44292cf111f1e93a8344349778721d430d336b50505"}, + {file = "lru_dict-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a03170e4152836987a88dcebde61aaeb73ab7099a00bb86509d45b3fe424230"}, + {file = "lru_dict-1.3.0-cp38-cp38-win32.whl", hash = "sha256:3b4f121afe10f5a82b8e317626eb1e1c325b3f104af56c9756064cd833b1950b"}, + {file = "lru_dict-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:1470f5828c7410e16c24b5150eb649647986e78924816e6fb0264049dea14a2b"}, + {file = "lru_dict-1.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a3c9f746a9917e784fffcedeac4c8c47a3dbd90cbe13b69e9140182ad97ce4b7"}, + {file = "lru_dict-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2789296819525a1f3204072dfcf3df6db8bcf69a8fc740ffd3de43a684ea7002"}, + {file = "lru_dict-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:170b66d29945391460351588a7bd8210a95407ae82efe0b855e945398a1d24ea"}, + {file = "lru_dict-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774ca88501a9effe8797c3db5a6685cf20978c9cb0fe836b6813cfe1ca60d8c9"}, + {file = "lru_dict-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:df2e119c6ae412d2fd641a55f8a1e2e51f45a3de3449c18b1b86c319ab79e0c4"}, + {file = "lru_dict-1.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28aa1ea42a7e48174bf513dc2416fea7511a547961e678dc6f5670ca987c18cb"}, + {file = "lru_dict-1.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9537e1cee6fa582cb68f2fb9ce82d51faf2ccc0a638b275d033fdcb1478eb80b"}, + {file = "lru_dict-1.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:64545fca797fe2c68c5168efb5f976c6e1459e058cab02445207a079180a3557"}, + {file = "lru_dict-1.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a193a14c66cfc0c259d05dddc5e566a4b09e8f1765e941503d065008feebea9d"}, + {file = "lru_dict-1.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:3cb1de0ce4137b060abaafed8474cc0ebd12cedd88aaa7f7b3ebb1ddfba86ae0"}, + {file = "lru_dict-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8551ccab1349d4bebedab333dfc8693c74ff728f4b565fe15a6bf7d296bd7ea9"}, + {file = "lru_dict-1.3.0-cp39-cp39-win32.whl", hash = "sha256:6cb0be5e79c3f34d69b90d8559f0221e374b974b809a22377122c4b1a610ff67"}, + {file = "lru_dict-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9f725f2a0bdf1c18735372d5807af4ea3b77888208590394d4660e3d07971f21"}, + {file = "lru_dict-1.3.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f8f7824db5a64581180ab9d09842e6dd9fcdc46aac9cb592a0807cd37ea55680"}, + {file = "lru_dict-1.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acd04b7e7b0c0c192d738df9c317093335e7282c64c9d1bb6b7ebb54674b4e24"}, + {file = "lru_dict-1.3.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5c20f236f27551e3f0adbf1a987673fb1e9c38d6d284502cd38f5a3845ef681"}, + {file = "lru_dict-1.3.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca3703ff03b03a1848c563bc2663d0ad813c1cd42c4d9cf75b623716d4415d9a"}, + {file = "lru_dict-1.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a9fb71ba262c6058a0017ce83d343370d0a0dbe2ae62c2eef38241ec13219330"}, + {file = "lru_dict-1.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f5b88a7c39e307739a3701194993455968fcffe437d1facab93546b1b8a334c1"}, + {file = "lru_dict-1.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2682bfca24656fb7a643621520d57b7fe684ed5fa7be008704c1235d38e16a32"}, + {file = "lru_dict-1.3.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96fc87ddf569181827458ec5ad8fa446c4690cffacda66667de780f9fcefd44d"}, + {file = "lru_dict-1.3.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcec98e2c7da7631f0811730303abc4bdfe70d013f7a11e174a2ccd5612a7c59"}, + {file = "lru_dict-1.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6bba2863060caeaedd8386b0c8ee9a7ce4d57a7cb80ceeddf440b4eff2d013ba"}, + {file = "lru_dict-1.3.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c497fb60279f1e1d7dfbe150b1b069eaa43f7e172dab03f206282f4994676c5"}, + {file = "lru_dict-1.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d9509d817a47597988615c1a322580c10100acad10c98dfcf3abb41e0e5877f"}, + {file = "lru_dict-1.3.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0213ab4e3d9a8d386c18e485ad7b14b615cb6f05df6ef44fb2a0746c6ea9278b"}, + {file = "lru_dict-1.3.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50fbd69cd3287196796ab4d50e4cc741eb5b5a01f89d8e930df08da3010c385"}, + {file = "lru_dict-1.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5247d1f011f92666010942434020ddc5a60951fefd5d12a594f0e5d9f43e3b3b"}, ] [package.extras] @@ -1431,13 +1454,13 @@ files = [ [[package]] name = "netaddr" -version = 
"0.8.0" +version = "0.9.0" description = "A network address manipulation library for Python" optional = false python-versions = "*" files = [ - {file = "netaddr-0.8.0-py2.py3-none-any.whl", hash = "sha256:9666d0232c32d2656e5e5f8d735f58fd6c7457ce52fc21c98d45f2af78f990ac"}, - {file = "netaddr-0.8.0.tar.gz", hash = "sha256:d6cc57c7a07b1d9d2e917aa8b36ae8ce61c35ba3fcd1b83ca31c5a0ee2b5a243"}, + {file = "netaddr-0.9.0-py3-none-any.whl", hash = "sha256:5148b1055679d2a1ec070c521b7db82137887fabd6d7e37f5199b44f775c3bb1"}, + {file = "netaddr-0.9.0.tar.gz", hash = "sha256:7b46fa9b1a2d71fd5de9e4a3784ef339700a53a08c8040f08baf5f1194da0128"}, ] [[package]] @@ -1542,25 +1565,27 @@ files = [ [[package]] name = "psutil" -version = "5.9.5" +version = "5.9.6" description = "Cross-platform lib for process and system monitoring in Python." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, - {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, - {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, - {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, - {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, - {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, - {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, - {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "psutil-5.9.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fb8a697f11b0f5994550555fcfe3e69799e5b060c8ecf9e2f75c69302cc35c0d"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:91ecd2d9c00db9817a4b4192107cf6954addb5d9d67a969a4f436dbc9200f88c"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:10e8c17b4f898d64b121149afb136c53ea8b68c7531155147867b7b1ac9e7e28"}, + {file = 
"psutil-5.9.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:18cd22c5db486f33998f37e2bb054cc62fd06646995285e02a51b1e08da97017"}, + {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ca2780f5e038379e520281e4c032dddd086906ddff9ef0d1b9dcf00710e5071c"}, + {file = "psutil-5.9.6-cp27-none-win32.whl", hash = "sha256:70cb3beb98bc3fd5ac9ac617a327af7e7f826373ee64c80efd4eb2856e5051e9"}, + {file = "psutil-5.9.6-cp27-none-win_amd64.whl", hash = "sha256:51dc3d54607c73148f63732c727856f5febec1c7c336f8f41fcbd6315cce76ac"}, + {file = "psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a"}, + {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92e0cc43c524834af53e9d3369245e6cc3b130e78e26100d1f63cdb0abeb3d3c"}, + {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4"}, + {file = "psutil-5.9.6-cp36-cp36m-win32.whl", hash = "sha256:3ebf2158c16cc69db777e3c7decb3c0f43a7af94a60d72e87b2823aebac3d602"}, + {file = "psutil-5.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:ff18b8d1a784b810df0b0fff3bcb50ab941c3b8e2c8de5726f9c71c601c611aa"}, + {file = "psutil-5.9.6-cp37-abi3-win32.whl", hash = "sha256:a6f01f03bf1843280f4ad16f4bde26b817847b4c1a0db59bf6419807bc5ce05c"}, + {file = "psutil-5.9.6-cp37-abi3-win_amd64.whl", hash = "sha256:6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a"}, + {file = "psutil-5.9.6-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:daecbcbd29b289aac14ece28eca6a3e60aa361754cf6da3dfb20d4d32b6c7f57"}, + {file = "psutil-5.9.6.tar.gz", hash = "sha256:e4b92ddcd7dd4cdd3f900180ea1e104932c7bce234fb88976e2a3b296441225a"}, ] [package.extras] @@ -1568,88 +1593,88 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "pycryptodome" -version = "3.18.0" +version = "3.19.0" description = "Cryptographic library for Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pycryptodome-3.18.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:d1497a8cd4728db0e0da3c304856cb37c0c4e3d0b36fcbabcc1600f18504fc54"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:928078c530da78ff08e10eb6cada6e0dff386bf3d9fa9871b4bbc9fbc1efe024"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:157c9b5ba5e21b375f052ca78152dd309a09ed04703fd3721dce3ff8ecced148"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:d20082bdac9218649f6abe0b885927be25a917e29ae0502eaf2b53f1233ce0c2"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:e8ad74044e5f5d2456c11ed4cfd3e34b8d4898c0cb201c4038fe41458a82ea27"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-win32.whl", hash = "sha256:62a1e8847fabb5213ccde38915563140a5b338f0d0a0d363f996b51e4a6165cf"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-win_amd64.whl", hash = "sha256:16bfd98dbe472c263ed2821284118d899c76968db1a6665ade0c46805e6b29a4"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7a3d22c8ee63de22336679e021c7f2386f7fc465477d59675caa0e5706387944"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:78d863476e6bad2a592645072cc489bb90320972115d8995bcfbee2f8b209918"}, - {file = 
"pycryptodome-3.18.0-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:b6a610f8bfe67eab980d6236fdc73bfcdae23c9ed5548192bb2d530e8a92780e"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:422c89fd8df8a3bee09fb8d52aaa1e996120eafa565437392b781abec2a56e14"}, - {file = "pycryptodome-3.18.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:9ad6f09f670c466aac94a40798e0e8d1ef2aa04589c29faa5b9b97566611d1d1"}, - {file = "pycryptodome-3.18.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:53aee6be8b9b6da25ccd9028caf17dcdce3604f2c7862f5167777b707fbfb6cb"}, - {file = "pycryptodome-3.18.0-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:10da29526a2a927c7d64b8f34592f461d92ae55fc97981aab5bbcde8cb465bb6"}, - {file = "pycryptodome-3.18.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f21efb8438971aa16924790e1c3dba3a33164eb4000106a55baaed522c261acf"}, - {file = "pycryptodome-3.18.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4944defabe2ace4803f99543445c27dd1edbe86d7d4edb87b256476a91e9ffa4"}, - {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:51eae079ddb9c5f10376b4131be9589a6554f6fd84f7f655180937f611cd99a2"}, - {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:83c75952dcf4a4cebaa850fa257d7a860644c70a7cd54262c237c9f2be26f76e"}, - {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:957b221d062d5752716923d14e0926f47670e95fead9d240fa4d4862214b9b2f"}, - {file = "pycryptodome-3.18.0-cp35-abi3-win32.whl", hash = "sha256:795bd1e4258a2c689c0b1f13ce9684fa0dd4c0e08680dcf597cf9516ed6bc0f3"}, - {file = "pycryptodome-3.18.0-cp35-abi3-win_amd64.whl", hash = "sha256:b1d9701d10303eec8d0bd33fa54d44e67b8be74ab449052a8372f12a66f93fb9"}, - {file = "pycryptodome-3.18.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:cb1be4d5af7f355e7d41d36d8eec156ef1382a88638e8032215c215b82a4b8ec"}, - {file = "pycryptodome-3.18.0-pp27-pypy_73-win32.whl", hash = "sha256:fc0a73f4db1e31d4a6d71b672a48f3af458f548059aa05e83022d5f61aac9c08"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f022a4fd2a5263a5c483a2bb165f9cb27f2be06f2f477113783efe3fe2ad887b"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:363dd6f21f848301c2dcdeb3c8ae5f0dee2286a5e952a0f04954b82076f23825"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12600268763e6fec3cefe4c2dcdf79bde08d0b6dc1813887e789e495cb9f3403"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4604816adebd4faf8810782f137f8426bf45fee97d8427fa8e1e49ea78a52e2c"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:01489bbdf709d993f3058e2996f8f40fee3f0ea4d995002e5968965fa2fe89fb"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3811e31e1ac3069988f7a1c9ee7331b942e605dfc0f27330a9ea5997e965efb2"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4b967bb11baea9128ec88c3d02f55a3e338361f5e4934f5240afcb667fdaec"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:9c8eda4f260072f7dbe42f473906c659dcbadd5ae6159dfb49af4da1293ae380"}, - {file = 
"pycryptodome-3.18.0.tar.gz", hash = "sha256:c9adee653fc882d98956e33ca2c1fb582e23a8af7ac82fee75bd6113c55a0413"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3006c44c4946583b6de24fe0632091c2653d6256b99a02a3db71ca06472ea1e4"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:7c760c8a0479a4042111a8dd2f067d3ae4573da286c53f13cf6f5c53a5c1f631"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:08ce3558af5106c632baf6d331d261f02367a6bc3733086ae43c0f988fe042db"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45430dfaf1f421cf462c0dd824984378bef32b22669f2635cb809357dbaab405"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:a9bcd5f3794879e91970f2bbd7d899780541d3ff439d8f2112441769c9f2ccea"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-win32.whl", hash = "sha256:190c53f51e988dceb60472baddce3f289fa52b0ec38fbe5fd20dd1d0f795c551"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-win_amd64.whl", hash = "sha256:22e0ae7c3a7f87dcdcf302db06ab76f20e83f09a6993c160b248d58274473bfa"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7822f36d683f9ad7bc2145b2c2045014afdbbd1d9922a6d4ce1cbd6add79a01e"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:05e33267394aad6db6595c0ce9d427fe21552f5425e116a925455e099fdf759a"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:829b813b8ee00d9c8aba417621b94bc0b5efd18c928923802ad5ba4cf1ec709c"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:fc7a79590e2b5d08530175823a242de6790abc73638cc6dc9d2684e7be2f5e49"}, + {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:542f99d5026ac5f0ef391ba0602f3d11beef8e65aae135fa5b762f5ebd9d3bfb"}, + {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:61bb3ccbf4bf32ad9af32da8badc24e888ae5231c617947e0f5401077f8b091f"}, + {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d49a6c715d8cceffedabb6adb7e0cbf41ae1a2ff4adaeec9432074a80627dea1"}, + {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e249a784cc98a29c77cea9df54284a44b40cafbfae57636dd2f8775b48af2434"}, + {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d033947e7fd3e2ba9a031cb2d267251620964705a013c5a461fa5233cc025270"}, + {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:84c3e4fffad0c4988aef0d5591be3cad4e10aa7db264c65fadbc633318d20bde"}, + {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:139ae2c6161b9dd5d829c9645d781509a810ef50ea8b657e2257c25ca20efe33"}, + {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5b1986c761258a5b4332a7f94a83f631c1ffca8747d75ab8395bf2e1b93283d9"}, + {file = "pycryptodome-3.19.0-cp35-abi3-win32.whl", hash = "sha256:536f676963662603f1f2e6ab01080c54d8cd20f34ec333dcb195306fa7826997"}, + {file = "pycryptodome-3.19.0-cp35-abi3-win_amd64.whl", hash = "sha256:04dd31d3b33a6b22ac4d432b3274588917dcf850cc0c51c84eca1d8ed6933810"}, + {file = "pycryptodome-3.19.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:8999316e57abcbd8085c91bc0ef75292c8618f41ca6d2b6132250a863a77d1e7"}, + {file = 
"pycryptodome-3.19.0-pp27-pypy_73-win32.whl", hash = "sha256:a0ab84755f4539db086db9ba9e9f3868d2e3610a3948cbd2a55e332ad83b01b0"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0101f647d11a1aae5a8ce4f5fad6644ae1b22bb65d05accc7d322943c69a74a6"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1601e04d32087591d78e0b81e1e520e57a92796089864b20e5f18c9564b3fa"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:506c686a1eee6c00df70010be3b8e9e78f406af4f21b23162bbb6e9bdf5427bc"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7919ccd096584b911f2a303c593280869ce1af9bf5d36214511f5e5a1bed8c34"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:560591c0777f74a5da86718f70dfc8d781734cf559773b64072bbdda44b3fc3e"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1cc2f2ae451a676def1a73c1ae9120cd31af25db3f381893d45f75e77be2400"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17940dcf274fcae4a54ec6117a9ecfe52907ed5e2e438fe712fe7ca502672ed5"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d04f5f623a280fbd0ab1c1d8ecbd753193ab7154f09b6161b0f857a1a676c15f"}, + {file = "pycryptodome-3.19.0.tar.gz", hash = "sha256:bc35d463222cdb4dbebd35e0784155c81e161b9284e567e7e933d722e533331e"}, ] [[package]] name = "pydantic" -version = "1.10.12" +version = "1.10.13" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, - {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, - {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash 
= "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, - {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, - {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, - {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, - {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, - {file = 
"pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, - {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, - {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, + {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, + {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, + {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, + {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, + {file = 
"pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, + {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, + {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, + {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, + {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, ] [package.dependencies] @@ -1702,13 +1727,13 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" [[package]] name = "referencing" -version = "0.30.2" +version = "0.31.0" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.30.2-py3-none-any.whl", hash = "sha256:449b6669b6121a9e96a7f9e410b245d471e8d48964c67113ce9afe50c8dd7bdf"}, - {file = "referencing-0.30.2.tar.gz", hash = "sha256:794ad8003c65938edcdbc027f1933215e0d0ccc0291e3ce20a4d87432b59efc0"}, + {file = "referencing-0.31.0-py3-none-any.whl", hash = "sha256:381b11e53dd93babb55696c71cf42aef2d36b8a150c49bf0bc301e36d536c882"}, + {file = "referencing-0.31.0.tar.gz", hash = "sha256:cc28f2c88fbe7b961a7817a0abc034c09a1e36358f82fedb4ffdf29a25398863"}, ] [package.dependencies] @@ -1759,125 +1784,127 @@ test = ["hypothesis (==5.19.0)", "pytest (==5.4.3)", "tox (>=2.9.1,<3)"] [[package]] name = "rpds-py" -version = "0.9.2" +version = "0.13.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - 
{file = "rpds_py-0.9.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:ab6919a09c055c9b092798ce18c6c4adf49d24d4d9e43a92b257e3f2548231e7"}, - {file = "rpds_py-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d55777a80f78dd09410bd84ff8c95ee05519f41113b2df90a69622f5540c4f8b"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a216b26e5af0a8e265d4efd65d3bcec5fba6b26909014effe20cd302fd1138fa"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:29cd8bfb2d716366a035913ced99188a79b623a3512292963d84d3e06e63b496"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44659b1f326214950a8204a248ca6199535e73a694be8d3e0e869f820767f12f"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:745f5a43fdd7d6d25a53ab1a99979e7f8ea419dfefebcab0a5a1e9095490ee5e"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a987578ac5214f18b99d1f2a3851cba5b09f4a689818a106c23dbad0dfeb760f"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf4151acb541b6e895354f6ff9ac06995ad9e4175cbc6d30aaed08856558201f"}, - {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:03421628f0dc10a4119d714a17f646e2837126a25ac7a256bdf7c3943400f67f"}, - {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13b602dc3e8dff3063734f02dcf05111e887f301fdda74151a93dbbc249930fe"}, - {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fae5cb554b604b3f9e2c608241b5d8d303e410d7dfb6d397c335f983495ce7f6"}, - {file = "rpds_py-0.9.2-cp310-none-win32.whl", hash = "sha256:47c5f58a8e0c2c920cc7783113df2fc4ff12bf3a411d985012f145e9242a2764"}, - {file = "rpds_py-0.9.2-cp310-none-win_amd64.whl", hash = "sha256:4ea6b73c22d8182dff91155af018b11aac9ff7eca085750455c5990cb1cfae6e"}, - {file = "rpds_py-0.9.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:e564d2238512c5ef5e9d79338ab77f1cbbda6c2d541ad41b2af445fb200385e3"}, - {file = "rpds_py-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f411330a6376fb50e5b7a3e66894e4a39e60ca2e17dce258d53768fea06a37bd"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e7521f5af0233e89939ad626b15278c71b69dc1dfccaa7b97bd4cdf96536bb7"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8d3335c03100a073883857e91db9f2e0ef8a1cf42dc0369cbb9151c149dbbc1b"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d25b1c1096ef0447355f7293fbe9ad740f7c47ae032c2884113f8e87660d8f6e"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a5d3fbd02efd9cf6a8ffc2f17b53a33542f6b154e88dd7b42ef4a4c0700fdad"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5934e2833afeaf36bd1eadb57256239785f5af0220ed8d21c2896ec4d3a765f"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:095b460e117685867d45548fbd8598a8d9999227e9061ee7f012d9d264e6048d"}, - {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:91378d9f4151adc223d584489591dbb79f78814c0734a7c3bfa9c9e09978121c"}, - {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:24a81c177379300220e907e9b864107614b144f6c2a15ed5c3450e19cf536fae"}, - {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:de0b6eceb46141984671802d412568d22c6bacc9b230174f9e55fc72ef4f57de"}, - {file = "rpds_py-0.9.2-cp311-none-win32.whl", hash = "sha256:700375326ed641f3d9d32060a91513ad668bcb7e2cffb18415c399acb25de2ab"}, - {file = "rpds_py-0.9.2-cp311-none-win_amd64.whl", hash = "sha256:0766babfcf941db8607bdaf82569ec38107dbb03c7f0b72604a0b346b6eb3298"}, - {file = "rpds_py-0.9.2-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1440c291db3f98a914e1afd9d6541e8fc60b4c3aab1a9008d03da4651e67386"}, - {file = "rpds_py-0.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0f2996fbac8e0b77fd67102becb9229986396e051f33dbceada3debaacc7033f"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f30d205755566a25f2ae0382944fcae2f350500ae4df4e795efa9e850821d82"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:159fba751a1e6b1c69244e23ba6c28f879a8758a3e992ed056d86d74a194a0f3"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1f044792e1adcea82468a72310c66a7f08728d72a244730d14880cd1dabe36b"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9251eb8aa82e6cf88510530b29eef4fac825a2b709baf5b94a6094894f252387"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01899794b654e616c8625b194ddd1e5b51ef5b60ed61baa7a2d9c2ad7b2a4238"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0c43f8ae8f6be1d605b0465671124aa8d6a0e40f1fb81dcea28b7e3d87ca1e1"}, - {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:207f57c402d1f8712618f737356e4b6f35253b6d20a324d9a47cb9f38ee43a6b"}, - {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b52e7c5ae35b00566d244ffefba0f46bb6bec749a50412acf42b1c3f402e2c90"}, - {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:978fa96dbb005d599ec4fd9ed301b1cc45f1a8f7982d4793faf20b404b56677d"}, - {file = "rpds_py-0.9.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6aa8326a4a608e1c28da191edd7c924dff445251b94653988efb059b16577a4d"}, - {file = "rpds_py-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aad51239bee6bff6823bbbdc8ad85136c6125542bbc609e035ab98ca1e32a192"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd4dc3602370679c2dfb818d9c97b1137d4dd412230cfecd3c66a1bf388a196"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd9da77c6ec1f258387957b754f0df60766ac23ed698b61941ba9acccd3284d1"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:190ca6f55042ea4649ed19c9093a9be9d63cd8a97880106747d7147f88a49d18"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:876bf9ed62323bc7dcfc261dbc5572c996ef26fe6406b0ff985cbcf460fc8a4c"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa2818759aba55df50592ecbc95ebcdc99917fa7b55cc6796235b04193eb3c55"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9ea4d00850ef1e917815e59b078ecb338f6a8efda23369677c54a5825dbebb55"}, - {file = 
"rpds_py-0.9.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5855c85eb8b8a968a74dc7fb014c9166a05e7e7a8377fb91d78512900aadd13d"}, - {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:14c408e9d1a80dcb45c05a5149e5961aadb912fff42ca1dd9b68c0044904eb32"}, - {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:65a0583c43d9f22cb2130c7b110e695fff834fd5e832a776a107197e59a1898e"}, - {file = "rpds_py-0.9.2-cp38-none-win32.whl", hash = "sha256:71f2f7715935a61fa3e4ae91d91b67e571aeb5cb5d10331ab681256bda2ad920"}, - {file = "rpds_py-0.9.2-cp38-none-win_amd64.whl", hash = "sha256:674c704605092e3ebbbd13687b09c9f78c362a4bc710343efe37a91457123044"}, - {file = "rpds_py-0.9.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:07e2c54bef6838fa44c48dfbc8234e8e2466d851124b551fc4e07a1cfeb37260"}, - {file = "rpds_py-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f7fdf55283ad38c33e35e2855565361f4bf0abd02470b8ab28d499c663bc5d7c"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:890ba852c16ace6ed9f90e8670f2c1c178d96510a21b06d2fa12d8783a905193"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:50025635ba8b629a86d9d5474e650da304cb46bbb4d18690532dd79341467846"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:517cbf6e67ae3623c5127206489d69eb2bdb27239a3c3cc559350ef52a3bbf0b"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0836d71ca19071090d524739420a61580f3f894618d10b666cf3d9a1688355b1"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c439fd54b2b9053717cca3de9583be6584b384d88d045f97d409f0ca867d80f"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f68996a3b3dc9335037f82754f9cdbe3a95db42bde571d8c3be26cc6245f2324"}, - {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7d68dc8acded354c972116f59b5eb2e5864432948e098c19fe6994926d8e15c3"}, - {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f963c6b1218b96db85fc37a9f0851eaf8b9040aa46dec112611697a7023da535"}, - {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a46859d7f947061b4010e554ccd1791467d1b1759f2dc2ec9055fa239f1bc26"}, - {file = "rpds_py-0.9.2-cp39-none-win32.whl", hash = "sha256:e07e5dbf8a83c66783a9fe2d4566968ea8c161199680e8ad38d53e075df5f0d0"}, - {file = "rpds_py-0.9.2-cp39-none-win_amd64.whl", hash = "sha256:682726178138ea45a0766907957b60f3a1bf3acdf212436be9733f28b6c5af3c"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:196cb208825a8b9c8fc360dc0f87993b8b260038615230242bf18ec84447c08d"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c7671d45530fcb6d5e22fd40c97e1e1e01965fc298cbda523bb640f3d923b387"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83b32f0940adec65099f3b1c215ef7f1d025d13ff947975a055989cb7fd019a4"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f67da97f5b9eac838b6980fc6da268622e91f8960e083a34533ca710bec8611"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03975db5f103997904c37e804e5f340c8fdabbb5883f26ee50a255d664eed58c"}, - {file = 
"rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:987b06d1cdb28f88a42e4fb8a87f094e43f3c435ed8e486533aea0bf2e53d931"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c861a7e4aef15ff91233751619ce3a3d2b9e5877e0fcd76f9ea4f6847183aa16"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02938432352359805b6da099c9c95c8a0547fe4b274ce8f1a91677401bb9a45f"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ef1f08f2a924837e112cba2953e15aacfccbbfcd773b4b9b4723f8f2ddded08e"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:35da5cc5cb37c04c4ee03128ad59b8c3941a1e5cd398d78c37f716f32a9b7f67"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:141acb9d4ccc04e704e5992d35472f78c35af047fa0cfae2923835d153f091be"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79f594919d2c1a0cc17d1988a6adaf9a2f000d2e1048f71f298b056b1018e872"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:a06418fe1155e72e16dddc68bb3780ae44cebb2912fbd8bb6ff9161de56e1798"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b2eb034c94b0b96d5eddb290b7b5198460e2d5d0c421751713953a9c4e47d10"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b08605d248b974eb02f40bdcd1a35d3924c83a2a5e8f5d0fa5af852c4d960af"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0805911caedfe2736935250be5008b261f10a729a303f676d3d5fea6900c96a"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab2299e3f92aa5417d5e16bb45bb4586171c1327568f638e8453c9f8d9e0f020"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c8d7594e38cf98d8a7df25b440f684b510cf4627fe038c297a87496d10a174f"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b9ec12ad5f0a4625db34db7e0005be2632c1013b253a4a60e8302ad4d462afd"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1fcdee18fea97238ed17ab6478c66b2095e4ae7177e35fb71fbe561a27adf620"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:933a7d5cd4b84f959aedeb84f2030f0a01d63ae6cf256629af3081cf3e3426e8"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:686ba516e02db6d6f8c279d1641f7067ebb5dc58b1d0536c4aaebb7bf01cdc5d"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0173c0444bec0a3d7d848eaeca2d8bd32a1b43f3d3fde6617aac3731fa4be05f"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d576c3ef8c7b2d560e301eb33891d1944d965a4d7a2eacb6332eee8a71827db6"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed89861ee8c8c47d6beb742a602f912b1bb64f598b1e2f3d758948721d44d468"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1054a08e818f8e18910f1bee731583fe8f899b0a0a5044c6e680ceea34f93876"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:99e7c4bb27ff1aab90dcc3e9d37ee5af0231ed98d99cb6f5250de28889a3d502"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c545d9d14d47be716495076b659db179206e3fd997769bc01e2d550eeb685596"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9039a11bca3c41be5a58282ed81ae422fa680409022b996032a43badef2a3752"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fb39aca7a64ad0c9490adfa719dbeeb87d13be137ca189d2564e596f8ba32c07"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2d8b3b3a2ce0eaa00c5bbbb60b6713e94e7e0becab7b3db6c5c77f979e8ed1f1"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:99b1c16f732b3a9971406fbfe18468592c5a3529585a45a35adbc1389a529a03"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c27ee01a6c3223025f4badd533bea5e87c988cb0ba2811b690395dfe16088cfe"}, - {file = "rpds_py-0.9.2.tar.gz", hash = "sha256:8d70e8f14900f2657c249ea4def963bed86a29b81f81f5b76b5a9215680de945"}, + {file = "rpds_py-0.13.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:1758197cc8d7ff383c07405f188253535b4aa7fa745cbc54d221ae84b18e0702"}, + {file = "rpds_py-0.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:715df74cbcef4387d623c917f295352127f4b3e0388038d68fa577b4e4c6e540"}, + {file = "rpds_py-0.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8a9cec0f49df9bac252d92f138c0d7708d98828e21fd57db78087d8f50b5656"}, + {file = "rpds_py-0.13.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5c2545bba02f68abdf398ef4990dc77592cc1e5d29438b35b3a3ca34d171fb4b"}, + {file = "rpds_py-0.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95375c44ffb9ea2bc25d67fb66e726ea266ff1572df50b9556fe28a5f3519cd7"}, + {file = "rpds_py-0.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:54e513df45a8a9419e7952ffd26ac9a5b7b1df97fe72530421794b0de29f9d72"}, + {file = "rpds_py-0.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a25f514a53927b6b4bd04a9a6a13b55209df54f548660eeed673336c0c946d14"}, + {file = "rpds_py-0.13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1a920fa679ec2758411d66bf68840b0a21317b9954ab0e973742d723bb67709"}, + {file = "rpds_py-0.13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f9339d1404b87e6d8cb35e485945753be57a99ab9bb389f42629215b2f6bda0f"}, + {file = "rpds_py-0.13.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c99f9dda2c959f7bb69a7125e192c74fcafb7a534a95ccf49313ae3a04807804"}, + {file = "rpds_py-0.13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bad6758df5f1042b35683bd1811d5432ac1b17700a5a2a51fdc293f7df5f7827"}, + {file = "rpds_py-0.13.0-cp310-none-win32.whl", hash = "sha256:2a29ec68fa9655ce9501bc6ae074b166e8b45c2dfcd2d71d90d1a61758ed8c73"}, + {file = "rpds_py-0.13.0-cp310-none-win_amd64.whl", hash = "sha256:244be953f13f148b0071d67a610f89cd72eb5013a147e517d6ca3f3f3b7e0380"}, + {file = "rpds_py-0.13.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:240279ca0b2afd6d4710afce1c94bf9e75fc161290bf62c0feba64d64780d80b"}, + {file = "rpds_py-0.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25c9727da2dabc93664a18eda7a70feedf478f0c4c8294e4cdba7f60a479a246"}, + {file = 
"rpds_py-0.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981e46e1e5064f95460381bff4353783b4b5ce351c930e5b507ebe0278c61dac"}, + {file = "rpds_py-0.13.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6052bb47ea583646b8ff562acacb9a2ec5ec847267049cbae3919671929e94c6"}, + {file = "rpds_py-0.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87f591ff8cc834fa01ca5899ab5edcd7ee590492a9cdcf43424ac142e731ce3e"}, + {file = "rpds_py-0.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62772259b3381e2aabf274c74fd1e1ac03b0524de0a6593900684becfa8cfe4b"}, + {file = "rpds_py-0.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4de9d20fe68c16b4d97f551a09920745add0c86430262230528b83c2ed2fe90"}, + {file = "rpds_py-0.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b70a54fb628c1d6400e351674a31ba63d2912b8c5b707f99b408674a5d8b69ab"}, + {file = "rpds_py-0.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2063ab9cd1be7ef6b5ed0f408e2bdf32c060b6f40c097a468f32864731302636"}, + {file = "rpds_py-0.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:84f7f3f18d29a1c645729634003d21d84028bd9c2fd78eba9d028998f46fa5aa"}, + {file = "rpds_py-0.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7c7ddc8d1a64623068da5a15e28001fbd0f0aff754aae7a75a4be5042191638"}, + {file = "rpds_py-0.13.0-cp311-none-win32.whl", hash = "sha256:8a33d2b6340261191bb59adb5a453fa6c7d99de85552bd4e8196411f0509c9bf"}, + {file = "rpds_py-0.13.0-cp311-none-win_amd64.whl", hash = "sha256:8b9c1dd90461940315981499df62a627571c4f0992e8bafc5396d33916224cac"}, + {file = "rpds_py-0.13.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:15a2d542de5cbfc6abddc4846d9412b59f8ee9c8dfa0b9c92a29321297c91745"}, + {file = "rpds_py-0.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8dd69e01b29ff45a0062cad5c480d8aa9301c3ef09da471f86337a78eb2d3405"}, + {file = "rpds_py-0.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efdd02971a02f98492a72b25484f1f6125fb9f2166e48cc4c9bfa563349c851b"}, + {file = "rpds_py-0.13.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91ca9aaee7ccdfa66d800b5c4ec634fefca947721bab52d6ad2f6350969a3771"}, + {file = "rpds_py-0.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afcec1f5b09d0db70aeb2d90528a9164acb61841a3124e28f6ac0137f4c36cb4"}, + {file = "rpds_py-0.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6824673f66c47f7ee759c21e973bfce3ceaf2c25cb940cb45b41105dc914e8"}, + {file = "rpds_py-0.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50b6d80925dfeb573fc5e38582fb9517c6912dc462cc858a11c8177b0837127a"}, + {file = "rpds_py-0.13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3a1a38512925829784b5dc38591c757b80cfce115c72c594dc59567dab62b9c4"}, + {file = "rpds_py-0.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:977c6123c359dcc70ce3161b781ab70b0d342de2666944b776617e01a0a7822a"}, + {file = "rpds_py-0.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c472409037e05ed87b99430f97a6b82130328bb977502813547e8ee6a3392502"}, + {file = "rpds_py-0.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:28bb22019f4a783ea06a6b81437d5996551869e8a722ee8720b744f7684d97f4"}, + {file = "rpds_py-0.13.0-cp312-none-win32.whl", hash = 
"sha256:46be9c0685cce2ea02151aa8308f2c1b78581be41a5dd239448a941a210ef5dd"}, + {file = "rpds_py-0.13.0-cp312-none-win_amd64.whl", hash = "sha256:3c5b9ad4d3e05dfcf8629f0d534f92610e9805dbce2fcb9b3c801ddb886431d5"}, + {file = "rpds_py-0.13.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:66eb5aa36e857f768c598d2082fafb733eaf53e06e1169c6b4de65636e04ffd0"}, + {file = "rpds_py-0.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c9f4c2b7d989426e9fe9b720211172cf10eb5f7aa16c63de2e5dc61457abcf35"}, + {file = "rpds_py-0.13.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1e37dfffe8959a492b7b331995f291847a41a035b4aad82d6060f38e8378a2b"}, + {file = "rpds_py-0.13.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8220321f2dccd9d66f72639185247cb7bbdd90753bf0b6bfca0fa31dba8af23c"}, + {file = "rpds_py-0.13.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8f1d466a9747213d3cf7e1afec849cc51edb70d5b4ae9a82eca0f172bfbb6d0"}, + {file = "rpds_py-0.13.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c4c4b4ff3de834ec5c1c690e5a18233ca78547d003eb83664668ccf09ef1398"}, + {file = "rpds_py-0.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:525d19ef0a999229ef0f0a7687ab2c9a00d1b6a47a005006f4d8c4b8975fdcec"}, + {file = "rpds_py-0.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0982b59d014efb84a57128e7e69399fb29ad8f2da5b0a5bcbfd12e211c00492e"}, + {file = "rpds_py-0.13.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f714dd5b705f1c394d1b361d96486c4981055c434a7eafb1a3147ac75e34a3de"}, + {file = "rpds_py-0.13.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:766b573a964389ef0d91a26bb31e1b59dbc5d06eff7707f3dfcec23d93080ba3"}, + {file = "rpds_py-0.13.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2ed65ad3fc5065d13e31e90794e0b52e405b63ae4fab1080caeaadc10a3439c5"}, + {file = "rpds_py-0.13.0-cp38-none-win32.whl", hash = "sha256:9645f7fe10a68b2396d238250b4b264c2632d2eb6ce2cb90aa0fe08adee194be"}, + {file = "rpds_py-0.13.0-cp38-none-win_amd64.whl", hash = "sha256:42d0ad129c102856a364ccc7d356faec017af86b3543a8539795f22b6cabad11"}, + {file = "rpds_py-0.13.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:95c11647fac2a3515ea2614a79e14b7c75025724ad54c91c7db4a6ea5c25ef19"}, + {file = "rpds_py-0.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9435bf4832555c4f769c6be9401664357be33d5f5d8dc58f5c20fb8d21e2c45d"}, + {file = "rpds_py-0.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b1d671a74395344239ee3adbcd8c496525f6a2b2e54c40fec69620a31a8dcb"}, + {file = "rpds_py-0.13.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:13c8061115f1468de6ffdfb1d31b446e1bd814f1ff6e556862169aacb9fbbc5d"}, + {file = "rpds_py-0.13.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a78861123b002725633871a2096c3a4313224aab3d11b953dced87cfba702418"}, + {file = "rpds_py-0.13.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97c1be5a018cdad54fa7e5f7d36b9ab45ef941a1d185987f18bdab0a42344012"}, + {file = "rpds_py-0.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e33b17915c8e4fb2ea8b91bb4c46cba92242c63dd38b87e869ead5ba217e2970"}, + {file = "rpds_py-0.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:153b6d8cf7ae4b9ffd09de6abeda661e351e3e06eaafd18a8c104ea00099b131"}, + {file = 
"rpds_py-0.13.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:da2852201e8e00c86be82c43d6893e6c380ef648ae53f337ffd1eaa35e3dfb8a"}, + {file = "rpds_py-0.13.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a2383f400691fd7bd63347d4d75eb2fd525de9d901799a33a4e896c9885609f8"}, + {file = "rpds_py-0.13.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d5bf560634ea6e9a59ceb2181a6cd6195a03f48cef9a400eb15e197e18f14548"}, + {file = "rpds_py-0.13.0-cp39-none-win32.whl", hash = "sha256:fdaef49055cc0c701fb17b9b34a38ef375e5cdb230b3722d4a12baf9b7cbc6d3"}, + {file = "rpds_py-0.13.0-cp39-none-win_amd64.whl", hash = "sha256:26660c74a20fe249fad75ca00bbfcf60e57c3fdbde92971c88a20e07fea1de64"}, + {file = "rpds_py-0.13.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:28324f2f0247d407daabf7ff357ad9f36126075c92a0cf5319396d96ff4e1248"}, + {file = "rpds_py-0.13.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b431c2c0ff1ea56048a2b066d99d0c2d151ae7625b20be159b7e699f3e80390b"}, + {file = "rpds_py-0.13.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7472bd60a8293217444bdc6a46e516feb8d168da44d5f3fccea0336e88e3b79a"}, + {file = "rpds_py-0.13.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:169063f346b8fd84f47d986c9c48e6094eb38b839c1287e7cb886b8a2b32195d"}, + {file = "rpds_py-0.13.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eef7ee7c70f8b8698be468d54f9f5e01804f3a1dd5657e8a96363dbd52b9b5ec"}, + {file = "rpds_py-0.13.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:762013dd59df12380c5444f61ccbf9ae1297027cabbd7aa25891f724ebf8c8f7"}, + {file = "rpds_py-0.13.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:152570689a27ae0be1d5f50b21dad38d450b9227d0974f23bd400400ea087e88"}, + {file = "rpds_py-0.13.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d70a93a40e55da117c511ddc514642bc7d59a95a99137168a5f3f2f876b47962"}, + {file = "rpds_py-0.13.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e6c6fed07d13b9e0fb689356c40c81f1aa92e3c9d91d8fd5816a0348ccd999f7"}, + {file = "rpds_py-0.13.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:cdded3cf9e36840b09ccef714d5fa74a03f4eb6cf81e694226ed9cb5e6f90de0"}, + {file = "rpds_py-0.13.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e1f40faf406c52c7ae7d208b9140377c06397248978ccb03fbfbb30a0571e359"}, + {file = "rpds_py-0.13.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:c10326e30c97a95b7e1d75e5200ef0b9827aa0f861e331e43b15dfdfd63e669b"}, + {file = "rpds_py-0.13.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:afde37e3763c602d0385bce5c12f262e7b1dd2a0f323e239fa9d7b2d4d5d8509"}, + {file = "rpds_py-0.13.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4084ab6840bd4d79eff3b5f497add847a7db31ce5a0c2d440c90b2d2b7011857"}, + {file = "rpds_py-0.13.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1c9c9cb48ab77ebfa47db25b753f594d4f44959cfe43b713439ca6e3c9329671"}, + {file = "rpds_py-0.13.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:533d728ea5ad5253af3395102723ca8a77b62de47b2295155650c9a88fcdeec8"}, + {file = "rpds_py-0.13.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f22cab655b41033d430f20266bf563b35038a7f01c9a099b0ccfd30a7fb9247"}, + {file = 
"rpds_py-0.13.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a0507342c37132813449393e6e6f351bbff376031cfff1ee6e616402ac7908"}, + {file = "rpds_py-0.13.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4eb1faf8e2ee9a2de3cb3ae4c8c355914cdc85f2cd7f27edf76444c9550ce1e7"}, + {file = "rpds_py-0.13.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a61a152d61e3ae26e0bbba7b2f568f6f25ca0abdeb6553eca7e7c45b59d9b1a9"}, + {file = "rpds_py-0.13.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:e499bf2200eb74774a6f85a7465e3bc5273fa8ef0055590d97a88c1e7ea02eea"}, + {file = "rpds_py-0.13.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1e5becd0de924616ca9a12abeb6458568d1dc8fe5c670d5cdb738402a8a8429d"}, + {file = "rpds_py-0.13.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:70cfe098d915f566eeebcb683f49f9404d2f948432891b6e075354336eda9dfb"}, + {file = "rpds_py-0.13.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:2e73511e88368f93c24efe7c9a20b319eaa828bc7431f8a17713efb9e31a39fa"}, + {file = "rpds_py-0.13.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c07cb9bcccd08f9bc2fd05bf586479df4272ea5a6a70fbcb59b018ed48a5a84d"}, + {file = "rpds_py-0.13.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8c4e84016ba225e09df20fed8befe8c68d14fbeff6078f4a0ff907ae2095e17e"}, + {file = "rpds_py-0.13.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ad465e5a70580ca9c1944f43a9a71bca3a7b74554347fc96ca0479eca8981f9"}, + {file = "rpds_py-0.13.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:189aebd44a07fa7b7966cf78b85bde8335b0b6c3b1c4ef5589f8c03176830107"}, + {file = "rpds_py-0.13.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f50ca0460f1f7a89ab9b8355d83ac993d5998ad4218e76654ecf8afe648d8aa"}, + {file = "rpds_py-0.13.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f6c225011467021879c0482316e42d8a28852fc29f0c15d2a435ff457cadccd4"}, + {file = "rpds_py-0.13.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1e63b32b856c0f08a56b76967d61b6ad811d8d330a8aebb9d21afadd82a296f6"}, + {file = "rpds_py-0.13.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7e5fbe9800f09c56967fda88c4d9272955e781699a66102bd098f22511a3f260"}, + {file = "rpds_py-0.13.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:fea99967d4a978ce95dd52310bcb4a943b77c61725393bca631b0908047d6e2f"}, + {file = "rpds_py-0.13.0.tar.gz", hash = "sha256:35cc91cbb0b775705e0feb3362490b8418c408e9e3c3b9cb3b02f6e495f03ee7"}, ] [[package]] name = "setuptools" -version = "68.0.0" +version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, - {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, + {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, + {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", 
"pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1921,13 +1948,13 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyam [[package]] name = "tenacity" -version = "8.2.2" +version = "8.2.3" description = "Retry code until it succeeds" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "tenacity-8.2.2-py3-none-any.whl", hash = "sha256:2f277afb21b851637e8f52e6a613ff08734c347dc19ade928e519d7d2d8569b0"}, - {file = "tenacity-8.2.2.tar.gz", hash = "sha256:43af037822bd0029025877f3b2d97cc4d7bb0c2991000a3d59d71517c5c969e0"}, + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, ] [package.extras] @@ -1976,29 +2003,28 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6. 
[[package]] name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] name = "urllib3" -version = "2.0.4" +version = "2.1.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, - {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, + {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, + {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2182,86 +2208,81 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [[package]] name = "wrapt" -version = "1.15.0" +version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." 
optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -files = [ - {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, - {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, - {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, - {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, - {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, - {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, - {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, - {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, - {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, - {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, - {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, - {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, - {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, - {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, - {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, - {file 
= "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, - {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, - {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, - {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, - {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, - {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, - {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, - {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, - {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", 
hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, - {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, - {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, - {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, - {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = 
"wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] [[package]] @@ -2353,20 +2374,20 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.16.2" +version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, - {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "7468d4e69733a2dc852d3e40d512f11078cf9b6b120e06054ebf25eebfe83b91" +content-hash = "db43b17660ed2e4537a599c5a045ed47da7cdc44360e55847bf9b2c2181ab157" diff --git a/pyproject.toml b/pyproject.toml index d191992b..38e1c3bd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ fastapi = "^0.95.1" ifps-client = {git = "https://git@github.com/PowerLoom/py-ipfs-client.git"} aiorwlock = "^1.3.0" aio-pika = "^9.1.4" -fastapi-pagination = "^0.12.8" +fastapi-pagination = "^0.12.12" [build-system] diff --git a/snapshotter/auth/gunicorn_auth_entry_launcher.py b/snapshotter/auth/gunicorn_auth_entry_launcher.py index ed8c78f2..dfcbea4a 100644 --- a/snapshotter/auth/gunicorn_auth_entry_launcher.py +++ b/snapshotter/auth/gunicorn_auth_entry_launcher.py @@ -1,11 +1,8 @@ import logging import os -import sys from snapshotter.auth.conf import auth_settings from snapshotter.auth.server_entry import app -from snapshotter.utils.default_logger import FORMAT -from snapshotter.utils.default_logger import logger from snapshotter.utils.gunicorn import InterceptHandler from 
snapshotter.utils.gunicorn import StandaloneApplication from snapshotter.utils.gunicorn import StubbedGunicornLogger @@ -33,9 +30,6 @@ seen.add(name.split('.')[0]) logging.getLogger(name).handlers = [intercept_handler] - logger.add(sys.stdout, format=FORMAT, level=LOG_LEVEL, serialize=JSON_LOGS) - logger.add(sys.stderr, format=FORMAT, level=logging.ERROR, serialize=JSON_LOGS) - options = { 'bind': f'{auth_settings.bind.host}:{auth_settings.bind.port}', 'workers': WORKERS, diff --git a/snapshotter/core_api.py b/snapshotter/core_api.py index 35ab22e5..3ffa3ae4 100644 --- a/snapshotter/core_api.py +++ b/snapshotter/core_api.py @@ -30,13 +30,14 @@ from snapshotter.utils.models.data_models import SnapshotterEpochProcessingReportItem from snapshotter.utils.models.data_models import SnapshotterStates from snapshotter.utils.models.data_models import SnapshotterStateUpdate +from snapshotter.utils.models.data_models import TaskStatusRequest from snapshotter.utils.redis.rate_limiter import load_rate_limiter_scripts from snapshotter.utils.redis.redis_conn import RedisPoolCache +from snapshotter.utils.redis.redis_keys import active_status_key from snapshotter.utils.redis.redis_keys import epoch_id_epoch_released_key from snapshotter.utils.redis.redis_keys import epoch_id_project_to_state_mapping from snapshotter.utils.redis.redis_keys import epoch_process_report_cached_key from snapshotter.utils.redis.redis_keys import project_last_finalized_epoch_key -from snapshotter.utils.redis.redis_keys import active_status_key from snapshotter.utils.rpc import RpcHelper @@ -76,6 +77,9 @@ @app.on_event('startup') async def startup_boilerplate(): + """ + This function initializes various state variables and caches required for the application to function properly. + """ app.state.aioredis_pool = RedisPoolCache(pool_size=100) await app.state.aioredis_pool.populate() app.state.redis_pool = app.state.aioredis_pool._aioredis_pool @@ -97,6 +101,7 @@ async def startup_boilerplate(): app.state.ipfs_singleton = AsyncIPFSClientSingleton(settings.ipfs) await app.state.ipfs_singleton.init_sessions() app.state.ipfs_reader_client = app.state.ipfs_singleton._ipfs_read_client + app.state.epoch_size = 0 # Health check endpoint @@ -105,6 +110,16 @@ async def health_check( request: Request, response: Response, ): + """ + Endpoint to check the health of the Snapshotter service. + + Parameters: + request (Request): The incoming request object. + response (Response): The outgoing response object. + + Returns: + dict: A dictionary containing the status of the service. + """ redis_conn: aioredis.Redis = request.app.state.redis_pool _ = await redis_conn.get(active_status_key) if _: @@ -117,8 +132,6 @@ async def health_check( } return {'status': 'OK'} -# get current epoch - @app.get('/current_epoch') async def get_current_epoch( @@ -129,7 +142,16 @@ async def get_current_epoch( ), ): """ - This endpoint is used to fetch current epoch. + Get the current epoch data from the protocol state contract. + + Args: + request (Request): The incoming request object. + response (Response): The outgoing response object. + rate_limit_auth_dep (RateLimitAuthCheck, optional): The rate limit authentication check dependency. + Defaults to Depends(rate_limit_auth_check,). + + Returns: + dict: A dictionary containing the current epoch data. 
""" if not ( rate_limit_auth_dep.rate_limit_passed and @@ -166,7 +188,6 @@ async def get_current_epoch( return current_epoch -# get epoch info @app.get('/epoch/{epoch_id}') async def get_epoch_info( request: Request, @@ -177,7 +198,16 @@ async def get_epoch_info( ), ): """ - This endpoint is used to fetch epoch info for a given epoch_id. + Get epoch information for a given epoch ID. + + Args: + request (Request): The incoming request object. + response (Response): The outgoing response object. + epoch_id (int): The epoch ID for which to retrieve information. + rate_limit_auth_dep (RateLimitAuthCheck, optional): The rate limit authentication check dependency. Defaults to rate_limit_auth_check. + + Returns: + dict: A dictionary containing epoch information including timestamp, block number, and epoch end. """ if not ( rate_limit_auth_dep.rate_limit_passed and @@ -224,8 +254,18 @@ async def get_project_last_finalized_epoch_info( ), ): """ - This endpoint is used to fetch epoch info for the last finalized epoch for a given project. + Get the last finalized epoch information for a given project. + + Args: + request (Request): The incoming request object. + response (Response): The outgoing response object. + project_id (str): The ID of the project to get the last finalized epoch information for. + rate_limit_auth_dep (RateLimitAuthCheck, optional): The rate limit authentication dependency. Defaults to rate_limit_auth_check. + + Returns: + dict: A dictionary containing the last finalized epoch information for the given project. """ + if not ( rate_limit_auth_dep.rate_limit_passed and rate_limit_auth_dep.authorized and @@ -298,9 +338,8 @@ async def get_project_last_finalized_epoch_info( return epoch_info -# get data for epoch_id, project_id - +# get data for epoch_id, project_id @app.get('/data/{epoch_id}/{project_id}/') async def get_data_for_project_id_epoch_id( request: Request, @@ -312,7 +351,17 @@ async def get_data_for_project_id_epoch_id( ), ): """ - This endpoint is used to fetch data for a given project_id and epoch_id. + Get data for a given project and epoch ID. + + Args: + request (Request): The incoming request. + response (Response): The outgoing response. + project_id (str): The ID of the project. + epoch_id (int): The ID of the epoch. + rate_limit_auth_dep (RateLimitAuthCheck, optional): The rate limit authentication check. Defaults to Depends(rate_limit_auth_check). + + Returns: + dict: The data for the given project and epoch ID. """ if not ( rate_limit_auth_dep.rate_limit_passed and @@ -354,9 +403,8 @@ async def get_data_for_project_id_epoch_id( return data -# get finalized cid for epoch_id, project_id - +# get finalized cid for epoch_id, project_id @app.get('/cid/{epoch_id}/{project_id}/') async def get_finalized_cid_for_project_id_epoch_id( request: Request, @@ -368,7 +416,17 @@ async def get_finalized_cid_for_project_id_epoch_id( ), ): """ - This endpoint is used to fetch finalized cid for a given project_id and epoch_id. + Get finalized cid for a given project_id and epoch_id. + + Args: + request (Request): The incoming request. + response (Response): The outgoing response. + project_id (str): The project id. + epoch_id (int): The epoch id. + rate_limit_auth_dep (RateLimitAuthCheck, optional): The rate limit auth check dependency. Defaults to rate_limit_auth_check. + + Returns: + dict: The finalized cid for the given project_id and epoch_id. 
""" if not ( rate_limit_auth_dep.rate_limit_passed and @@ -418,6 +476,18 @@ async def get_snapshotter_overall_status( rate_limit_auth_check, ), ): + """ + Returns the overall status of the snapshotter. + + Args: + request (Request): The incoming request. + response (Response): The outgoing response. + rate_limit_auth_dep (RateLimitAuthCheck, optional): The rate limit authentication check. Defaults to Depends(rate_limit_auth_check). + + Returns: + dict: A dictionary containing the snapshotter status. + """ + if not ( rate_limit_auth_dep.rate_limit_passed and rate_limit_auth_dep.authorized and @@ -456,6 +526,20 @@ async def get_snapshotter_project_level_status( rate_limit_auth_check, ), ): + """ + Get snapshotter project level status. + + Args: + request (Request): The request object. + response (Response): The response object. + project_id (str): The project ID. + data (bool, optional): Whether to include data in the response. Defaults to False. + rate_limit_auth_dep (RateLimitAuthCheck, optional): The rate limit auth check dependency. Defaults to rate_limit_auth_check. + + Returns: + dict: The snapshotter project status. + """ + if not ( rate_limit_auth_dep.rate_limit_passed and rate_limit_auth_dep.authorized and @@ -501,6 +585,17 @@ async def get_snapshotter_epoch_processing_status( rate_limit_auth_check, ), ) -> Page[SnapshotterEpochProcessingReportItem]: + """ + Endpoint to get the epoch processing status report. + + Args: + request (Request): The incoming request object. + response (Response): The outgoing response object. + rate_limit_auth_dep (RateLimitAuthCheck, optional): The rate limit authentication check dependency. Defaults to Depends(rate_limit_auth_check). + + Returns: + Page[SnapshotterEpochProcessingReportItem]: The paginated epoch processing status report. 
+ """ if not ( rate_limit_auth_dep.rate_limit_passed and rate_limit_auth_dep.authorized and @@ -540,14 +635,30 @@ async def get_snapshotter_epoch_processing_status( 'message': f'Unable to get current epoch, error: {e}', } current_epoch_id = current_epoch['epochId'] + if request.app.state.epoch_size == 0: + [epoch_size] = await request.app.state.anchor_rpc_helper.web3_call( + [request.app.state.protocol_state_contract.functions.EPOCH_SIZE()], + redis_conn=request.app.state.redis_pool, + ) + rest_logger.info(f'Setting Epoch size: {epoch_size}') + request.app.state.epoch_size = epoch_size for epoch_id in range(current_epoch_id, current_epoch_id - 30 - 1, -1): epoch_specific_report = SnapshotterEpochProcessingReportItem.construct() - epoch_specific_report.epochId = epoch_id epoch_release_status = await redis_conn.get( epoch_id_epoch_released_key(epoch_id=epoch_id), ) if not epoch_release_status: continue + epoch_specific_report.epochId = epoch_id + if epoch_id == current_epoch_id: + epoch_specific_report.epochEnd = current_epoch['end'] + else: + epoch_specific_report.epochEnd = current_epoch['end'] - ( + (current_epoch_id - epoch_id) * request.app.state.epoch_size + ) + rest_logger.debug( + f'Epoch End for epoch_id: {epoch_id} is {epoch_specific_report.epochEnd}', + ) epoch_specific_report.transitionStatus = dict() if epoch_release_status: epoch_specific_report.transitionStatus['EPOCH_RELEASED'] = SnapshotterStateUpdate( @@ -572,7 +683,95 @@ async def get_snapshotter_epoch_processing_status( epoch_processing_final_report.append(epoch_specific_report) await redis_conn.set( epoch_process_report_cached_key, - json.dumps(list(map(lambda x: x.json(), epoch_processing_final_report))), + json.dumps(list(map(lambda x: x.dict(), epoch_processing_final_report))), ex=60, ) return paginate(epoch_processing_final_report) + + +@app.post('/task_status') +async def get_task_status_post( + request: Request, + response: Response, + task_status_request: TaskStatusRequest, + rate_limit_auth_dep: RateLimitAuthCheck = Depends( + rate_limit_auth_check, + ), +): + """ + Endpoint to get the status of a task for a given wallet address. + + Args: + request (Request): The incoming request object. + response (Response): The outgoing response object. + task_status_request (TaskStatusRequest): The request body containing the task type and wallet address. + rate_limit_auth_dep (RateLimitAuthCheck, optional): The rate limit and authorization dependency. Defaults to rate_limit_auth_check. + + Returns: + dict: A dictionary containing the status of the task and a message. 
+ """ + if not ( + rate_limit_auth_dep.rate_limit_passed and + rate_limit_auth_dep.authorized and + rate_limit_auth_dep.owner.active == UserStatusEnum.active + ): + return inject_rate_limit_fail_response(rate_limit_auth_dep) + + # check wallet address is valid EVM address + try: + Web3.toChecksumAddress(task_status_request.wallet_address) + except: + response.status_code = 400 + return { + 'status': 'error', + 'message': f'Invalid wallet address: {task_status_request.wallet_address}', + } + + project_id = f'{task_status_request.task_type}:{task_status_request.wallet_address.lower()}:{settings.namespace}' + try: + + # check redis first, if doesn't exist, fetch from contract + last_finalized_epoch = await request.app.state.redis_pool.get( + project_last_finalized_epoch_key(project_id), + ) + + if last_finalized_epoch is None: + + [last_finalized_epoch] = await request.app.state.anchor_rpc_helper.web3_call( + [request.app.state.protocol_state_contract.functions.lastFinalizedSnapshot(project_id)], + redis_conn=request.app.state.redis_pool, + ) + # cache it in redis + if last_finalized_epoch != 0: + await request.app.state.redis_pool.set( + project_last_finalized_epoch_key(project_id), + last_finalized_epoch, + ) + else: + last_finalized_epoch = int(last_finalized_epoch.decode('utf-8')) + + except Exception as e: + rest_logger.exception( + 'Exception in get_current_epoch', + e=e, + ) + response.status_code = 500 + return { + 'status': 'error', + 'message': f'Unable to get last_finalized_epoch, error: {e}', + } + else: + + auth_redis_conn: aioredis.Redis = request.app.state.auth_aioredis_pool + await incr_success_calls_count(auth_redis_conn, rate_limit_auth_dep) + + if last_finalized_epoch > 0: + return { + 'completed': True, + 'message': f'Task {task_status_request.task_type} for wallet {task_status_request.wallet_address} was completed in epoch {last_finalized_epoch}', + } + else: + return { + 'completed': False, + 'message': f'Task {task_status_request.task_type} for wallet {task_status_request.wallet_address} is not completed yet', + } diff --git a/snapshotter/init_rabbitmq.py b/snapshotter/init_rabbitmq.py index ed21be94..189d0183 100644 --- a/snapshotter/init_rabbitmq.py +++ b/snapshotter/init_rabbitmq.py @@ -1,4 +1,3 @@ -import aio_pika import pika from snapshotter.settings.config import settings @@ -9,6 +8,12 @@ def create_rabbitmq_conn() -> pika.BlockingConnection: + """ + Creates a connection to RabbitMQ using the settings specified in the application configuration. + + Returns: + A `pika.BlockingConnection` object representing the connection to RabbitMQ. + """ c = pika.BlockingConnection( pika.ConnectionParameters( host=settings.rabbitmq.host, @@ -27,6 +32,16 @@ def create_rabbitmq_conn() -> pika.BlockingConnection: def processhub_command_publish( ch: pika.adapters.blocking_connection.BlockingChannel, cmd: str, ) -> None: + """ + Publishes a command to the processhub-commands exchange. + + Args: + ch (pika.adapters.blocking_connection.BlockingChannel): The channel to use for publishing. + cmd (str): The command to publish. + + Returns: + None + """ ch.basic_publish( exchange=( f'{settings.rabbitmq.setup.core.exchange}:{settings.namespace}' @@ -45,6 +60,12 @@ def processhub_command_publish( def get_snapshot_queue_routing_key_pattern() -> tuple[str, str]: + """ + Returns the queue name and routing key pattern for snapshot messages. + + Returns: + A tuple containing the queue name and routing key pattern. 
+ """ queue_name = ( f'powerloom-backend-cb-snapshot:{settings.namespace}:{settings.instance_id}' ) @@ -53,6 +74,12 @@ def get_snapshot_queue_routing_key_pattern() -> tuple[str, str]: def get_aggregate_queue_routing_key_pattern() -> tuple[str, str]: + """ + Returns the queue name and routing key pattern for the aggregate queue. + + Returns: + A tuple containing the queue name and routing key pattern. + """ queue_name = ( f'powerloom-backend-cb-aggregate:{settings.namespace}:{settings.instance_id}' ) @@ -61,12 +88,24 @@ def get_aggregate_queue_routing_key_pattern() -> tuple[str, str]: def get_delegate_worker_request_queue_routing_key() -> tuple[str, str]: + """ + Returns the name and routing key for the request queue used by the delegated worker. + + Returns: + A tuple containing the request queue name and routing key. + """ request_queue_routing_key = f'powerloom-delegated-worker:{settings.namespace}:{settings.instance_id}:Request' request_queue_name = f'powerloom-delegated-worker-request:{settings.namespace}:{settings.instance_id}' return request_queue_name, request_queue_routing_key def get_delegate_worker_response_queue_routing_key_pattern() -> tuple[str, str]: + """ + Returns a tuple containing the response queue name and routing key pattern for a delegated worker. + + Returns: + tuple[str, str]: A tuple containing the response queue name and routing key pattern. + """ response_queue_routing_key = f'powerloom-delegated-worker:{settings.namespace}:{settings.instance_id}:Response.*' response_queue_name = f'powerloom-delegated-worker-response:{settings.namespace}:{settings.instance_id}' return response_queue_name, response_queue_routing_key @@ -79,6 +118,19 @@ def init_queue( exchange_name: str, bind: bool = True, ) -> None: + """ + Declare a queue and optionally bind it to an exchange with a routing key. + + Args: + ch: A blocking channel object from a Pika connection. + queue_name: The name of the queue to declare. + routing_key: The routing key to use for binding the queue to an exchange. + exchange_name: The name of the exchange to bind the queue to. + bind: Whether or not to bind the queue to the exchange. Defaults to True. + + Returns: + None + """ ch.queue_declare(queue_name) if bind: ch.queue_bind( @@ -101,6 +153,18 @@ def init_topic_exchange_and_queue( queue_name: str, routing_key_pattern: str, ) -> None: + """ + Initialize a topic exchange and queue in RabbitMQ. + + Args: + ch: A blocking channel object for RabbitMQ. + exchange_name: The name of the exchange to declare. + queue_name: The name of the queue to declare. + routing_key_pattern: The routing key pattern to use for the queue. + + Returns: + None + """ ch.exchange_declare( exchange=exchange_name, exchange_type='topic', durable=True, ) @@ -118,6 +182,15 @@ def init_topic_exchange_and_queue( def init_callback_queue( ch: pika.adapters.blocking_connection.BlockingChannel, ) -> None: + """ + Initializes the callback queue for snapshot and aggregate. + + Args: + ch (pika.adapters.blocking_connection.BlockingChannel): The blocking channel object. + + Returns: + None + """ callback_exchange_name = ( f'{settings.rabbitmq.setup.callbacks.exchange}:{settings.namespace}' ) @@ -143,6 +216,15 @@ def init_callback_queue( def init_commit_payload_queue( ch: pika.adapters.blocking_connection.BlockingChannel, ) -> None: + """ + Initializes a RabbitMQ queue for commit payloads. + + Args: + ch (pika.adapters.blocking_connection.BlockingChannel): The RabbitMQ channel to use. 
+ + Returns: + None + """ commit_payload_exchange_name = ( f'{settings.rabbitmq.setup.commit_payload.exchange}:{settings.namespace}' ) @@ -161,19 +243,36 @@ def init_commit_payload_queue( def init_delegate_worker_queue( ch: pika.adapters.blocking_connection.BlockingChannel, ) -> None: - delegated_worker_exchange_name = ( - f'{settings.rabbitmq.setup.delegated_worker.exchange}:{settings.namespace}' + """ + Initializes the delegate worker queue by declaring the response and request exchanges and initializing the request queue. + + Args: + ch (pika.adapters.blocking_connection.BlockingChannel): The blocking channel to use for declaring exchanges and initializing the queue. + + Returns: + None + """ + delegated_worker_response_exchange_name = ( + f'{settings.rabbitmq.setup.delegated_worker.exchange}:Response:{settings.namespace}' + ) + + ch.exchange_declare( + exchange=delegated_worker_response_exchange_name, exchange_type='direct', durable=True, + ) + + delegated_worker_request_exchange_name = ( + f'{settings.rabbitmq.setup.delegated_worker.exchange}:Request:{settings.namespace}' ) ch.exchange_declare( - exchange=delegated_worker_exchange_name, exchange_type='direct', durable=True, + exchange=delegated_worker_request_exchange_name, exchange_type='direct', durable=True, ) request_queue_name, request_queue_routing_key = get_delegate_worker_request_queue_routing_key() init_queue( ch, - exchange_name=delegated_worker_exchange_name, + exchange_name=delegated_worker_request_exchange_name, queue_name=request_queue_name, routing_key=request_queue_routing_key, bind=True, @@ -183,6 +282,17 @@ def init_delegate_worker_queue( def init_event_detector_queue( ch: pika.adapters.blocking_connection.BlockingChannel, ) -> None: + """ + Initializes the event detector queue by creating a topic exchange and a queue + with the given exchange name, queue name, and routing key pattern. + + Args: + ch (pika.adapters.blocking_connection.BlockingChannel): The blocking channel + to use for creating the exchange and queue. + + Returns: + None + """ event_detector_exchange_name = ( f'{settings.rabbitmq.setup.event_detector.exchange}:{settings.namespace}' ) @@ -199,6 +309,9 @@ def init_event_detector_queue( def init_exchanges_queues(): + """ + Initializes the RabbitMQ Direct exchange and queues required for snapshotter. + """ c = create_rabbitmq_conn() ch: pika.adapters.blocking_connection.BlockingChannel = c.channel() # core exchange remains same for multiple snapshotter instances diff --git a/snapshotter/launch_process_hub_core.py b/snapshotter/launch_process_hub_core.py index 79e25530..14ec1628 100644 --- a/snapshotter/launch_process_hub_core.py +++ b/snapshotter/launch_process_hub_core.py @@ -8,10 +8,20 @@ def generic_exit_handler(signum, frame): + """ + A signal handler function that raises a GenericExitOnSignal exception. + + Args: + signum (int): The signal number. + frame (frame): The current stack frame at the time the signal was received. + """ raise GenericExitOnSignal def main(): + """ + Launches the ProcessHubCore and waits for it to join. Handles SIGINT, SIGTERM, and SIGQUIT signals. 
+ """ for signame in [signal.SIGINT, signal.SIGTERM, signal.SIGQUIT]: signal.signal(signame, generic_exit_handler) diff --git a/snapshotter/modules/computes b/snapshotter/modules/computes new file mode 160000 index 00000000..b4295c41 --- /dev/null +++ b/snapshotter/modules/computes @@ -0,0 +1 @@ +Subproject commit b4295c41b81ca0f8ba16df7200620c72dc5afffb diff --git a/snapshotter/modules/pooler/flow.puml b/snapshotter/modules/pooler/flow.puml deleted file mode 100644 index 0c782eae..00000000 --- a/snapshotter/modules/pooler/flow.puml +++ /dev/null @@ -1,23 +0,0 @@ -@startuml - -AggregateProcessor -> ProtocolStateContractorRedis : Get project first epoch - -alt calculating aggregate for the first time - AggregateProcessor -> ProtocolStateContractorRedis : Get required base snapshots and calculate aggregate from scratch -else calculating aggregate other than the first time - AggregateProcessor -> ProtocolStateContractorRedis: Get last Finalized Aggregate Snapshot - ProtocolStateContractorRedis -> AggregateProcessor: LastAggregateSnapshot - alt if last Finalized Aggregate Snapshot is not found - AggregateProcessor -> ProtocolStateContractorRedis : Get required base snapshots and calculate aggregate from scratch - else last Finalized Aggregate Snapshot is found - AggregateProcessor -> ProtocolStateContractorRedis : Get required (remaining) base snapshots - LastAggregateSnapshot -> LastAggregateSnapshot: Calculate aggregate from last Finalized Aggregate Snapshot by adding all missing base snapshots - end - - AggregateProcessor -> ProtocolStateContractorRedis: Fetch tail snapshots for corresponding added snapshots and mark for removal from Aggregate Snapshot - - LastAggregateSnapshot -> LastAggregateSnapshot: Remove marked tail snapshots from Aggregate Snapshot - - LastAggregateSnapshot -> AggregateProcessor: Finalized Aggregate Snapshot -end -@enduml diff --git a/snapshotter/modules/pooler/uniswapv2/__init__.py b/snapshotter/modules/pooler/uniswapv2/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/snapshotter/modules/pooler/uniswapv2/aggregate/__init__.py b/snapshotter/modules/pooler/uniswapv2/aggregate/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/snapshotter/modules/pooler/uniswapv2/aggregate/multi_uniswap_stats.py b/snapshotter/modules/pooler/uniswapv2/aggregate/multi_uniswap_stats.py deleted file mode 100644 index 22a0a867..00000000 --- a/snapshotter/modules/pooler/uniswapv2/aggregate/multi_uniswap_stats.py +++ /dev/null @@ -1,114 +0,0 @@ -from ipfs_client.main import AsyncIPFSClient -from redis import asyncio as aioredis - -from ..utils.models.message_models import UniswapPairTotalReservesSnapshot -from ..utils.models.message_models import UniswapStatsSnapshot -from ..utils.models.message_models import UniswapTradesAggregateSnapshot -from snapshotter.utils.callback_helpers import GenericProcessorAggregate -from snapshotter.utils.data_utils import get_project_epoch_snapshot -from snapshotter.utils.data_utils import get_sumbmission_data_bulk -from snapshotter.utils.data_utils import get_tail_epoch_id -from snapshotter.utils.default_logger import logger -from snapshotter.utils.models.message_models import PowerloomCalculateAggregateMessage -from snapshotter.utils.rpc import RpcHelper - - -class AggreagateStatsProcessor(GenericProcessorAggregate): - transformation_lambdas = None - - def __init__(self) -> None: - self.transformation_lambdas = [] - self._logger = logger.bind(module='AggregateStatsProcessor') - - async def compute( - self, - 
msg_obj: PowerloomCalculateAggregateMessage, - redis: aioredis.Redis, - rpc_helper: RpcHelper, - anchor_rpc_helper: RpcHelper, - ipfs_reader: AsyncIPFSClient, - protocol_state_contract, - project_id: str, - - ): - self._logger.info(f'Calculating unswap stats for {msg_obj}') - - epoch_id = msg_obj.epochId - - snapshot_mapping = {} - - snapshot_data = await get_sumbmission_data_bulk( - redis, [msg.snapshotCid for msg in msg_obj.messages], ipfs_reader, [ - msg.projectId for msg in msg_obj.messages - ], - ) - - complete_flags = [] - for msg, data in zip(msg_obj.messages, snapshot_data): - if not data: - continue - if 'reserves' in msg.projectId: - snapshot = UniswapPairTotalReservesSnapshot.parse_obj(data) - elif 'volume' in msg.projectId: - snapshot = UniswapTradesAggregateSnapshot.parse_obj(data) - complete_flags.append(snapshot.complete) - snapshot_mapping[msg.projectId] = snapshot - - stats_data = { - 'volume24h': 0, - 'tvl': 0, - 'fee24h': 0, - 'volumeChange24h': 0, - 'tvlChange24h': 0, - 'feeChange24h': 0, - } - # iterate over all snapshots and generate stats data - for snapshot_project_id in snapshot_mapping.keys(): - snapshot = snapshot_mapping[snapshot_project_id] - - if 'reserves' in snapshot_project_id: - max_epoch_block = snapshot.chainHeightRange.end - - stats_data['tvl'] += snapshot.token0ReservesUSD[f'block{max_epoch_block}'] + \ - snapshot.token1ReservesUSD[f'block{max_epoch_block}'] - - elif 'volume' in snapshot_project_id: - stats_data['volume24h'] += snapshot.totalTrade - stats_data['fee24h'] += snapshot.totalFee - - # source project tail epoch - tail_epoch_id, extrapolated_flag = await get_tail_epoch_id( - redis, protocol_state_contract, anchor_rpc_helper, msg_obj.epochId, 86400, project_id, - ) - if not extrapolated_flag: - previous_stats_snapshot_data = await get_project_epoch_snapshot( - redis, protocol_state_contract, anchor_rpc_helper, ipfs_reader, tail_epoch_id, project_id, - ) - - if previous_stats_snapshot_data: - previous_stats_snapshot = UniswapStatsSnapshot.parse_obj(previous_stats_snapshot_data) - - # calculate change in percentage - stats_data['volumeChange24h'] = (stats_data['volume24h'] - previous_stats_snapshot.volume24h) / \ - previous_stats_snapshot.volume24h * 100 - - stats_data['tvlChange24h'] = (stats_data['tvl'] - previous_stats_snapshot.tvl) / \ - previous_stats_snapshot.tvl * 100 - - stats_data['feeChange24h'] = (stats_data['fee24h'] - previous_stats_snapshot.fee24h) / \ - previous_stats_snapshot.fee24h * 100 - - stats_snapshot = UniswapStatsSnapshot( - epochId=epoch_id, - volume24h=stats_data['volume24h'], - tvl=stats_data['tvl'], - fee24h=stats_data['fee24h'], - volumeChange24h=stats_data['volumeChange24h'], - tvlChange24h=stats_data['tvlChange24h'], - feeChange24h=stats_data['feeChange24h'], - ) - - if not all(complete_flags): - stats_snapshot.complete = False - - return stats_snapshot diff --git a/snapshotter/modules/pooler/uniswapv2/aggregate/multi_uniswap_top_pairs_24h.py b/snapshotter/modules/pooler/uniswapv2/aggregate/multi_uniswap_top_pairs_24h.py deleted file mode 100644 index 7b2fbf2c..00000000 --- a/snapshotter/modules/pooler/uniswapv2/aggregate/multi_uniswap_top_pairs_24h.py +++ /dev/null @@ -1,107 +0,0 @@ -from ipfs_client.main import AsyncIPFSClient -from redis import asyncio as aioredis - -from ..utils.helpers import get_pair_metadata -from ..utils.models.message_models import UniswapPairTotalReservesSnapshot -from ..utils.models.message_models import UniswapTopPair24hSnapshot -from ..utils.models.message_models import 
UniswapTopPairs24hSnapshot -from ..utils.models.message_models import UniswapTradesAggregateSnapshot -from snapshotter.utils.callback_helpers import GenericProcessorAggregate -from snapshotter.utils.data_utils import get_sumbmission_data_bulk -from snapshotter.utils.default_logger import logger -from snapshotter.utils.models.message_models import PowerloomCalculateAggregateMessage -from snapshotter.utils.rpc import RpcHelper - - -class AggreagateTopPairsProcessor(GenericProcessorAggregate): - transformation_lambdas = None - - def __init__(self) -> None: - self.transformation_lambdas = [] - self._logger = logger.bind(module='AggregateTopPairsProcessor') - - async def compute( - self, - msg_obj: PowerloomCalculateAggregateMessage, - redis: aioredis.Redis, - rpc_helper: RpcHelper, - anchor_rpc_helper: RpcHelper, - ipfs_reader: AsyncIPFSClient, - protocol_state_contract, - project_id: str, - - ): - self._logger.info(f'Calculating 24h top pairs trade volume and reserves data for {msg_obj}') - - epoch_id = msg_obj.epochId - - snapshot_mapping = {} - all_pair_metadata = {} - - snapshot_data = await get_sumbmission_data_bulk( - redis, [msg.snapshotCid for msg in msg_obj.messages], ipfs_reader, [ - msg.projectId for msg in msg_obj.messages - ], - ) - - complete_flags = [] - for msg, data in zip(msg_obj.messages, snapshot_data): - if not data: - continue - if 'reserves' in msg.projectId: - snapshot = UniswapPairTotalReservesSnapshot.parse_obj(data) - elif 'volume' in msg.projectId: - snapshot = UniswapTradesAggregateSnapshot.parse_obj(data) - complete_flags.append(snapshot.complete) - snapshot_mapping[msg.projectId] = snapshot - - contract_address = msg.projectId.split(':')[-2] - if contract_address not in all_pair_metadata: - pair_metadata = await get_pair_metadata( - contract_address, - redis_conn=redis, - rpc_helper=rpc_helper, - ) - - all_pair_metadata[contract_address] = pair_metadata - - # iterate over all snapshots and generate pair data - pair_data = {} - for snapshot_project_id in snapshot_mapping.keys(): - snapshot = snapshot_mapping[snapshot_project_id] - contract = snapshot_project_id.split(':')[-2] - pair_metadata = all_pair_metadata[contract] - - if contract not in pair_data: - pair_data[contract] = { - 'address': contract, - 'name': pair_metadata['pair']['symbol'], - 'liquidity': 0, - 'volume24h': 0, - 'fee24h': 0, - } - - if 'reserves' in snapshot_project_id: - max_epoch_block = snapshot.chainHeightRange.end - pair_data[contract]['liquidity'] += snapshot.token0ReservesUSD[f'block{max_epoch_block}'] + \ - snapshot.token1ReservesUSD[f'block{max_epoch_block}'] - - elif 'volume' in snapshot_project_id: - pair_data[contract]['volume24h'] += snapshot.totalTrade - pair_data[contract]['fee24h'] += snapshot.totalFee - - top_pairs = [] - for pair in pair_data.values(): - top_pairs.append(UniswapTopPair24hSnapshot.parse_obj(pair)) - - top_pairs = sorted(top_pairs, key=lambda x: x.liquidity, reverse=True) - - top_pairs_snapshot = UniswapTopPairs24hSnapshot( - epochId=epoch_id, - pairs=top_pairs, - ) - - if not all(complete_flags): - top_pairs_snapshot.complete = False - - return top_pairs_snapshot diff --git a/snapshotter/modules/pooler/uniswapv2/aggregate/multi_uniswap_top_pairs_7d.py b/snapshotter/modules/pooler/uniswapv2/aggregate/multi_uniswap_top_pairs_7d.py deleted file mode 100644 index 31ddca77..00000000 --- a/snapshotter/modules/pooler/uniswapv2/aggregate/multi_uniswap_top_pairs_7d.py +++ /dev/null @@ -1,96 +0,0 @@ -from ipfs_client.main import AsyncIPFSClient -from redis import asyncio 
as aioredis - -from ..utils.helpers import get_pair_metadata -from ..utils.models.message_models import UniswapTopPair7dSnapshot -from ..utils.models.message_models import UniswapTopPairs7dSnapshot -from ..utils.models.message_models import UniswapTradesAggregateSnapshot -from snapshotter.utils.callback_helpers import GenericProcessorAggregate -from snapshotter.utils.data_utils import get_sumbmission_data_bulk -from snapshotter.utils.default_logger import logger -from snapshotter.utils.models.message_models import PowerloomCalculateAggregateMessage -from snapshotter.utils.rpc import RpcHelper - - -class AggreagateTopPairsProcessor(GenericProcessorAggregate): - transformation_lambdas = None - - def __init__(self) -> None: - self.transformation_lambdas = [] - self._logger = logger.bind(module='AggregateTopPairsProcessor') - - async def compute( - self, - msg_obj: PowerloomCalculateAggregateMessage, - redis: aioredis.Redis, - rpc_helper: RpcHelper, - anchor_rpc_helper: RpcHelper, - ipfs_reader: AsyncIPFSClient, - protocol_state_contract, - project_id: str, - - ): - self._logger.info(f'Calculating 7d top pairs trade volume data for {msg_obj}') - - epoch_id = msg_obj.epochId - - snapshot_mapping = {} - all_pair_metadata = {} - - snapshot_data = await get_sumbmission_data_bulk( - redis, [msg.snapshotCid for msg in msg_obj.messages], ipfs_reader, [ - msg.projectId for msg in msg_obj.messages - ], - ) - - complete_flags = [] - for msg, data in zip(msg_obj.messages, snapshot_data): - if not data: - continue - snapshot = UniswapTradesAggregateSnapshot.parse_obj(data) - complete_flags.append(snapshot.complete) - snapshot_mapping[msg.projectId] = snapshot - - contract_address = msg.projectId.split(':')[-2] - if contract_address not in all_pair_metadata: - pair_metadata = await get_pair_metadata( - contract_address, - redis_conn=redis, - rpc_helper=rpc_helper, - ) - - all_pair_metadata[contract_address] = pair_metadata - - # iterate over all snapshots and generate pair data - pair_data = {} - for snapshot_project_id in snapshot_mapping.keys(): - snapshot = snapshot_mapping[snapshot_project_id] - contract = snapshot_project_id.split(':')[-2] - pair_metadata = all_pair_metadata[contract] - - if contract not in pair_data: - pair_data[contract] = { - 'address': contract, - 'name': pair_metadata['pair']['symbol'], - 'volume7d': 0, - 'fee7d': 0, - } - - pair_data[contract]['volume7d'] += snapshot.totalTrade - pair_data[contract]['fee7d'] += snapshot.totalFee - - top_pairs = [] - for pair in pair_data.values(): - top_pairs.append(UniswapTopPair7dSnapshot.parse_obj(pair)) - - top_pairs = sorted(top_pairs, key=lambda x: x.volume7d, reverse=True) - - top_pairs_snapshot = UniswapTopPairs7dSnapshot( - epochId=epoch_id, - pairs=top_pairs, - ) - - if not all(complete_flags): - top_pairs_snapshot.complete = False - - return top_pairs_snapshot diff --git a/snapshotter/modules/pooler/uniswapv2/aggregate/multi_uniswap_top_tokens.py b/snapshotter/modules/pooler/uniswapv2/aggregate/multi_uniswap_top_tokens.py deleted file mode 100644 index b27a76f0..00000000 --- a/snapshotter/modules/pooler/uniswapv2/aggregate/multi_uniswap_top_tokens.py +++ /dev/null @@ -1,149 +0,0 @@ -from ipfs_client.main import AsyncIPFSClient -from redis import asyncio as aioredis - -from ..utils.helpers import get_pair_metadata -from ..utils.models.message_models import UniswapPairTotalReservesSnapshot -from ..utils.models.message_models import UniswapTopTokenSnapshot -from ..utils.models.message_models import UniswapTopTokensSnapshot -from 
..utils.models.message_models import UniswapTradesAggregateSnapshot -from snapshotter.utils.callback_helpers import GenericProcessorAggregate -from snapshotter.utils.data_utils import get_project_epoch_snapshot -from snapshotter.utils.data_utils import get_sumbmission_data_bulk -from snapshotter.utils.data_utils import get_tail_epoch_id -from snapshotter.utils.default_logger import logger -from snapshotter.utils.models.message_models import PowerloomCalculateAggregateMessage -from snapshotter.utils.rpc import RpcHelper - - -class AggreagateTopTokensProcessor(GenericProcessorAggregate): - transformation_lambdas = None - - def __init__(self) -> None: - self.transformation_lambdas = [] - self._logger = logger.bind(module='AggregateTopTokensProcessor') - - async def compute( - self, - msg_obj: PowerloomCalculateAggregateMessage, - redis: aioredis.Redis, - rpc_helper: RpcHelper, - anchor_rpc_helper: RpcHelper, - ipfs_reader: AsyncIPFSClient, - protocol_state_contract, - project_id: str, - - ): - - self._logger.info(f'Calculating top tokens data for {msg_obj}') - epoch_id = msg_obj.epochId - - snapshot_mapping = {} - projects_metadata = {} - - snapshot_data = await get_sumbmission_data_bulk( - redis, [msg.snapshotCid for msg in msg_obj.messages], ipfs_reader, [ - msg.projectId for msg in msg_obj.messages - ], - ) - - complete_flags = [] - for msg, data in zip(msg_obj.messages, snapshot_data): - if not data: - continue - if 'reserves' in msg.projectId: - snapshot = UniswapPairTotalReservesSnapshot.parse_obj(data) - elif 'volume' in msg.projectId: - snapshot = UniswapTradesAggregateSnapshot.parse_obj(data) - complete_flags.append(snapshot.complete) - snapshot_mapping[msg.projectId] = snapshot - - contract_address = msg.projectId.split(':')[-2] - pair_metadata = await get_pair_metadata( - contract_address, - redis_conn=redis, - rpc_helper=rpc_helper, - ) - - projects_metadata[msg.projectId] = pair_metadata - - # iterate over all snapshots and generate token data - token_data = {} - for snapshot_project_id in snapshot_mapping.keys(): - snapshot = snapshot_mapping[snapshot_project_id] - project_metadata = projects_metadata[snapshot_project_id] - - token0 = project_metadata['token0'] - token1 = project_metadata['token1'] - - if token0['address'] not in token_data: - token_data[token0['address']] = { - 'address': token0['address'], - 'name': token0['name'], - 'symbol': token0['symbol'], - 'decimals': token0['decimals'], - 'price': 0, - 'volume24h': 0, - 'liquidity': 0, - 'priceChange24h': 0, - } - - if token1['address'] not in token_data: - token_data[token1['address']] = { - 'address': token1['address'], - 'name': token1['name'], - 'symbol': token1['symbol'], - 'decimals': token1['decimals'], - 'price': 0, - 'volume24h': 0, - 'liquidity': 0, - 'priceChange24h': 0, - } - - if 'reserves' in snapshot_project_id: - max_epoch_block = snapshot.chainHeightRange.end - - token_data[token0['address']]['price'] = snapshot.token0Prices[f'block{max_epoch_block}'] - token_data[token1['address']]['price'] = snapshot.token1Prices[f'block{max_epoch_block}'] - - token_data[token0['address']]['liquidity'] += snapshot.token0ReservesUSD[f'block{max_epoch_block}'] - token_data[token1['address']]['liquidity'] += snapshot.token1ReservesUSD[f'block{max_epoch_block}'] - - elif 'volume' in snapshot_project_id: - - token_data[token0['address']]['volume24h'] += snapshot.token0TradeVolumeUSD - token_data[token1['address']]['volume24h'] += snapshot.token1TradeVolumeUSD - - tail_epoch_id, extrapolated_flag = await get_tail_epoch_id( 
- redis, protocol_state_contract, anchor_rpc_helper, msg_obj.epochId, 86400, project_id, - ) - - if not extrapolated_flag: - previous_top_tokens_snapshot_data = await get_project_epoch_snapshot( - redis, protocol_state_contract, anchor_rpc_helper, ipfs_reader, tail_epoch_id, project_id, - ) - - if previous_top_tokens_snapshot_data: - previous_top_tokens_snapshot = UniswapTopTokensSnapshot.parse_obj(previous_top_tokens_snapshot_data) - for token in previous_top_tokens_snapshot.tokens: - if token.address in token_data: - price_before_24h = token.price - - if price_before_24h > 0: - token_data[token.address]['priceChange24h'] = ( - token_data[token.address]['price'] - price_before_24h - ) / price_before_24h * 100 - - top_tokens = [] - for token in token_data.values(): - top_tokens.append(UniswapTopTokenSnapshot.parse_obj(token)) - - top_tokens = sorted(top_tokens, key=lambda x: x.liquidity, reverse=True) - - top_tokens_snapshot = UniswapTopTokensSnapshot( - epochId=epoch_id, - tokens=top_tokens, - ) - if not all(complete_flags): - top_tokens_snapshot.complete = False - - return top_tokens_snapshot diff --git a/snapshotter/modules/pooler/uniswapv2/aggregate/single_uniswap_trade_volume_24h.py b/snapshotter/modules/pooler/uniswapv2/aggregate/single_uniswap_trade_volume_24h.py deleted file mode 100644 index c720d5ec..00000000 --- a/snapshotter/modules/pooler/uniswapv2/aggregate/single_uniswap_trade_volume_24h.py +++ /dev/null @@ -1,268 +0,0 @@ -import asyncio -import json - -from ipfs_client.main import AsyncIPFSClient -from redis import asyncio as aioredis - -from ..utils.models.message_models import UniswapTradesAggregateSnapshot -from ..utils.models.message_models import UniswapTradesSnapshot -from snapshotter.utils.callback_helpers import GenericProcessorAggregate -from snapshotter.utils.data_utils import get_project_epoch_snapshot_bulk -from snapshotter.utils.data_utils import get_project_first_epoch -from snapshotter.utils.data_utils import get_submission_data -from snapshotter.utils.data_utils import get_tail_epoch_id -from snapshotter.utils.default_logger import logger -from snapshotter.utils.models.message_models import PowerloomSnapshotSubmittedMessage -from snapshotter.utils.redis.redis_keys import project_finalized_data_zset -from snapshotter.utils.redis.redis_keys import submitted_base_snapshots_key -from snapshotter.utils.rpc import RpcHelper - - -class AggreagateTradeVolumeProcessor(GenericProcessorAggregate): - transformation_lambdas = None - - def __init__(self) -> None: - self.transformation_lambdas = [] - self._logger = logger.bind(module='AggregateTradeVolumeProcessor24h') - - def _add_aggregate_snapshot( - self, - previous_aggregate_snapshot: UniswapTradesAggregateSnapshot, - current_snapshot: UniswapTradesSnapshot, - ): - - previous_aggregate_snapshot.totalTrade += current_snapshot.totalTrade - previous_aggregate_snapshot.totalFee += current_snapshot.totalFee - previous_aggregate_snapshot.token0TradeVolume += current_snapshot.token0TradeVolume - previous_aggregate_snapshot.token1TradeVolume += current_snapshot.token1TradeVolume - previous_aggregate_snapshot.token0TradeVolumeUSD += current_snapshot.token0TradeVolumeUSD - previous_aggregate_snapshot.token1TradeVolumeUSD += current_snapshot.token1TradeVolumeUSD - - return previous_aggregate_snapshot - - def _remove_aggregate_snapshot( - self, - previous_aggregate_snapshot: UniswapTradesAggregateSnapshot, - current_snapshot: UniswapTradesSnapshot, - ): - - previous_aggregate_snapshot.totalTrade -= current_snapshot.totalTrade - 
previous_aggregate_snapshot.totalFee -= current_snapshot.totalFee - previous_aggregate_snapshot.token0TradeVolume -= current_snapshot.token0TradeVolume - previous_aggregate_snapshot.token1TradeVolume -= current_snapshot.token1TradeVolume - previous_aggregate_snapshot.token0TradeVolumeUSD -= current_snapshot.token0TradeVolumeUSD - previous_aggregate_snapshot.token1TradeVolumeUSD -= current_snapshot.token1TradeVolumeUSD - - return previous_aggregate_snapshot - - async def _calculate_from_scratch( - self, - msg_obj: PowerloomSnapshotSubmittedMessage, - redis: aioredis.Redis, - rpc_helper: RpcHelper, - anchor_rpc_helper: RpcHelper, - ipfs_reader: AsyncIPFSClient, - protocol_state_contract, - project_id: str, - ): - calculate_from_scratch_in_progress = await redis.get(f'calculate_from_scratch:{project_id}') - if calculate_from_scratch_in_progress: - self._logger.info('calculate_from_scratch already in progress, skipping') - return - - self._logger.info('building aggregate from scratch') - await redis.set( - name=f'calculate_from_scratch:{project_id}', - value='true', - ex=300, - ) - # source project tail epoch - tail_epoch_id, extrapolated_flag = await get_tail_epoch_id( - redis, protocol_state_contract, anchor_rpc_helper, msg_obj.epochId, 86400, msg_obj.projectId, - ) - - # for the first epoch, using submitted cid - current_epoch_underlying_data = await get_submission_data( - redis, msg_obj.snapshotCid, ipfs_reader, project_id, - ) - - snapshots_data = await get_project_epoch_snapshot_bulk( - redis, protocol_state_contract, anchor_rpc_helper, ipfs_reader, - tail_epoch_id, msg_obj.epochId - 1, msg_obj.projectId, - ) - - aggregate_snapshot = UniswapTradesAggregateSnapshot.parse_obj({'epochId': msg_obj.epochId}) - if extrapolated_flag: - aggregate_snapshot.complete = False - if current_epoch_underlying_data: - current_snapshot = UniswapTradesSnapshot.parse_obj(current_epoch_underlying_data) - aggregate_snapshot = self._add_aggregate_snapshot(aggregate_snapshot, current_snapshot) - - for snapshot_data in snapshots_data: - if snapshot_data: - snapshot = UniswapTradesSnapshot.parse_obj(snapshot_data) - aggregate_snapshot = self._add_aggregate_snapshot(aggregate_snapshot, snapshot) - - await redis.delete(f'calculate_from_scratch:{project_id}') - - return aggregate_snapshot - - async def compute( - self, - msg_obj: PowerloomSnapshotSubmittedMessage, - redis: aioredis.Redis, - rpc_helper: RpcHelper, - anchor_rpc_helper: RpcHelper, - ipfs_reader: AsyncIPFSClient, - protocol_state_contract, - project_id: str, - - ): - self._logger.info(f'Building trade volume aggregate snapshot for {msg_obj}') - - # aggregate project first epoch - project_first_epoch = await get_project_first_epoch( - redis, protocol_state_contract, anchor_rpc_helper, project_id, - ) - - # If no past snapshots exist, then aggregate will be current snapshot - if project_first_epoch == 0: - return await self._calculate_from_scratch( - msg_obj, redis, rpc_helper, anchor_rpc_helper, ipfs_reader, protocol_state_contract, project_id, - ) - - else: - self._logger.info('project_first_epoch is not 0, building aggregate from previous aggregate') - - # get key with highest score - project_last_finalized = await redis.zrevrangebyscore( - project_finalized_data_zset(project_id), - max='+inf', - min='-inf', - withscores=True, - start=0, - num=1, - ) - - if project_last_finalized: - project_last_finalized_cid, project_last_finalized_epoch = project_last_finalized[0] - project_last_finalized_epoch = int(project_last_finalized_epoch) - 
project_last_finalized_cid = project_last_finalized_cid.decode('utf-8') - else: - self._logger.info('project_last_finalized is None, trying to fetch from contract') - return await self._calculate_from_scratch( - msg_obj, redis, rpc_helper, anchor_rpc_helper, ipfs_reader, protocol_state_contract, project_id, - ) - - tail_epoch_id, extrapolated_flag = await get_tail_epoch_id( - redis, protocol_state_contract, anchor_rpc_helper, msg_obj.epochId, 86400, msg_obj.projectId, - ) - - if extrapolated_flag: - aggregate_complete_flag = False - else: - aggregate_complete_flag = True - - if project_last_finalized_epoch <= tail_epoch_id: - self._logger.error('last finalized epoch is too old, building aggregate from scratch') - return await self._calculate_from_scratch( - msg_obj, redis, rpc_helper, anchor_rpc_helper, ipfs_reader, protocol_state_contract, project_id, - ) - - project_last_finalized_data = await get_submission_data( - redis, project_last_finalized_cid, ipfs_reader, project_id, - ) - - if not project_last_finalized_data: - self._logger.info('project_last_finalized_data is None, building aggregate from scratch') - return await self._calculate_from_scratch( - msg_obj, redis, rpc_helper, anchor_rpc_helper, ipfs_reader, protocol_state_contract, project_id, - ) - - aggregate_snapshot = UniswapTradesAggregateSnapshot.parse_obj(project_last_finalized_data) - # updating epochId to current epoch - aggregate_snapshot.epochId = msg_obj.epochId - - base_project_last_finalized = await redis.zrevrangebyscore( - project_finalized_data_zset(msg_obj.projectId), - max='+inf', - min='-inf', - withscores=True, - start=0, - num=1, - ) - - if base_project_last_finalized: - _, base_project_last_finalized_epoch_ = base_project_last_finalized[0] - base_project_last_finalized_epoch = int(base_project_last_finalized_epoch_) - else: - base_project_last_finalized_epoch = 0 - - if base_project_last_finalized_epoch and project_last_finalized_epoch < base_project_last_finalized_epoch: - # fetch base finalized snapshots if they exist and are within 5 epochs of current epoch - base_finalized_snapshot_range = ( - project_last_finalized_epoch + 1, - base_project_last_finalized_epoch, - ) - - base_finalized_snapshots = await get_project_epoch_snapshot_bulk( - redis, protocol_state_contract, anchor_rpc_helper, ipfs_reader, - base_finalized_snapshot_range[0], base_finalized_snapshot_range[1], msg_obj.projectId, - ) - else: - base_finalized_snapshots = [] - base_finalized_snapshot_range = (0, project_last_finalized_epoch) - - base_unfinalized_tasks = [] - for epoch_id in range(base_finalized_snapshot_range[1] + 1, msg_obj.epochId + 1): - base_unfinalized_tasks.append( - redis.get(submitted_base_snapshots_key(epoch_id=epoch_id, project_id=msg_obj.projectId)), - ) - - base_unfinalized_snapshots_raw = await asyncio.gather(*base_unfinalized_tasks, return_exceptions=True) - - base_unfinalized_snapshots = [] - for snapshot_data in base_unfinalized_snapshots_raw: - # check if not exception and not None - if not isinstance(snapshot_data, Exception) and snapshot_data: - base_unfinalized_snapshots.append( - json.loads(snapshot_data), - ) - else: - self._logger.error( - f'Error fetching base unfinalized snapshot, cancelling aggregation for epoch {msg_obj.epochId}', - ) - return None - - base_snapshots = base_finalized_snapshots + base_unfinalized_snapshots - - for snapshot_data in base_snapshots: - if snapshot_data: - snapshot = UniswapTradesSnapshot.parse_obj(snapshot_data) - aggregate_snapshot = 
self._add_aggregate_snapshot(aggregate_snapshot, snapshot) - - # Remove from tail if needed - tail_epochs_to_remove = [] - for epoch_id in range(project_last_finalized_epoch, msg_obj.epochId): - tail_epoch_id, extrapolated_flag = await get_tail_epoch_id( - redis, protocol_state_contract, anchor_rpc_helper, epoch_id, 86400, msg_obj.projectId, - ) - if not extrapolated_flag: - tail_epochs_to_remove.append(tail_epoch_id) - if tail_epochs_to_remove: - tail_epoch_snapshots = await get_project_epoch_snapshot_bulk( - redis, protocol_state_contract, anchor_rpc_helper, ipfs_reader, - tail_epochs_to_remove[0], tail_epochs_to_remove[-1], msg_obj.projectId, - ) - - for snapshot_data in tail_epoch_snapshots: - if snapshot_data: - snapshot = UniswapTradesSnapshot.parse_obj(snapshot_data) - aggregate_snapshot = self._remove_aggregate_snapshot(aggregate_snapshot, snapshot) - - if aggregate_complete_flag: - aggregate_snapshot.complete = True - else: - aggregate_snapshot.complete = False - - return aggregate_snapshot diff --git a/snapshotter/modules/pooler/uniswapv2/aggregate/single_uniswap_trade_volume_7d.py b/snapshotter/modules/pooler/uniswapv2/aggregate/single_uniswap_trade_volume_7d.py deleted file mode 100644 index 8ac8442d..00000000 --- a/snapshotter/modules/pooler/uniswapv2/aggregate/single_uniswap_trade_volume_7d.py +++ /dev/null @@ -1,125 +0,0 @@ -import asyncio - -import pydantic -from ipfs_client.main import AsyncIPFSClient -from redis import asyncio as aioredis - -from ..utils.models.message_models import UniswapTradesAggregateSnapshot -from snapshotter.utils.callback_helpers import GenericProcessorAggregate -from snapshotter.utils.data_utils import get_project_epoch_snapshot -from snapshotter.utils.data_utils import get_submission_data -from snapshotter.utils.data_utils import get_tail_epoch_id -from snapshotter.utils.default_logger import logger -from snapshotter.utils.models.message_models import PowerloomSnapshotSubmittedMessage -from snapshotter.utils.rpc import RpcHelper - - -class AggreagateTradeVolumeProcessor(GenericProcessorAggregate): - transformation_lambdas = None - - def __init__(self) -> None: - self.transformation_lambdas = [] - self._logger = logger.bind(module='AggregateTradeVolumeProcessor7d') - - def _add_aggregate_snapshot( - self, - previous_aggregate_snapshot: UniswapTradesAggregateSnapshot, - current_snapshot: UniswapTradesAggregateSnapshot, - ): - - previous_aggregate_snapshot.totalTrade += current_snapshot.totalTrade - previous_aggregate_snapshot.totalFee += current_snapshot.totalFee - previous_aggregate_snapshot.token0TradeVolume += current_snapshot.token0TradeVolume - previous_aggregate_snapshot.token1TradeVolume += current_snapshot.token1TradeVolume - previous_aggregate_snapshot.token0TradeVolumeUSD += current_snapshot.token0TradeVolumeUSD - previous_aggregate_snapshot.token1TradeVolumeUSD += current_snapshot.token1TradeVolumeUSD - - return previous_aggregate_snapshot - - def _remove_aggregate_snapshot( - self, - previous_aggregate_snapshot: UniswapTradesAggregateSnapshot, - current_snapshot: UniswapTradesAggregateSnapshot, - ): - - previous_aggregate_snapshot.totalTrade -= current_snapshot.totalTrade - previous_aggregate_snapshot.totalFee -= current_snapshot.totalFee - previous_aggregate_snapshot.token0TradeVolume -= current_snapshot.token0TradeVolume - previous_aggregate_snapshot.token1TradeVolume -= current_snapshot.token1TradeVolume - previous_aggregate_snapshot.token0TradeVolumeUSD -= current_snapshot.token0TradeVolumeUSD - 
previous_aggregate_snapshot.token1TradeVolumeUSD -= current_snapshot.token1TradeVolumeUSD - - return previous_aggregate_snapshot - - async def compute( - self, - msg_obj: PowerloomSnapshotSubmittedMessage, - redis: aioredis.Redis, - rpc_helper: RpcHelper, - anchor_rpc_helper: RpcHelper, - ipfs_reader: AsyncIPFSClient, - protocol_state_contract, - project_id: str, - - ): - self._logger.info(f'Building 7 day trade volume aggregate snapshot against {msg_obj}') - - contract = project_id.split(':')[-2] - - aggregate_snapshot = UniswapTradesAggregateSnapshot( - epochId=msg_obj.epochId, - ) - # 24h snapshots fetches - snapshot_tasks = list() - self._logger.debug('fetching 24hour aggregates spaced out by 1 day over 7 days...') - count = 1 - self._logger.debug( - 'fetch # {}: queueing task for 24h aggregate snapshot for project ID {}' - ' at currently received epoch ID {} with snasphot CID {}', - count, msg_obj.projectId, msg_obj.epochId, msg_obj.snapshotCid, - ) - - snapshot_tasks.append( - get_submission_data( - redis, msg_obj.snapshotCid, ipfs_reader, msg_obj.projectId, - ), - ) - - seek_stop_flag = False - head_epoch = msg_obj.epochId - # 2. if not extrapolated, attempt to seek further back - while not seek_stop_flag and count < 7: - tail_epoch_id, seek_stop_flag = await get_tail_epoch_id( - redis, protocol_state_contract, anchor_rpc_helper, head_epoch, 86400, msg_obj.projectId, - ) - count += 1 - snapshot_tasks.append( - get_project_epoch_snapshot( - redis, protocol_state_contract, anchor_rpc_helper, - ipfs_reader, tail_epoch_id, msg_obj.projectId, - ), - ) - head_epoch = tail_epoch_id - 1 - - all_snapshots = await asyncio.gather(*snapshot_tasks, return_exceptions=True) - self._logger.debug( - 'for 7d aggregated trade volume calculations: fetched {} ' - '24h aggregated trade volume snapshots for project ID {}: {}', - len(all_snapshots), msg_obj.projectId, all_snapshots, - ) - complete_flags = [] - for single_24h_snapshot in all_snapshots: - if not isinstance(single_24h_snapshot, BaseException): - try: - snapshot = UniswapTradesAggregateSnapshot.parse_obj(single_24h_snapshot) - complete_flags.append(snapshot.complete) - except pydantic.ValidationError: - pass - else: - aggregate_snapshot = self._add_aggregate_snapshot(aggregate_snapshot, snapshot) - - if not all(complete_flags) or count < 7: - aggregate_snapshot.complete = False - else: - aggregate_snapshot.complete = True - return aggregate_snapshot diff --git a/snapshotter/modules/pooler/uniswapv2/pair_total_reserves.py b/snapshotter/modules/pooler/uniswapv2/pair_total_reserves.py deleted file mode 100644 index e79664ea..00000000 --- a/snapshotter/modules/pooler/uniswapv2/pair_total_reserves.py +++ /dev/null @@ -1,114 +0,0 @@ -import time -from typing import Dict -from typing import Optional -from typing import Union - -from redis import asyncio as aioredis - -from .utils.core import get_pair_reserves -from .utils.models.message_models import EpochBaseSnapshot -from .utils.models.message_models import UniswapPairTotalReservesSnapshot -from snapshotter.utils.callback_helpers import GenericProcessorSnapshot -from snapshotter.utils.default_logger import logger -from snapshotter.utils.models.message_models import PowerloomSnapshotProcessMessage -from snapshotter.utils.rpc import RpcHelper - - -class PairTotalReservesProcessor(GenericProcessorSnapshot): - transformation_lambdas = None - - def __init__(self) -> None: - self.transformation_lambdas = [] - self._logger = logger.bind(module='PairTotalReservesProcessor') - - async def compute( - self, - 
epoch: PowerloomSnapshotProcessMessage, - redis_conn: aioredis.Redis, - rpc_helper: RpcHelper, - - ) -> Optional[Dict[str, Union[int, float]]]: - - min_chain_height = epoch.begin - max_chain_height = epoch.end - - data_source_contract_address = epoch.data_source - - epoch_reserves_snapshot_map_token0 = dict() - epoch_prices_snapshot_map_token0 = dict() - epoch_prices_snapshot_map_token1 = dict() - epoch_reserves_snapshot_map_token1 = dict() - epoch_usd_reserves_snapshot_map_token0 = dict() - epoch_usd_reserves_snapshot_map_token1 = dict() - max_block_timestamp = int(time.time()) - - self._logger.debug(f'pair reserves {data_source_contract_address} computation init time {time.time()}') - pair_reserve_total = await get_pair_reserves( - pair_address=data_source_contract_address, - from_block=min_chain_height, - to_block=max_chain_height, - redis_conn=redis_conn, - rpc_helper=rpc_helper, - fetch_timestamp=True, - ) - - for block_num in range(min_chain_height, max_chain_height + 1): - block_pair_total_reserves = pair_reserve_total.get(block_num) - fetch_ts = True if block_num == max_chain_height else False - - epoch_reserves_snapshot_map_token0[ - f'block{block_num}' - ] = block_pair_total_reserves['token0'] - epoch_reserves_snapshot_map_token1[ - f'block{block_num}' - ] = block_pair_total_reserves['token1'] - epoch_usd_reserves_snapshot_map_token0[ - f'block{block_num}' - ] = block_pair_total_reserves['token0USD'] - epoch_usd_reserves_snapshot_map_token1[ - f'block{block_num}' - ] = block_pair_total_reserves['token1USD'] - - epoch_prices_snapshot_map_token0[ - f'block{block_num}' - ] = block_pair_total_reserves['token0Price'] - - epoch_prices_snapshot_map_token1[ - f'block{block_num}' - ] = block_pair_total_reserves['token1Price'] - - if fetch_ts: - if not block_pair_total_reserves.get('timestamp', None): - self._logger.error( - ( - 'Could not fetch timestamp against max block' - ' height in epoch {} - {}to calculate pair' - ' reserves for contract {}. 
Using current time' - ' stamp for snapshot construction' - ), - data_source_contract_address, - min_chain_height, - max_chain_height, - ) - else: - max_block_timestamp = block_pair_total_reserves.get( - 'timestamp', - ) - pair_total_reserves_snapshot = UniswapPairTotalReservesSnapshot( - **{ - 'token0Reserves': epoch_reserves_snapshot_map_token0, - 'token1Reserves': epoch_reserves_snapshot_map_token1, - 'token0ReservesUSD': epoch_usd_reserves_snapshot_map_token0, - 'token1ReservesUSD': epoch_usd_reserves_snapshot_map_token1, - 'token0Prices': epoch_prices_snapshot_map_token0, - 'token1Prices': epoch_prices_snapshot_map_token1, - 'chainHeightRange': EpochBaseSnapshot( - begin=min_chain_height, end=max_chain_height, - ), - 'timestamp': max_block_timestamp, - 'contract': data_source_contract_address, - }, - ) - self._logger.debug(f'pair reserves {data_source_contract_address}, computation end time {time.time()}') - - return pair_total_reserves_snapshot diff --git a/snapshotter/modules/pooler/uniswapv2/redis_keys.py b/snapshotter/modules/pooler/uniswapv2/redis_keys.py deleted file mode 100644 index c6b526c0..00000000 --- a/snapshotter/modules/pooler/uniswapv2/redis_keys.py +++ /dev/null @@ -1,49 +0,0 @@ -from snapshotter.settings.config import settings - -uniswap_pair_contract_tokens_addresses = ( - 'uniswap:pairContract:' + settings.namespace + ':{}:PairContractTokensAddresses' -) -uniswap_pair_contract_tokens_data = ( - 'uniswap:pairContract:' + settings.namespace + ':{}:PairContractTokensData' -) - -uinswap_token_pair_contract_mapping = ( - 'uniswap:tokens:' + settings.namespace + ':PairContractAddress' -) - -uniswap_V2_summarized_snapshots_zset = ( - 'uniswap:V2PairsSummarySnapshot:' + settings.namespace + ':snapshotsZset' -) -uniswap_V2_snapshot_at_blockheight = ( - 'uniswap:V2PairsSummarySnapshot:' + settings.namespace + ':snapshot:{}' -) # block_height -uniswap_v2_daily_stats_snapshot_zset = ( - 'uniswap:V2DailyStatsSnapshot:' + settings.namespace + ':snapshotsZset' -) -uniswap_V2_daily_stats_at_blockheight = ( - 'uniswap:V2DailyStatsSnapshot:' + settings.namespace + ':snapshot:{}' -) # block_height -uniswap_v2_tokens_snapshot_zset = ( - 'uniswap:V2TokensSummarySnapshot:' + settings.namespace + ':snapshotsZset' -) -uniswap_V2_tokens_at_blockheight = ( - 'uniswap:V2TokensSummarySnapshot:' + settings.namespace + ':{}' -) # block_height - -uniswap_pair_cached_recent_logs = ( - 'uniswap:pairContract:' + settings.namespace + ':{}:recentLogs' -) - -uniswap_tokens_pair_map = ( - 'uniswap:pairContract:' + settings.namespace + ':tokensPairMap' -) - -uniswap_pair_cached_block_height_token_price = ( - 'uniswap:pairContract:' + settings.namespace + - ':{}:cachedPairBlockHeightTokenPrice' -) - -uniswap_token_derived_eth_cached_block_height = ( - 'uniswap:token:' + settings.namespace + - ':{}:cachedDerivedEthBlockHeight' -) diff --git a/snapshotter/modules/pooler/uniswapv2/settings/__init__.py b/snapshotter/modules/pooler/uniswapv2/settings/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/snapshotter/modules/pooler/uniswapv2/settings/config.py b/snapshotter/modules/pooler/uniswapv2/settings/config.py deleted file mode 100644 index 79c54b60..00000000 --- a/snapshotter/modules/pooler/uniswapv2/settings/config.py +++ /dev/null @@ -1,10 +0,0 @@ -import json -import os - -from .settings_model import Settings - -dir_path = os.path.dirname(os.path.realpath(__file__)) -settings_file = open(os.path.join(dir_path, 'settings.json'), 'r') -settings_dict = json.load(settings_file) - 
-settings: Settings = Settings(**settings_dict) diff --git a/snapshotter/modules/pooler/uniswapv2/settings/settings.json b/snapshotter/modules/pooler/uniswapv2/settings/settings.json deleted file mode 100644 index 28edb26d..00000000 --- a/snapshotter/modules/pooler/uniswapv2/settings/settings.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "uniswap_contract_abis":{ - "factory":"snapshotter/modules/pooler/uniswapv2/static/abis/IUniswapV2Factory.json", - "router":"snapshotter/modules/pooler/uniswapv2/static/abis/UniswapV2Router.json", - "pair_contract":"snapshotter/modules/pooler/uniswapv2/static/abis/UniswapV2Pair.json", - "erc20":"snapshotter/modules/pooler/uniswapv2/static/abis/IERC20.json", - "trade_events":"snapshotter/modules/pooler/uniswapv2/static/abis/UniswapTradeEvents.json" - }, - "contract_addresses": { - "iuniswap_v2_factory": "0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f", - "iuniswap_v2_router": "0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D", - "iuniswap_v2_pair": "0x3356c9A8f40F8E9C1d192A4347A76D18243fABC5", - "USDT": "0xdac17f958d2ee523a2206206994597c13d831ec7", - "DAI": "0x6b175474e89094c44da98b954eedeac495271d0f", - "USDC": "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", - "WETH": "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", - "MAKER": "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2", - "WETH_USDT": "0x0d4a11d5eeaac28ec3f61d100daf4d40471f1852", - "FRAX": "0x853d955aCEf822Db058eb8505911ED77F175b99e", - "SYN": "0x0f2D719407FdBeFF09D87557AbB7232601FD9F29", - "FEI": "0x956F47F50A910163D8BF957Cf5846D573E7f87CA", - "agEUR": "0x1a7e4e63778B4f12a199C062f3eFdD288afCBce8", - "DAI_WETH_PAIR": "0xa478c2975ab1ea89e8196811f51a7b7ade33eb11", - "USDC_WETH_PAIR": "0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc", - "USDT_WETH_PAIR": "0x0d4a11d5eeaac28ec3f61d100daf4d40471f1852" - }, - "uniswap_v2_whitelist": [ - "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", - "0x6b175474e89094c44da98b954eedeac495271d0f", - "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", - "0xdac17f958d2ee523a2206206994597c13d831ec7", - "0x0000000000085d4780b73119b644ae5ecd22b376", - "0x5d3a536e4d6dbd6114cc1ead35777bab948e3643", - "0x39aa39c021dfbae8fac545936693ac917d5e7563", - "0x57ab1ec28d129707052df4df418d58a2d46d5f51", - "0x9f8f72aa9304c8b593d555f12ef6589cc3a579a2", - "0xc00e94cb662c3520282e6f5717214004a7f26888", - "0x514910771af9ca656af840dff83e8264ecf986ca", - "0x960b236a07cf122663c4303350609a66a7b288c0", - "0xc011a73ee8576fb46f5e1c5751ca3b9fe0af2a6f", - "0x0bc529c00c6401aef6d220be8c6ea1667f6ad93e", - "0xdf5e0e81dff6faf3a7e52ba697820c5e32d806a8", - "0x853d955acef822db058eb8505911ed77f175b99e", - "0xa47c8bf37f92abed4a126bda807a7b7498661acd", - "0x1f9840a85d5af5bf1d1762f925bdaddc4201f984", - "0x2260fac5e5542a773aa44fbcfedf7c193bc2c599", - "0x956f47f50a910163d8bf957cf5846d573e7f87ca" - ] -} diff --git a/snapshotter/modules/pooler/uniswapv2/settings/settings_model.py b/snapshotter/modules/pooler/uniswapv2/settings/settings_model.py deleted file mode 100644 index 2650cfac..00000000 --- a/snapshotter/modules/pooler/uniswapv2/settings/settings_model.py +++ /dev/null @@ -1,58 +0,0 @@ -from typing import List - -from pydantic import BaseModel -from pydantic import Field - - -class UniswapContractAbis(BaseModel): - factory: str = Field( - ..., example='pooler/modules/uniswapv2static/abis/IUniswapV2Factory.json', - ) - router: str = Field(..., example='pooler/modules/uniswapv2/static/abis/UniswapV2Router.json') - pair_contract: str = Field( - ..., example='pooler/modules/uniswapv2/static/abis/UniswapV2Pair.json', - ) - erc20: str = Field(..., 
example='pooler/modules/uniswapv2/static/abis/IERC20.json') - trade_events: str = Field( - ..., example='pooler/modules/uniswapv2/static/abis/UniswapTradeEvents.json', - ) - - -class ContractAddresses(BaseModel): - iuniswap_v2_factory: str = Field( - ..., example='0x5757371414417b8C6CAad45bAeF941aBc7d3Ab32', - ) - iuniswap_v2_router: str = Field( - ..., example='0xa5E0829CaCEd8fFDD4De3c43696c57F7D7A678ff', - ) - MAKER: str = Field( - ..., example='0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2', - ) - USDT: str = Field(..., example='0xc2132d05d31c914a87c6611c10748aeb04b58e8f') - DAI: str = Field(..., example='0x8f3cf7ad23cd3cadbd9735aff958023239c6a063') - USDC: str = Field(..., example='0x2791bca1f2de4661ed88a30c99a7a9449aa84174') - WETH: str = Field(..., example='0x7ceb23fd6bc0add59e62ac25578270cff1b9f619') - WETH_USDT: str = Field( - ..., example='0xf6422b997c7f54d1c6a6e103bcb1499eea0a7046', - ) - FRAX: str = Field(..., example='0x853d955aCEf822Db058eb8505911ED77F175b99e') - SYN: str = Field(..., example='0x0f2D719407FdBeFF09D87557AbB7232601FD9F29') - FEI: str = Field(..., example='0x956F47F50A910163D8BF957Cf5846D573E7f87CA') - agEUR: str = Field( - ..., example='0x1a7e4e63778B4f12a199C062f3eFdD288afCBce8', - ) - DAI_WETH_PAIR: str = Field( - ..., example='0xa478c2975ab1ea89e8196811f51a7b7ade33eb11', - ) - USDC_WETH_PAIR: str = Field( - ..., example='0xb4e16d0168e52d35cacd2c6185b44281ec28c9', - ) - USDT_WETH_PAIR: str = Field( - ..., example='0x0d4a11d5eeaac28ec3f61d100daf4d40471f1852', - ) - - -class Settings(BaseModel): - uniswap_contract_abis: UniswapContractAbis - contract_addresses: ContractAddresses - uniswap_v2_whitelist: List[str] diff --git a/snapshotter/modules/pooler/uniswapv2/static/abis/IERC20.json b/snapshotter/modules/pooler/uniswapv2/static/abis/IERC20.json deleted file mode 100644 index b3eb93c9..00000000 --- a/snapshotter/modules/pooler/uniswapv2/static/abis/IERC20.json +++ /dev/null @@ -1,224 +0,0 @@ -[ - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "owner", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "spender", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "Approval", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "from", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "Transfer", - "type": "event" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "owner", - "type": "address" - }, - { - "internalType": "address", - "name": "spender", - "type": "address" - } - ], - "name": "allowance", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "spender", - "type": "address" - }, - { - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "approve", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "owner", - "type": "address" - } - ], - "name": "balanceOf", - "outputs": [ - { - 
"internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "decimals", - "outputs": [ - { - "internalType": "uint8", - "name": "", - "type": "uint8" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "name", - "outputs": [ - { - "internalType": "string", - "name": "", - "type": "string" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "symbol", - "outputs": [ - { - "internalType": "string", - "name": "", - "type": "string" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "totalSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "transfer", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "from", - "type": "address" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "transferFrom", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "nonpayable", - "type": "function" - } -] diff --git a/snapshotter/modules/pooler/uniswapv2/static/abis/ISushiswapFactory.json b/snapshotter/modules/pooler/uniswapv2/static/abis/ISushiswapFactory.json deleted file mode 100644 index 91a3b89c..00000000 --- a/snapshotter/modules/pooler/uniswapv2/static/abis/ISushiswapFactory.json +++ /dev/null @@ -1,215 +0,0 @@ -[ - { - "inputs": [ - { - "internalType": "address", - "name": "_feeToSetter", - "type": "address" - } - ], - "stateMutability": "nonpayable", - "type": "constructor" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "token0", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "token1", - "type": "address" - }, - { - "indexed": false, - "internalType": "address", - "name": "pair", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "name": "PairCreated", - "type": "event" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "name": "allPairs", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "allPairsLength", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "tokenA", - "type": "address" - }, - { - "internalType": "address", - "name": "tokenB", - "type": "address" - } - ], - "name": "createPair", - "outputs": [ - { - "internalType": "address", - "name": "pair", - "type": "address" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "feeTo", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - 
"stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "feeToSetter", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - }, - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "name": "getPair", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "migrator", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "pairCodeHash", - "outputs": [ - { - "internalType": "bytes32", - "name": "", - "type": "bytes32" - } - ], - "stateMutability": "pure", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_feeTo", - "type": "address" - } - ], - "name": "setFeeTo", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_feeToSetter", - "type": "address" - } - ], - "name": "setFeeToSetter", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_migrator", - "type": "address" - } - ], - "name": "setMigrator", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - } -] diff --git a/snapshotter/modules/pooler/uniswapv2/static/abis/IUniswapV2Factory.json b/snapshotter/modules/pooler/uniswapv2/static/abis/IUniswapV2Factory.json deleted file mode 100644 index 75e9bd5d..00000000 --- a/snapshotter/modules/pooler/uniswapv2/static/abis/IUniswapV2Factory.json +++ /dev/null @@ -1,193 +0,0 @@ -[ - { - "inputs": [ - { - "internalType": "address", - "name": "_feeToSetter", - "type": "address" - } - ], - "payable": false, - "stateMutability": "nonpayable", - "type": "constructor" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "token0", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "token1", - "type": "address" - }, - { - "indexed": false, - "internalType": "address", - "name": "pair", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "name": "PairCreated", - "type": "event" - }, - { - "constant": true, - "inputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "name": "allPairs", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "payable": false, - "stateMutability": "view", - "type": "function" - }, - { - "constant": true, - "inputs": [], - "name": "allPairsLength", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "payable": false, - "stateMutability": "view", - "type": "function" - }, - { - "constant": false, - "inputs": [ - { - "internalType": "address", - "name": "tokenA", - "type": "address" - }, - { - "internalType": "address", - "name": "tokenB", - "type": "address" - } - ], - "name": "createPair", - "outputs": [ - { - "internalType": "address", - "name": "pair", - "type": "address" - } - ], - "payable": false, - "stateMutability": "nonpayable", - "type": "function" - }, - { - "constant": true, - "inputs": [], - "name": 
"feeTo", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "payable": false, - "stateMutability": "view", - "type": "function" - }, - { - "constant": true, - "inputs": [], - "name": "feeToSetter", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "payable": false, - "stateMutability": "view", - "type": "function" - }, - { - "constant": true, - "inputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - }, - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "name": "getPair", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "payable": false, - "stateMutability": "view", - "type": "function" - }, - { - "constant": false, - "inputs": [ - { - "internalType": "address", - "name": "_feeTo", - "type": "address" - } - ], - "name": "setFeeTo", - "outputs": [], - "payable": false, - "stateMutability": "nonpayable", - "type": "function" - }, - { - "constant": false, - "inputs": [ - { - "internalType": "address", - "name": "_feeToSetter", - "type": "address" - } - ], - "name": "setFeeToSetter", - "outputs": [], - "payable": false, - "stateMutability": "nonpayable", - "type": "function" - } -] diff --git a/snapshotter/modules/pooler/uniswapv2/static/abis/SushiswapPair.json b/snapshotter/modules/pooler/uniswapv2/static/abis/SushiswapPair.json deleted file mode 100644 index 61c709c4..00000000 --- a/snapshotter/modules/pooler/uniswapv2/static/abis/SushiswapPair.json +++ /dev/null @@ -1,658 +0,0 @@ -[ - { - "inputs": [], - "stateMutability": "nonpayable", - "type": "constructor" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "owner", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "spender", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "Approval", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "sender", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount0", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount1", - "type": "uint256" - }, - { - "indexed": true, - "internalType": "address", - "name": "to", - "type": "address" - } - ], - "name": "Burn", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "sender", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount0", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount1", - "type": "uint256" - } - ], - "name": "Mint", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "sender", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount0In", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount1In", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount0Out", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount1Out", - "type": "uint256" - }, - { - "indexed": true, - "internalType": "address", - "name": "to", - "type": "address" - } - ], - "name": 
"Swap", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "uint112", - "name": "reserve0", - "type": "uint112" - }, - { - "indexed": false, - "internalType": "uint112", - "name": "reserve1", - "type": "uint112" - } - ], - "name": "Sync", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "from", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "Transfer", - "type": "event" - }, - { - "inputs": [], - "name": "DOMAIN_SEPARATOR", - "outputs": [ - { - "internalType": "bytes32", - "name": "", - "type": "bytes32" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "MINIMUM_LIQUIDITY", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "PERMIT_TYPEHASH", - "outputs": [ - { - "internalType": "bytes32", - "name": "", - "type": "bytes32" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - }, - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "name": "allowance", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "spender", - "type": "address" - }, - { - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "approve", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "name": "balanceOf", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "to", - "type": "address" - } - ], - "name": "burn", - "outputs": [ - { - "internalType": "uint256", - "name": "amount0", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amount1", - "type": "uint256" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "decimals", - "outputs": [ - { - "internalType": "uint8", - "name": "", - "type": "uint8" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "factory", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "getReserves", - "outputs": [ - { - "internalType": "uint112", - "name": "_reserve0", - "type": "uint112" - }, - { - "internalType": "uint112", - "name": "_reserve1", - "type": "uint112" - }, - { - "internalType": "uint32", - "name": "_blockTimestampLast", - "type": "uint32" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_token0", - "type": "address" - }, - { - "internalType": "address", - "name": "_token1", - "type": "address" - } - ], - "name": "initialize", - "outputs": [], - 
"stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "kLast", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "to", - "type": "address" - } - ], - "name": "mint", - "outputs": [ - { - "internalType": "uint256", - "name": "liquidity", - "type": "uint256" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "name", - "outputs": [ - { - "internalType": "string", - "name": "", - "type": "string" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "name": "nonces", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "owner", - "type": "address" - }, - { - "internalType": "address", - "name": "spender", - "type": "address" - }, - { - "internalType": "uint256", - "name": "value", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - }, - { - "internalType": "uint8", - "name": "v", - "type": "uint8" - }, - { - "internalType": "bytes32", - "name": "r", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "s", - "type": "bytes32" - } - ], - "name": "permit", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "price0CumulativeLast", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "price1CumulativeLast", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "to", - "type": "address" - } - ], - "name": "skim", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "amount0Out", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amount1Out", - "type": "uint256" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "bytes", - "name": "data", - "type": "bytes" - } - ], - "name": "swap", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "symbol", - "outputs": [ - { - "internalType": "string", - "name": "", - "type": "string" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "sync", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "token0", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "token1", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "totalSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": 
"to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "transfer", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "from", - "type": "address" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "transferFrom", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "nonpayable", - "type": "function" - } -] diff --git a/snapshotter/modules/pooler/uniswapv2/static/abis/UniswapTradeEvents.json b/snapshotter/modules/pooler/uniswapv2/static/abis/UniswapTradeEvents.json deleted file mode 100644 index 7dd79cd9..00000000 --- a/snapshotter/modules/pooler/uniswapv2/static/abis/UniswapTradeEvents.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "Swap": { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "sender", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount0In", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount1In", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount0Out", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount1Out", - "type": "uint256" - }, - { - "indexed": true, - "internalType": "address", - "name": "to", - "type": "address" - } - ], - "name": "Swap", - "type": "event" - }, - "Mint": { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "sender", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount0", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount1", - "type": "uint256" - } - ], - "name": "Mint", - "type": "event" - }, - "Burn": { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "sender", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount0", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "amount1", - "type": "uint256" - }, - { - "indexed": true, - "internalType": "address", - "name": "to", - "type": "address" - } - ], - "name": "Burn", - "type": "event" - } -} diff --git a/snapshotter/modules/pooler/uniswapv2/static/abis/UniswapV2Pair.json b/snapshotter/modules/pooler/uniswapv2/static/abis/UniswapV2Pair.json deleted file mode 100644 index 6a6ef031..00000000 --- a/snapshotter/modules/pooler/uniswapv2/static/abis/UniswapV2Pair.json +++ /dev/null @@ -1 +0,0 @@ 
-[{"inputs":[],"payable":false,"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":true,"internalType":"address","name":"spender","type":"address"},{"indexed":false,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"sender","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount0","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount1","type":"uint256"},{"indexed":true,"internalType":"address","name":"to","type":"address"}],"name":"Burn","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"sender","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount0","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount1","type":"uint256"}],"name":"Mint","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"sender","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount0In","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount1In","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount0Out","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount1Out","type":"uint256"},{"indexed":true,"internalType":"address","name":"to","type":"address"}],"name":"Swap","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint112","name":"reserve0","type":"uint112"},{"indexed":false,"internalType":"uint112","name":"reserve1","type":"uint112"}],"name":"Sync","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"from","type":"address"},{"indexed":true,"internalType":"address","name":"to","type":"address"},{"indexed":false,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Transfer","type":"event"},{"constant":true,"inputs":[],"name":"DOMAIN_SEPARATOR","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"MINIMUM_LIQUIDITY","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"PERMIT_TYPEHASH","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"internalType":"address","name":"","type":"address"},{"internalType":"address","name":"","type":"address"}],"name":"allowance","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"approve","outputs":[{"internalType":"bool","name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"to","type":"address"}],"name":"burn","outputs":[{"internalType":"uint256","name":"amount0","type":"uint25
6"},{"internalType":"uint256","name":"amount1","type":"uint256"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"internalType":"uint8","name":"","type":"uint8"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"factory","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"getReserves","outputs":[{"internalType":"uint112","name":"_reserve0","type":"uint112"},{"internalType":"uint112","name":"_reserve1","type":"uint112"},{"internalType":"uint32","name":"_blockTimestampLast","type":"uint32"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"_token0","type":"address"},{"internalType":"address","name":"_token1","type":"address"}],"name":"initialize","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"kLast","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"to","type":"address"}],"name":"mint","outputs":[{"internalType":"uint256","name":"liquidity","type":"uint256"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"name","outputs":[{"internalType":"string","name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"nonces","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"permit","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"price0CumulativeLast","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"price1CumulativeLast","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"to","type":"address"}],"name":"skim","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"internalType":"uint256","name":"amount0Out","type":"uint256"},{"internalType":"uint256","name":"amount1Out","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"bytes","name":"data","type":"bytes"}],"name":"swap","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"internalType":"string","name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[],"name":"sync","outputs":[],"payable":false,"stateMutability":"nonpayable","type
":"function"},{"constant":true,"inputs":[],"name":"token0","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"token1","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"transfer","outputs":[{"internalType":"bool","name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"transferFrom","outputs":[{"internalType":"bool","name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"}] diff --git a/snapshotter/modules/pooler/uniswapv2/static/abis/UniswapV2Router.json b/snapshotter/modules/pooler/uniswapv2/static/abis/UniswapV2Router.json deleted file mode 100644 index c1d05c2c..00000000 --- a/snapshotter/modules/pooler/uniswapv2/static/abis/UniswapV2Router.json +++ /dev/null @@ -1,973 +0,0 @@ -[ - { - "inputs": [ - { - "internalType": "address", - "name": "_factory", - "type": "address" - }, - { - "internalType": "address", - "name": "_WETH", - "type": "address" - } - ], - "stateMutability": "nonpayable", - "type": "constructor" - }, - { - "inputs": [], - "name": "WETH", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "tokenA", - "type": "address" - }, - { - "internalType": "address", - "name": "tokenB", - "type": "address" - }, - { - "internalType": "uint256", - "name": "amountADesired", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountBDesired", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountAMin", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountBMin", - "type": "uint256" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - } - ], - "name": "addLiquidity", - "outputs": [ - { - "internalType": "uint256", - "name": "amountA", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountB", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "liquidity", - "type": "uint256" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "token", - "type": "address" - }, - { - "internalType": "uint256", - "name": "amountTokenDesired", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountTokenMin", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountETHMin", - "type": "uint256" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - } - ], - "name": "addLiquidityETH", - "outputs": [ - { - "internalType": "uint256", - "name": "amountToken", - "type": 
"uint256" - }, - { - "internalType": "uint256", - "name": "amountETH", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "liquidity", - "type": "uint256" - } - ], - "stateMutability": "payable", - "type": "function" - }, - { - "inputs": [], - "name": "factory", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "amountOut", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "reserveIn", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "reserveOut", - "type": "uint256" - } - ], - "name": "getAmountIn", - "outputs": [ - { - "internalType": "uint256", - "name": "amountIn", - "type": "uint256" - } - ], - "stateMutability": "pure", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "amountIn", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "reserveIn", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "reserveOut", - "type": "uint256" - } - ], - "name": "getAmountOut", - "outputs": [ - { - "internalType": "uint256", - "name": "amountOut", - "type": "uint256" - } - ], - "stateMutability": "pure", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "amountOut", - "type": "uint256" - }, - { - "internalType": "address[]", - "name": "path", - "type": "address[]" - } - ], - "name": "getAmountsIn", - "outputs": [ - { - "internalType": "uint256[]", - "name": "amounts", - "type": "uint256[]" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "amountIn", - "type": "uint256" - }, - { - "internalType": "address[]", - "name": "path", - "type": "address[]" - } - ], - "name": "getAmountsOut", - "outputs": [ - { - "internalType": "uint256[]", - "name": "amounts", - "type": "uint256[]" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "amountA", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "reserveA", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "reserveB", - "type": "uint256" - } - ], - "name": "quote", - "outputs": [ - { - "internalType": "uint256", - "name": "amountB", - "type": "uint256" - } - ], - "stateMutability": "pure", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "tokenA", - "type": "address" - }, - { - "internalType": "address", - "name": "tokenB", - "type": "address" - }, - { - "internalType": "uint256", - "name": "liquidity", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountAMin", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountBMin", - "type": "uint256" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - } - ], - "name": "removeLiquidity", - "outputs": [ - { - "internalType": "uint256", - "name": "amountA", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountB", - "type": "uint256" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "token", - "type": "address" - }, - { - "internalType": "uint256", - "name": "liquidity", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": 
"amountTokenMin", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountETHMin", - "type": "uint256" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - } - ], - "name": "removeLiquidityETH", - "outputs": [ - { - "internalType": "uint256", - "name": "amountToken", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountETH", - "type": "uint256" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "token", - "type": "address" - }, - { - "internalType": "uint256", - "name": "liquidity", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountTokenMin", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountETHMin", - "type": "uint256" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - } - ], - "name": "removeLiquidityETHSupportingFeeOnTransferTokens", - "outputs": [ - { - "internalType": "uint256", - "name": "amountETH", - "type": "uint256" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "token", - "type": "address" - }, - { - "internalType": "uint256", - "name": "liquidity", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountTokenMin", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountETHMin", - "type": "uint256" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - }, - { - "internalType": "bool", - "name": "approveMax", - "type": "bool" - }, - { - "internalType": "uint8", - "name": "v", - "type": "uint8" - }, - { - "internalType": "bytes32", - "name": "r", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "s", - "type": "bytes32" - } - ], - "name": "removeLiquidityETHWithPermit", - "outputs": [ - { - "internalType": "uint256", - "name": "amountToken", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountETH", - "type": "uint256" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "token", - "type": "address" - }, - { - "internalType": "uint256", - "name": "liquidity", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountTokenMin", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountETHMin", - "type": "uint256" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - }, - { - "internalType": "bool", - "name": "approveMax", - "type": "bool" - }, - { - "internalType": "uint8", - "name": "v", - "type": "uint8" - }, - { - "internalType": "bytes32", - "name": "r", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "s", - "type": "bytes32" - } - ], - "name": "removeLiquidityETHWithPermitSupportingFeeOnTransferTokens", - "outputs": [ - { - "internalType": "uint256", - "name": "amountETH", - "type": "uint256" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "tokenA", - "type": "address" - }, - { - "internalType": "address", - "name": "tokenB", - "type": 
"address" - }, - { - "internalType": "uint256", - "name": "liquidity", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountAMin", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountBMin", - "type": "uint256" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - }, - { - "internalType": "bool", - "name": "approveMax", - "type": "bool" - }, - { - "internalType": "uint8", - "name": "v", - "type": "uint8" - }, - { - "internalType": "bytes32", - "name": "r", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "s", - "type": "bytes32" - } - ], - "name": "removeLiquidityWithPermit", - "outputs": [ - { - "internalType": "uint256", - "name": "amountA", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountB", - "type": "uint256" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "amountOut", - "type": "uint256" - }, - { - "internalType": "address[]", - "name": "path", - "type": "address[]" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - } - ], - "name": "swapETHForExactTokens", - "outputs": [ - { - "internalType": "uint256[]", - "name": "amounts", - "type": "uint256[]" - } - ], - "stateMutability": "payable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "amountOutMin", - "type": "uint256" - }, - { - "internalType": "address[]", - "name": "path", - "type": "address[]" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - } - ], - "name": "swapExactETHForTokens", - "outputs": [ - { - "internalType": "uint256[]", - "name": "amounts", - "type": "uint256[]" - } - ], - "stateMutability": "payable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "amountOutMin", - "type": "uint256" - }, - { - "internalType": "address[]", - "name": "path", - "type": "address[]" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - } - ], - "name": "swapExactETHForTokensSupportingFeeOnTransferTokens", - "outputs": [], - "stateMutability": "payable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "amountIn", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountOutMin", - "type": "uint256" - }, - { - "internalType": "address[]", - "name": "path", - "type": "address[]" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - } - ], - "name": "swapExactTokensForETH", - "outputs": [ - { - "internalType": "uint256[]", - "name": "amounts", - "type": "uint256[]" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "amountIn", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountOutMin", - "type": "uint256" - }, - { - "internalType": "address[]", - "name": "path", - "type": "address[]" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": 
"uint256" - } - ], - "name": "swapExactTokensForETHSupportingFeeOnTransferTokens", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "amountIn", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountOutMin", - "type": "uint256" - }, - { - "internalType": "address[]", - "name": "path", - "type": "address[]" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - } - ], - "name": "swapExactTokensForTokens", - "outputs": [ - { - "internalType": "uint256[]", - "name": "amounts", - "type": "uint256[]" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "amountIn", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountOutMin", - "type": "uint256" - }, - { - "internalType": "address[]", - "name": "path", - "type": "address[]" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - } - ], - "name": "swapExactTokensForTokensSupportingFeeOnTransferTokens", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "amountOut", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountInMax", - "type": "uint256" - }, - { - "internalType": "address[]", - "name": "path", - "type": "address[]" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - } - ], - "name": "swapTokensForExactETH", - "outputs": [ - { - "internalType": "uint256[]", - "name": "amounts", - "type": "uint256[]" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "amountOut", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amountInMax", - "type": "uint256" - }, - { - "internalType": "address[]", - "name": "path", - "type": "address[]" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - } - ], - "name": "swapTokensForExactTokens", - "outputs": [ - { - "internalType": "uint256[]", - "name": "amounts", - "type": "uint256[]" - } - ], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "stateMutability": "payable", - "type": "receive" - } -] diff --git a/snapshotter/modules/pooler/uniswapv2/tests/liquidity_test.py b/snapshotter/modules/pooler/uniswapv2/tests/liquidity_test.py deleted file mode 100644 index ffd78f43..00000000 --- a/snapshotter/modules/pooler/uniswapv2/tests/liquidity_test.py +++ /dev/null @@ -1,92 +0,0 @@ -import asyncio -import json - -import httpx -from redis import asyncio as aioredis - -from ..utils.core import get_pair_reserves -from snapshotter.utils.redis.rate_limiter import load_rate_limiter_scripts -from snapshotter.utils.redis.redis_conn import provide_async_redis_conn_insta - - -@provide_async_redis_conn_insta -async def fetch_liquidityUSD_rpc( - pair_address, - block_num, - redis_conn: aioredis.Redis = None, -): - rate_limiting_lua_scripts = await load_rate_limiter_scripts(redis_conn) - data = await get_pair_reserves( - loop, - rate_limiting_lua_scripts, - pair_address, - block_num, - block_num, - 
redis_conn=redis_conn, - ) - block_pair_total_reserves = data.get(block_num) - return ( - block_pair_total_reserves['token0USD'] + - block_pair_total_reserves['token1USD'] - ) - - -def fetch_liquidityUSD_graph(pair_address, block_num): - uniswap_url = 'https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v2' - uniswap_payload = ( - '{"query":"{\\n pair(id: \\"' + - str(pair_address) + - '\\",block:{number:' + - str( - block_num, - ) + - '}) {\\n reserveUSD \\n token0 { \\n symbol \\n } \\n token1 {' - ' \\n symbol \\n }\\n } \\n }" }' - ) - print(uniswap_payload) - headers = {'Content-Type': 'application/plain'} - response = httpx.post( - url=uniswap_url, - headers=headers, - data=uniswap_payload, - timeout=30, - ) - if response.status_code == 200: - data = json.loads(response.text) - print('Response', data) - data = data['data'] - return float(data['pair']['reserveUSD']) - else: - print('Error fetching data from uniswap THE GRAPH %s', response) - return 0 - - -async def compare_liquidity(): - total_liquidity_usd_graph = 0 - total_liquidity_usd_rpc = 0 - block_num = 16046250 - - contracts = list() - contracts.append('0xae461ca67b15dc8dc81ce7615e0320da1a9ab8d5') - for contract in contracts: - liquidity_usd_graph = fetch_liquidityUSD_graph(contract, block_num) - liquidity_usd_rpc = await fetch_liquidityUSD_rpc(contract, block_num) - print( - f'Contract {contract}, liquidityUSD_graph is' - f' {liquidity_usd_graph} , liquidityUSD_rpc {liquidity_usd_rpc},' - ' liquidityUSD difference' - f' {(liquidity_usd_rpc - liquidity_usd_graph)}', - ) - total_liquidity_usd_graph += liquidity_usd_graph - total_liquidity_usd_rpc += liquidity_usd_rpc - - print( - f'{len(contracts)} contracts compared, liquidityUSD_rpc_total is' - f' {total_liquidity_usd_rpc}, liquidityUSD_graph_total is' - f' {total_liquidity_usd_graph}', - ) - - -if __name__ == '__main__': - loop = asyncio.get_event_loop() - loop.run_until_complete(compare_liquidity()) diff --git a/snapshotter/modules/pooler/uniswapv2/tests/token_price_test.py b/snapshotter/modules/pooler/uniswapv2/tests/token_price_test.py deleted file mode 100644 index 345bb4ca..00000000 --- a/snapshotter/modules/pooler/uniswapv2/tests/token_price_test.py +++ /dev/null @@ -1,490 +0,0 @@ -import asyncio -import json -from functools import partial - -from redis import asyncio as aioredis -from web3 import Web3 - -from ..settings.config import enabled_projects -from ..settings.config import settings -from ..settings.config import settings as worker_settings -from ..utils.helpers import get_pair_metadata -from snapshotter.utils.redis.rate_limiter import load_rate_limiter_scripts -from snapshotter.utils.redis.redis_conn import provide_async_redis_conn_insta - -w3 = Web3(Web3.HTTPProvider(settings.rpc.full_nodes[0].url)) -pair_address = Web3.toChecksumAddress( - '0x97c4adc5d28a86f9470c70dd91dc6cc2f20d2d4d', -) - - -def read_json_file(file_path: str): - """Read given json file and return its content as a dictionary.""" - try: - f_ = open(file_path, 'r', encoding='utf-8') - except Exception as exc: - print(f'Unable to open the {file_path} file') - raise exc - else: - json_data = json.loads(f_.read()) - return json_data - - -router_contract_abi = read_json_file( - worker_settings.uniswap_contract_abis.router, -) -pair_contract_abi = read_json_file( - worker_settings.uniswap_contract_abis.pair_contract, -) -all_contracts = enabled_projects - - -async def get_token_price_at_block_height( - token_contract_obj, - token_metadata, - block_height, - loop: asyncio.AbstractEventLoop, - 
redis_conn=None, - debug_log=True, -): - """ - returns the price of a token at a given block height - """ - try: - token_price = 0 - - # else fetch from rpc - stable_coins_addresses = { - 'USDC': Web3.toChecksumAddress( - worker_settings.contract_addresses.USDC, - ), - 'DAI': Web3.toChecksumAddress( - worker_settings.contract_addresses.DAI, - ), - 'USDT': Web3.toChecksumAddress( - worker_settings.contract_addresses.USDT, - ), - } - stable_coins_decimals = { - 'USDT': 6, - 'DAI': 18, - 'USDC': 6, - } - non_stable_coins_addresses = { - Web3.toChecksumAddress(worker_settings.contract_addresses.agEUR): { - 'token0': Web3.toChecksumAddress( - worker_settings.contract_addresses.agEUR, - ), - 'token1': Web3.toChecksumAddress( - worker_settings.contract_addresses.FEI, - ), - 'decimals': 18, - }, - Web3.toChecksumAddress(worker_settings.contract_addresses.SYN): { - 'token0': Web3.toChecksumAddress( - worker_settings.contract_addresses.SYN, - ), - 'token1': Web3.toChecksumAddress( - worker_settings.contract_addresses.FRAX, - ), - 'decimals': 18, - }, - } - - # this is used to avoid INSUFFICIENT_INPUT_AMOUNT error - token_amount_multiplier = 10**18 - - # check if token is a stable coin if so then ignore price fetch call - if Web3.toChecksumAddress(token_metadata['address']) in list( - stable_coins_addresses.values(), - ): - token_price = 1 - if debug_log: - print( - ( - f"## {token_metadata['symbol']}: ignored stablecoin" - f" calculation for token0: {token_metadata['symbol']} -" - f' WETH - USDT conversion: {token_price}' - ), - ) - - # check if token has no pair with stablecoin and weth if so then use hardcoded path - elif non_stable_coins_addresses.get( - Web3.toChecksumAddress(token_metadata['address']), - ): - contract_metadata = non_stable_coins_addresses.get( - Web3.toChecksumAddress(token_metadata['address']), - ) - if not contract_metadata: - return None - price_function_token0 = partial( - token_contract_obj.functions.getAmountsOut( - 10 ** int(contract_metadata['decimals']), - [ - contract_metadata['token0'], - contract_metadata['token1'], - Web3.toChecksumAddress( - worker_settings.contract_addresses.USDC, - ), - ], - ).call, - block_identifier=block_height, - ) - temp_token_price = await loop.run_in_executor( - func=price_function_token0, - executor=None, - ) - if temp_token_price: - # USDC decimals - temp_token_price = ( - temp_token_price[2] / 10 ** stable_coins_decimals['USDC'] - if temp_token_price[2] != 0 - else 0 - ) - token_price = ( - temp_token_price if token_price < temp_token_price else token_price - ) - - # 1. if is not equals to weth then check its price against each stable coin take out heighest - # 2. if price is still 0/None then pass path as token->weth-usdt - # 3. 
if price is still 0/None then increase token amount in path (token->weth-usdc) - elif Web3.toChecksumAddress( - token_metadata['address'], - ) != Web3.toChecksumAddress(worker_settings.contract_addresses.WETH): - # iterate over all stable coin to find price - stable_coins_len = len(stable_coins_addresses) - for key, value in stable_coins_addresses.items(): - try: - price_function_token0 = partial( - token_contract_obj.functions.getAmountsOut( - 10 ** int(token_metadata['decimals']), - [ - Web3.toChecksumAddress( - token_metadata['address'], - ), - value, - ], - ).call, - block_identifier=block_height, - ) - temp_token_price = await loop.run_in_executor( - func=price_function_token0, - executor=None, - ) - if temp_token_price: - # USDT decimals - temp_token_price = ( - temp_token_price[1] / - 10 ** stable_coins_decimals[key] - if temp_token_price[1] != 0 - else 0 - ) - - print( - ( - f"## {token_metadata['symbol']}->{key}: token" - f' price: {temp_token_price}' - ), - ) - - token_price = ( - temp_token_price - if token_price < temp_token_price - else token_price - ) - except Exception as error: - # if reverted then it means token do not have pair with this stablecoin, try another - if 'execution reverted' in str(error): - temp_token_price = 0 - else: - # if there was no exception and price is still 0 - # then increase token amount in path (token->stablecoin) - if temp_token_price == 0: - price_function_token0 = partial( - token_contract_obj.functions.getAmountsOut( - 10 ** int(token_metadata['decimals']) * - token_amount_multiplier, - [ - Web3.toChecksumAddress( - token_metadata['address'], - ), - value, - ], - ).call, - block_identifier=block_height, - ) - temp_token_price = await loop.run_in_executor( - func=price_function_token0, - executor=None, - ) - if temp_token_price: - # USDT decimals - temp_token_price = ( - temp_token_price[1] / - 10 ** stable_coins_decimals[key] - if temp_token_price[1] != 0 - else 0 - ) - temp_token_price = ( - temp_token_price / token_amount_multiplier - ) - - print( - ( - f"## {token_metadata['symbol']}->{key}:" - ' (increased_input_amount) token price :' - f' {temp_token_price}' - ), - ) - - token_price = ( - temp_token_price - if token_price < temp_token_price - else token_price - ) - - stable_coins_len -= 1 - if stable_coins_len <= 0: - break - - print( - ( - f"## {token_metadata['symbol']}: chosed token price after" - f' all stable coin conversions: {token_price}' - ), - ) - - # After iterating over all stable coin, check if - # path conversion by token->weth->usdt give a higher price of token - # if so then replace it, as for some tokens we get accurate price - # by token->weth->usdt path only - try: - price_function_token0 = partial( - token_contract_obj.functions.getAmountsOut( - 10 ** int(token_metadata['decimals']), - [ - Web3.toChecksumAddress(token_metadata['address']), - Web3.toChecksumAddress( - worker_settings.contract_addresses.WETH, - ), - Web3.toChecksumAddress( - worker_settings.contract_addresses.USDT, - ), - ], - ).call, - block_identifier=block_height, - ) - temp_token_price = await loop.run_in_executor( - func=price_function_token0, - executor=None, - ) - if temp_token_price: - # USDT decimals - temp_token_price = ( - temp_token_price[2] / - 10 ** stable_coins_decimals['USDT'] - if temp_token_price[2] != 0 - else 0 - ) - print( - ( - f"## {token_metadata['symbol']}: token price after" - f' weth->stablecoin: {temp_token_price}' - ), - ) - token_price = ( - temp_token_price - if token_price < temp_token_price - else token_price - ) - 
except Exception: - # there might be INSUFFICIENT_INPUT_AMOUNT/execution_reverted - # error which can break program flow, so pass it - pass - - # after going through all stablecoins and weth conversion if price is still 0 - # then increase token amount in path (token->weth-usdt) - if token_price == 0: - price_function_token0 = partial( - token_contract_obj.functions.getAmountsOut( - 10 ** int( - token_metadata['decimals'], - ) * token_amount_multiplier, - [ - Web3.toChecksumAddress(token_metadata['address']), - Web3.toChecksumAddress( - worker_settings.contract_addresses.WETH, - ), - Web3.toChecksumAddress( - worker_settings.contract_addresses.USDT, - ), - ], - ).call, - block_identifier=block_height, - ) - temp_token_price = await loop.run_in_executor( - func=price_function_token0, - executor=None, - ) - - if temp_token_price: - # USDT decimals - temp_token_price = ( - temp_token_price[2] / - 10 ** stable_coins_decimals['USDT'] - if temp_token_price[2] != 0 - else 0 - ) - temp_token_price = temp_token_price / token_amount_multiplier - print( - ( - f"## {token_metadata['symbol']}: token price after" - ' weth->stablecoin (increased_input_amount):' - f' {temp_token_price}' - ), - ) - token_price = ( - temp_token_price - if token_price < temp_token_price - else token_price - ) - - if debug_log: - print( - f"## {token_metadata['symbol']}: final price: {token_price}", - ) - - # if token is weth then directly check its price against stable coin - else: - price_function_token0 = partial( - token_contract_obj.functions.getAmountsOut( - 10 ** int(token_metadata['decimals']), - [ - Web3.toChecksumAddress( - worker_settings.contract_addresses.WETH, - ), - Web3.toChecksumAddress( - worker_settings.contract_addresses.USDT, - ), - ], - ).call, - block_identifier=block_height, - ) - token_price = await loop.run_in_executor( - func=price_function_token0, - executor=None, - ) - token_price = ( - token_price[1] / 10 ** stable_coins_decimals['USDT'] - ) # USDT decimals - if debug_log: - print( - f"## {token_metadata['symbol']}: final prices:" f' {token_price}', - ) - except Exception as err: - print( - ( - f'Error: failed to fetch token price | error_msg: {str(err)} |' - f" contract: {token_metadata['address']}" - ), - ) - finally: - return float(token_price) - - -async def get_all_pairs_token_price(loop, redis_conn: aioredis.Redis = None): - router_contract_obj = w3.eth.contract( - address=Web3.toChecksumAddress( - worker_settings.contract_addresses.iuniswap_v2_router, - ), - abi=router_contract_abi, - ) - rate_limiting_lua_scripts = await load_rate_limiter_scripts(redis_conn) - - for contract in all_contracts: - pair_per_token_metadata = await get_pair_metadata( - rate_limit_lua_script_shas=rate_limiting_lua_scripts, - pair_address=contract, - loop=loop, - redis_conn=redis_conn, - ) - token0, token1 = await asyncio.gather( - get_token_price_at_block_height( - router_contract_obj, - pair_per_token_metadata['token0'], - 'latest', - loop, - redis_conn, - ), - get_token_price_at_block_height( - router_contract_obj, - pair_per_token_metadata['token1'], - 'latest', - loop, - redis_conn, - ), - ) - print('\n') - print( - { - pair_per_token_metadata['token0']['symbol']: token0, - pair_per_token_metadata['token1']['symbol']: token1, - 'contract': contract, - }, - ) - print('\n') - - -@provide_async_redis_conn_insta -async def get_pair_tokens_price(pair, loop, redis_conn: aioredis.Redis = None): - router_contract_obj = w3.eth.contract( - address=Web3.toChecksumAddress( - 
worker_settings.contract_addresses.iuniswap_v2_router, - ), - abi=router_contract_abi, - ) - - pair_address = Web3.toChecksumAddress(pair) - rate_limiting_lua_scripts = await load_rate_limiter_scripts(redis_conn) - pair_per_token_metadata = await get_pair_metadata( - rate_limit_lua_script_shas=rate_limiting_lua_scripts, - pair_address=pair_address, - loop=loop, - redis_conn=redis_conn, - ) - print('\n') - print('\n') - token0, token1 = await asyncio.gather( - get_token_price_at_block_height( - router_contract_obj, - pair_per_token_metadata['token0'], - 'latest', - loop, - redis_conn, - ), - get_token_price_at_block_height( - router_contract_obj, - pair_per_token_metadata['token1'], - 'latest', - loop, - redis_conn, - ), - ) - print('\n') - print( - { - pair_per_token_metadata['token0']['symbol']: token0, - pair_per_token_metadata['token1']['symbol']: token1, - }, - ) - print('\n') - await redis_conn.close() - - -if __name__ == '__main__': - pair_address = '0x7b73644935b8e68019ac6356c40661e1bc315860' - loop = asyncio.get_event_loop() - data = loop.run_until_complete( - get_pair_tokens_price(pair_address, loop), - ) - print(f'\n\n{data}\n') diff --git a/snapshotter/modules/pooler/uniswapv2/trade_volume.py b/snapshotter/modules/pooler/uniswapv2/trade_volume.py deleted file mode 100644 index 2dcd48f5..00000000 --- a/snapshotter/modules/pooler/uniswapv2/trade_volume.py +++ /dev/null @@ -1,89 +0,0 @@ -import time - -from redis import asyncio as aioredis - -from .utils.core import get_pair_trade_volume -from .utils.models.message_models import EpochBaseSnapshot -from .utils.models.message_models import UniswapTradesSnapshot -from snapshotter.utils.callback_helpers import GenericProcessorSnapshot -from snapshotter.utils.default_logger import logger -from snapshotter.utils.models.message_models import PowerloomSnapshotProcessMessage -from snapshotter.utils.rpc import RpcHelper - - -class TradeVolumeProcessor(GenericProcessorSnapshot): - transformation_lambdas = None - - def __init__(self) -> None: - self.transformation_lambdas = [ - self.transform_processed_epoch_to_trade_volume, - ] - self._logger = logger.bind(module='TradeVolumeProcessor') - - async def compute( - self, - epoch: PowerloomSnapshotProcessMessage, - redis_conn: aioredis.Redis, - rpc_helper: RpcHelper, - ): - - min_chain_height = epoch.begin - max_chain_height = epoch.end - - data_source_contract_address = epoch.data_source - - self._logger.debug(f'trade volume {data_source_contract_address}, computation init time {time.time()}') - result = await get_pair_trade_volume( - data_source_contract_address=data_source_contract_address, - min_chain_height=min_chain_height, - max_chain_height=max_chain_height, - redis_conn=redis_conn, - rpc_helper=rpc_helper, - ) - self._logger.debug(f'trade volume {data_source_contract_address}, computation end time {time.time()}') - return result - - def transform_processed_epoch_to_trade_volume( - self, - snapshot, - data_source_contract_address, - epoch_begin, - epoch_end, - ): - self._logger.debug( - 'Trade volume processed snapshot: {}', snapshot, - ) - - # Set effective trade volume at top level - total_trades_in_usd = snapshot['Trades'][ - 'totalTradesUSD' - ] - total_fee_in_usd = snapshot['Trades']['totalFeeUSD'] - total_token0_vol = snapshot['Trades'][ - 'token0TradeVolume' - ] - total_token1_vol = snapshot['Trades'][ - 'token1TradeVolume' - ] - total_token0_vol_usd = snapshot['Trades'][ - 'token0TradeVolumeUSD' - ] - total_token1_vol_usd = snapshot['Trades'][ - 'token1TradeVolumeUSD' - ] - - 
max_block_timestamp = snapshot.get('timestamp') - snapshot.pop('timestamp', None) - trade_volume_snapshot = UniswapTradesSnapshot( - contract=data_source_contract_address, - chainHeightRange=EpochBaseSnapshot(begin=epoch_begin, end=epoch_end), - timestamp=max_block_timestamp, - totalTrade=float(f'{total_trades_in_usd: .6f}'), - totalFee=float(f'{total_fee_in_usd: .6f}'), - token0TradeVolume=float(f'{total_token0_vol: .6f}'), - token1TradeVolume=float(f'{total_token1_vol: .6f}'), - token0TradeVolumeUSD=float(f'{total_token0_vol_usd: .6f}'), - token1TradeVolumeUSD=float(f'{total_token1_vol_usd: .6f}'), - events=snapshot, - ) - return trade_volume_snapshot diff --git a/snapshotter/modules/pooler/uniswapv2/utils/__init__.py b/snapshotter/modules/pooler/uniswapv2/utils/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/snapshotter/modules/pooler/uniswapv2/utils/constants.py b/snapshotter/modules/pooler/uniswapv2/utils/constants.py deleted file mode 100644 index 44d7f4e1..00000000 --- a/snapshotter/modules/pooler/uniswapv2/utils/constants.py +++ /dev/null @@ -1,86 +0,0 @@ -from web3 import Web3 - -from ..settings.config import settings as worker_settings -from snapshotter.utils.default_logger import logger -from snapshotter.utils.file_utils import read_json_file -from snapshotter.utils.rpc import RpcHelper - -constants_logger = logger.bind(module='PowerLoom|Uniswap|Constants') -# Getting current node - -rpc_helper = RpcHelper() -current_node = rpc_helper.get_current_node() - -# LOAD ABIs -pair_contract_abi = read_json_file( - worker_settings.uniswap_contract_abis.pair_contract, - constants_logger, -) -erc20_abi = read_json_file( - worker_settings.uniswap_contract_abis.erc20, - constants_logger, -) -router_contract_abi = read_json_file( - worker_settings.uniswap_contract_abis.router, - constants_logger, -) -uniswap_trade_events_abi = read_json_file( - worker_settings.uniswap_contract_abis.trade_events, - constants_logger, -) -factory_contract_abi = read_json_file( - worker_settings.uniswap_contract_abis.factory, - constants_logger, -) - - -# Init Uniswap V2 Core contract Objects -router_contract_obj = current_node['web3_client'].eth.contract( - address=Web3.toChecksumAddress( - worker_settings.contract_addresses.iuniswap_v2_router, - ), - abi=router_contract_abi, -) -factory_contract_obj = current_node['web3_client'].eth.contract( - address=Web3.toChecksumAddress( - worker_settings.contract_addresses.iuniswap_v2_factory, - ), - abi=factory_contract_abi, -) -dai_eth_contract_obj = current_node['web3_client'].eth.contract( - address=Web3.toChecksumAddress( - worker_settings.contract_addresses.DAI_WETH_PAIR, - ), - abi=pair_contract_abi, -) -usdc_eth_contract_obj = current_node['web3_client'].eth.contract( - address=Web3.toChecksumAddress( - worker_settings.contract_addresses.USDC_WETH_PAIR, - ), - abi=pair_contract_abi, -) -eth_usdt_contract_obj = current_node['web3_client'].eth.contract( - address=Web3.toChecksumAddress( - worker_settings.contract_addresses.USDT_WETH_PAIR, - ), - abi=pair_contract_abi, -) - - -# FUNCTION SIGNATURES and OTHER CONSTANTS -UNISWAP_TRADE_EVENT_SIGS = { - 'Swap': 'Swap(address,uint256,uint256,uint256,uint256,address)', - 'Mint': 'Mint(address,uint256,uint256)', - 'Burn': 'Burn(address,uint256,uint256,address)', -} -UNISWAP_EVENTS_ABI = { - 'Swap': usdc_eth_contract_obj.events.Swap._get_event_abi(), - 'Mint': usdc_eth_contract_obj.events.Mint._get_event_abi(), - 'Burn': usdc_eth_contract_obj.events.Burn._get_event_abi(), -} -tokens_decimals = { - 
'USDT': 6, - 'DAI': 18, - 'USDC': 6, - 'WETH': 18, -} diff --git a/snapshotter/modules/pooler/uniswapv2/utils/core.py b/snapshotter/modules/pooler/uniswapv2/utils/core.py deleted file mode 100644 index 55ebcb46..00000000 --- a/snapshotter/modules/pooler/uniswapv2/utils/core.py +++ /dev/null @@ -1,481 +0,0 @@ -import asyncio -import json - -from redis import asyncio as aioredis -from web3 import Web3 - -from .constants import pair_contract_abi -from .constants import UNISWAP_EVENTS_ABI -from .constants import UNISWAP_TRADE_EVENT_SIGS -from .helpers import get_pair_metadata -from .models.data_models import epoch_event_trade_data -from .models.data_models import event_trade_data -from .models.data_models import trade_data -from .pricing import ( - get_token_price_in_block_range, -) -from snapshotter.utils.default_logger import logger -from snapshotter.utils.rpc import get_contract_abi_dict -from snapshotter.utils.rpc import get_event_sig_and_abi -from snapshotter.utils.rpc import RpcHelper -from snapshotter.utils.snapshot_utils import ( - get_block_details_in_block_range, -) - -core_logger = logger.bind(module='PowerLoom|UniswapCore') - - -async def get_pair_reserves( - pair_address, - from_block, - to_block, - redis_conn: aioredis.Redis, - rpc_helper: RpcHelper, - fetch_timestamp=False, -): - core_logger.debug( - f'Starting pair total reserves query for: {pair_address}', - ) - pair_address = Web3.toChecksumAddress(pair_address) - - if fetch_timestamp: - try: - block_details_dict = await get_block_details_in_block_range( - from_block, - to_block, - redis_conn=redis_conn, - rpc_helper=rpc_helper, - ) - except Exception as err: - core_logger.opt(exception=True).error( - ( - 'Error attempting to get block details of block-range' - ' {}-{}: {}, retrying again' - ), - from_block, - to_block, - err, - ) - raise err - else: - block_details_dict = dict() - - pair_per_token_metadata = await get_pair_metadata( - pair_address=pair_address, - redis_conn=redis_conn, - rpc_helper=rpc_helper, - ) - - core_logger.debug( - ( - 'total pair reserves fetched block details for epoch for:' - f' {pair_address}' - ), - ) - - token0_price_map, token1_price_map = await asyncio.gather( - get_token_price_in_block_range( - token_metadata=pair_per_token_metadata['token0'], - from_block=from_block, - to_block=to_block, - redis_conn=redis_conn, - rpc_helper=rpc_helper, - debug_log=False, - ), - get_token_price_in_block_range( - token_metadata=pair_per_token_metadata['token1'], - from_block=from_block, - to_block=to_block, - redis_conn=redis_conn, - rpc_helper=rpc_helper, - debug_log=False, - ), - ) - - core_logger.debug( - f'Total reserves fetched token prices for: {pair_address}', - ) - - # create dictionary of ABI {function_name -> {signature, abi, input, output}} - pair_abi_dict = get_contract_abi_dict(pair_contract_abi) - # get token price function takes care of its own rate limit - - reserves_array = await rpc_helper.batch_eth_call_on_block_range( - abi_dict=pair_abi_dict, - function_name='getReserves', - contract_address=pair_address, - from_block=from_block, - to_block=to_block, - redis_conn=redis_conn, - ) - - core_logger.debug( - f'Total reserves fetched getReserves results: {pair_address}', - ) - token0_decimals = pair_per_token_metadata['token0']['decimals'] - token1_decimals = pair_per_token_metadata['token1']['decimals'] - - pair_reserves_arr = dict() - block_count = 0 - for block_num in range(from_block, to_block + 1): - token0Amount = ( - reserves_array[block_count][0] / 10 ** int(token0_decimals) - if 
reserves_array[block_count][0] - else 0 - ) - token1Amount = ( - reserves_array[block_count][1] / 10 ** int(token1_decimals) - if reserves_array[block_count][1] - else 0 - ) - - token0USD = token0Amount * token0_price_map.get(block_num, 0) - token1USD = token1Amount * token1_price_map.get(block_num, 0) - - token0Price = token0_price_map.get(block_num, 0) - token1Price = token1_price_map.get(block_num, 0) - - current_block_details = block_details_dict.get(block_num, None) - timestamp = ( - current_block_details.get( - 'timestamp', - None, - ) - if current_block_details - else None - ) - - pair_reserves_arr[block_num] = { - 'token0': token0Amount, - 'token1': token1Amount, - 'token0USD': token0USD, - 'token1USD': token1USD, - 'token0Price': token0Price, - 'token1Price': token1Price, - 'timestamp': timestamp, - } - block_count += 1 - - core_logger.debug( - ( - 'Calculated pair total reserves for epoch-range:' - f' {from_block} - {to_block} | pair_contract: {pair_address}' - ), - ) - return pair_reserves_arr - - -def extract_trade_volume_log( - event_name, - log, - pair_per_token_metadata, - token0_price_map, - token1_price_map, - block_details_dict, -): - token0_amount = 0 - token1_amount = 0 - token0_amount_usd = 0 - token1_amount_usd = 0 - - def token_native_and_usd_amount(token, token_type, token_price_map): - if log.args.get(token_type) <= 0: - return 0, 0 - - token_amount = log.args.get(token_type) / 10 ** int( - pair_per_token_metadata[token]['decimals'], - ) - token_usd_amount = token_amount * token_price_map.get( - log.get('blockNumber'), 0, - ) - return token_amount, token_usd_amount - - if event_name == 'Swap': - amount0In, amount0In_usd = token_native_and_usd_amount( - token='token0', - token_type='amount0In', - token_price_map=token0_price_map, - ) - amount0Out, amount0Out_usd = token_native_and_usd_amount( - token='token0', - token_type='amount0Out', - token_price_map=token0_price_map, - ) - amount1In, amount1In_usd = token_native_and_usd_amount( - token='token1', - token_type='amount1In', - token_price_map=token1_price_map, - ) - amount1Out, amount1Out_usd = token_native_and_usd_amount( - token='token1', - token_type='amount1Out', - token_price_map=token1_price_map, - ) - - token0_amount = abs(amount0Out - amount0In) - token1_amount = abs(amount1Out - amount1In) - - token0_amount_usd = abs(amount0Out_usd - amount0In_usd) - token1_amount_usd = abs(amount1Out_usd - amount1In_usd) - - elif event_name == 'Mint' or event_name == 'Burn': - token0_amount, token0_amount_usd = token_native_and_usd_amount( - token='token0', - token_type='amount0', - token_price_map=token0_price_map, - ) - token1_amount, token1_amount_usd = token_native_and_usd_amount( - token='token1', - token_type='amount1', - token_price_map=token1_price_map, - ) - - trade_volume_usd = 0 - trade_fee_usd = 0 - - block_details = block_details_dict.get(int(log.get('blockNumber', 0)), {}) - log = json.loads(Web3.toJSON(log)) - log['token0_amount'] = token0_amount - log['token1_amount'] = token1_amount - log['timestamp'] = block_details.get('timestamp', '') - # pop unused log props - log.pop('blockHash', None) - log.pop('transactionIndex', None) - - # if event is 'Swap' then only add single token in total volume calculation - if event_name == 'Swap': - # set one side token value in swap case - if token1_amount_usd and token0_amount_usd: - trade_volume_usd = ( - token1_amount_usd - if token1_amount_usd > token0_amount_usd - else token0_amount_usd - ) - else: - trade_volume_usd = ( - token1_amount_usd if token1_amount_usd else 
token0_amount_usd - ) - - # calculate uniswap LP fee - trade_fee_usd = ( - token1_amount_usd * 0.003 - if token1_amount_usd - else token0_amount_usd * 0.003 - ) # uniswap LP fee rate - - # set final usd amount for swap - log['trade_amount_usd'] = trade_volume_usd - - return ( - trade_data( - totalTradesUSD=trade_volume_usd, - totalFeeUSD=trade_fee_usd, - token0TradeVolume=token0_amount, - token1TradeVolume=token1_amount, - token0TradeVolumeUSD=token0_amount_usd, - token1TradeVolumeUSD=token1_amount_usd, - ), - log, - ) - - trade_volume_usd = token0_amount_usd + token1_amount_usd - - # set final usd amount for other events - log['trade_amount_usd'] = trade_volume_usd - - return ( - trade_data( - totalTradesUSD=trade_volume_usd, - totalFeeUSD=0.0, - token0TradeVolume=token0_amount, - token1TradeVolume=token1_amount, - token0TradeVolumeUSD=token0_amount_usd, - token1TradeVolumeUSD=token1_amount_usd, - ), - log, - ) - - -# asynchronously get trades on a pair contract -async def get_pair_trade_volume( - data_source_contract_address, - min_chain_height, - max_chain_height, - redis_conn: aioredis.Redis, - rpc_helper: RpcHelper, - fetch_timestamp=True, -): - - data_source_contract_address = Web3.toChecksumAddress( - data_source_contract_address, - ) - block_details_dict = dict() - - if fetch_timestamp: - try: - block_details_dict = await get_block_details_in_block_range( - from_block=min_chain_height, - to_block=max_chain_height, - redis_conn=redis_conn, - rpc_helper=rpc_helper, - ) - except Exception as err: - core_logger.opt(exception=True).error( - ( - 'Error attempting to get block details of to_block {}:' - ' {}, retrying again' - ), - max_chain_height, - err, - ) - raise err - - pair_per_token_metadata = await get_pair_metadata( - pair_address=data_source_contract_address, - redis_conn=redis_conn, - rpc_helper=rpc_helper, - ) - token0_price_map, token1_price_map = await asyncio.gather( - get_token_price_in_block_range( - token_metadata=pair_per_token_metadata['token0'], - from_block=min_chain_height, - to_block=max_chain_height, - redis_conn=redis_conn, - rpc_helper=rpc_helper, - debug_log=False, - ), - get_token_price_in_block_range( - token_metadata=pair_per_token_metadata['token1'], - from_block=min_chain_height, - to_block=max_chain_height, - redis_conn=redis_conn, - rpc_helper=rpc_helper, - debug_log=False, - ), - ) - - # fetch logs for swap, mint & burn - event_sig, event_abi = get_event_sig_and_abi( - UNISWAP_TRADE_EVENT_SIGS, - UNISWAP_EVENTS_ABI, - ) - - events_log = await rpc_helper.get_events_logs( - **{ - 'contract_address': data_source_contract_address, - 'to_block': max_chain_height, - 'from_block': min_chain_height, - 'topics': [event_sig], - 'event_abi': event_abi, - 'redis_conn': redis_conn, - }, - ) - - # group logs by txHashs ==> {txHash: [logs], ...} - grouped_by_tx = dict() - [ - grouped_by_tx[log.transactionHash.hex()].append(log) - if log.transactionHash.hex() in grouped_by_tx - else grouped_by_tx.update({log.transactionHash.hex(): [log]}) - for log in events_log - ] - - # init data models with empty/0 values - epoch_results = epoch_event_trade_data( - Swap=event_trade_data( - logs=[], - trades=trade_data( - totalTradesUSD=float(), - totalFeeUSD=float(), - token0TradeVolume=float(), - token1TradeVolume=float(), - token0TradeVolumeUSD=float(), - token1TradeVolumeUSD=float(), - recent_transaction_logs=list(), - ), - ), - Mint=event_trade_data( - logs=[], - trades=trade_data( - totalTradesUSD=float(), - totalFeeUSD=float(), - token0TradeVolume=float(), - 
token1TradeVolume=float(), - token0TradeVolumeUSD=float(), - token1TradeVolumeUSD=float(), - recent_transaction_logs=list(), - ), - ), - Burn=event_trade_data( - logs=[], - trades=trade_data( - totalTradesUSD=float(), - totalFeeUSD=float(), - token0TradeVolume=float(), - token1TradeVolume=float(), - token0TradeVolumeUSD=float(), - token1TradeVolumeUSD=float(), - recent_transaction_logs=list(), - ), - ), - Trades=trade_data( - totalTradesUSD=float(), - totalFeeUSD=float(), - token0TradeVolume=float(), - token1TradeVolume=float(), - token0TradeVolumeUSD=float(), - token1TradeVolumeUSD=float(), - recent_transaction_logs=list(), - ), - ) - - # prepare final trade logs structure - for tx_hash, logs in grouped_by_tx.items(): - # init temporary trade object to track trades at txHash level - tx_hash_trades = trade_data( - totalTradesUSD=float(), - totalFeeUSD=float(), - token0TradeVolume=float(), - token1TradeVolume=float(), - token0TradeVolumeUSD=float(), - token1TradeVolumeUSD=float(), - recent_transaction_logs=list(), - ) - # shift Burn logs in end of list to check if equal size of mint already exist - # and then cancel out burn with mint - logs = sorted(logs, key=lambda x: x.event, reverse=True) - - # iterate over each txHash logs - for log in logs: - # fetch trade value fog log - trades_result, processed_log = extract_trade_volume_log( - event_name=log.event, - log=log, - pair_per_token_metadata=pair_per_token_metadata, - token0_price_map=token0_price_map, - token1_price_map=token1_price_map, - block_details_dict=block_details_dict, - ) - - if log.event == 'Swap': - epoch_results.Swap.logs.append(processed_log) - epoch_results.Swap.trades += trades_result - tx_hash_trades += ( - trades_result # swap in single txHash should be added - ) - - elif log.event == 'Mint': - epoch_results.Mint.logs.append(processed_log) - epoch_results.Mint.trades += trades_result - - elif log.event == 'Burn': - epoch_results.Burn.logs.append(processed_log) - epoch_results.Burn.trades += trades_result - - # At the end of txHash logs we must normalize trade values, so it don't affect result of other txHash logs - epoch_results.Trades += abs(tx_hash_trades) - epoch_trade_logs = epoch_results.dict() - max_block_details = block_details_dict.get(max_chain_height, {}) - max_block_timestamp = max_block_details.get('timestamp', None) - epoch_trade_logs.update({'timestamp': max_block_timestamp}) - return epoch_trade_logs diff --git a/snapshotter/modules/pooler/uniswapv2/utils/helpers.py b/snapshotter/modules/pooler/uniswapv2/utils/helpers.py deleted file mode 100644 index 8ace7a59..00000000 --- a/snapshotter/modules/pooler/uniswapv2/utils/helpers.py +++ /dev/null @@ -1,243 +0,0 @@ -import asyncio - -from redis import asyncio as aioredis -from web3 import Web3 - -from ..redis_keys import uniswap_pair_contract_tokens_addresses -from ..redis_keys import uniswap_pair_contract_tokens_data -from ..redis_keys import uniswap_tokens_pair_map -from ..settings.config import settings as worker_settings -from .constants import current_node -from .constants import erc20_abi -from .constants import pair_contract_abi -from snapshotter.utils.default_logger import logger -from snapshotter.utils.rpc import RpcHelper - - -helper_logger = logger.bind(module='PowerLoom|Uniswap|Helpers') - - -def get_maker_pair_data(prop): - prop = prop.lower() - if prop.lower() == 'name': - return 'Maker' - elif prop.lower() == 'symbol': - return 'MKR' - else: - return 'Maker' - - -async def get_pair( - factory_contract_obj, - token0, - token1, - redis_conn: 
aioredis.Redis, - rpc_helper: RpcHelper, -): - # check if pair cache exists - pair_address_cache = await redis_conn.hget( - uniswap_tokens_pair_map, - f'{Web3.toChecksumAddress(token0)}-{Web3.toChecksumAddress(token1)}', - ) - if pair_address_cache: - pair_address_cache = pair_address_cache.decode('utf-8') - return Web3.toChecksumAddress(pair_address_cache) - - tasks = [ - factory_contract_obj.functions.getPair( - Web3.toChecksumAddress(token0), - Web3.toChecksumAddress(token1), - ), - ] - - result = await rpc_helper.web3_call(tasks, redis_conn=redis_conn) - pair = result[0] - # cache the pair address - await redis_conn.hset( - name=uniswap_tokens_pair_map, - mapping={ - f'{Web3.toChecksumAddress(token0)}-{Web3.toChecksumAddress(token1)}': Web3.toChecksumAddress( - pair, - ), - }, - ) - - return pair - - -async def get_pair_metadata( - pair_address, - redis_conn: aioredis.Redis, - rpc_helper: RpcHelper, -): - """ - returns information on the tokens contained within a pair contract - name, symbol, decimals of token0 and token1 - also returns pair symbol by concatenating {token0Symbol}-{token1Symbol} - """ - try: - pair_address = Web3.toChecksumAddress(pair_address) - - # check if cache exist - ( - pair_token_addresses_cache, - pair_tokens_data_cache, - ) = await asyncio.gather( - redis_conn.hgetall( - uniswap_pair_contract_tokens_addresses.format(pair_address), - ), - redis_conn.hgetall( - uniswap_pair_contract_tokens_data.format(pair_address), - ), - ) - - # parse addresses cache or call eth rpc - token0Addr = None - token1Addr = None - if pair_token_addresses_cache: - token0Addr = Web3.toChecksumAddress( - pair_token_addresses_cache[b'token0Addr'].decode('utf-8'), - ) - token1Addr = Web3.toChecksumAddress( - pair_token_addresses_cache[b'token1Addr'].decode('utf-8'), - ) - else: - pair_contract_obj = current_node['web3_client'].eth.contract( - address=Web3.toChecksumAddress(pair_address), - abi=pair_contract_abi, - ) - token0Addr, token1Addr = await rpc_helper.web3_call( - [ - pair_contract_obj.functions.token0(), - pair_contract_obj.functions.token1(), - ], - redis_conn=redis_conn, - ) - - await redis_conn.hset( - name=uniswap_pair_contract_tokens_addresses.format( - pair_address, - ), - mapping={ - 'token0Addr': token0Addr, - 'token1Addr': token1Addr, - }, - ) - - # token0 contract - token0 = current_node['web3_client'].eth.contract( - address=Web3.toChecksumAddress(token0Addr), - abi=erc20_abi, - ) - # token1 contract - token1 = current_node['web3_client'].eth.contract( - address=Web3.toChecksumAddress(token1Addr), - abi=erc20_abi, - ) - - # parse token data cache or call eth rpc - if pair_tokens_data_cache: - token0_decimals = pair_tokens_data_cache[b'token0_decimals'].decode( - 'utf-8', - ) - token1_decimals = pair_tokens_data_cache[b'token1_decimals'].decode( - 'utf-8', - ) - token0_symbol = pair_tokens_data_cache[b'token0_symbol'].decode( - 'utf-8', - ) - token1_symbol = pair_tokens_data_cache[b'token1_symbol'].decode( - 'utf-8', - ) - token0_name = pair_tokens_data_cache[b'token0_name'].decode('utf-8') - token1_name = pair_tokens_data_cache[b'token1_name'].decode('utf-8') - else: - tasks = list() - - # special case to handle maker token - maker_token0 = None - maker_token1 = None - if Web3.toChecksumAddress( - worker_settings.contract_addresses.MAKER, - ) == Web3.toChecksumAddress(token0Addr): - token0_name = get_maker_pair_data('name') - token0_symbol = get_maker_pair_data('symbol') - maker_token0 = True - else: - tasks.append(token0.functions.name()) - 
tasks.append(token0.functions.symbol()) - tasks.append(token0.functions.decimals()) - - if Web3.toChecksumAddress( - worker_settings.contract_addresses.MAKER, - ) == Web3.toChecksumAddress(token1Addr): - token1_name = get_maker_pair_data('name') - token1_symbol = get_maker_pair_data('symbol') - maker_token1 = True - else: - tasks.append(token1.functions.name()) - tasks.append(token1.functions.symbol()) - tasks.append(token1.functions.decimals()) - - if maker_token1: - [ - token0_name, - token0_symbol, - token0_decimals, - token1_decimals, - ] = await rpc_helper.web3_call(tasks, redis_conn=redis_conn) - elif maker_token0: - [ - token0_decimals, - token1_name, - token1_symbol, - token1_decimals, - ] = await rpc_helper.web3_call(tasks, redis_conn=redis_conn) - else: - [ - token0_name, - token0_symbol, - token0_decimals, - token1_name, - token1_symbol, - token1_decimals, - ] = await rpc_helper.web3_call(tasks, redis_conn=redis_conn) - - await redis_conn.hset( - name=uniswap_pair_contract_tokens_data.format(pair_address), - mapping={ - 'token0_name': token0_name, - 'token0_symbol': token0_symbol, - 'token0_decimals': token0_decimals, - 'token1_name': token1_name, - 'token1_symbol': token1_symbol, - 'token1_decimals': token1_decimals, - 'pair_symbol': f'{token0_symbol}-{token1_symbol}', - }, - ) - - return { - 'token0': { - 'address': token0Addr, - 'name': token0_name, - 'symbol': token0_symbol, - 'decimals': token0_decimals, - }, - 'token1': { - 'address': token1Addr, - 'name': token1_name, - 'symbol': token1_symbol, - 'decimals': token1_decimals, - }, - 'pair': { - 'symbol': f'{token0_symbol}-{token1_symbol}', - }, - } - except Exception as err: - # this will be retried in next cycle - helper_logger.opt(exception=True).error( - ( - f'RPC error while fetcing metadata for pair {pair_address},' - f' error_msg:{err}' - ), - ) - raise err diff --git a/snapshotter/modules/pooler/uniswapv2/utils/models/data_models.py b/snapshotter/modules/pooler/uniswapv2/utils/models/data_models.py deleted file mode 100644 index 1b8ee2c6..00000000 --- a/snapshotter/modules/pooler/uniswapv2/utils/models/data_models.py +++ /dev/null @@ -1,52 +0,0 @@ -from typing import Dict -from typing import List - -from pydantic import BaseModel - - -class trade_data(BaseModel): - totalTradesUSD: float - totalFeeUSD: float - token0TradeVolume: float - token1TradeVolume: float - token0TradeVolumeUSD: float - token1TradeVolumeUSD: float - - def __add__(self, other: 'trade_data') -> 'trade_data': - self.totalTradesUSD += other.totalTradesUSD - self.totalFeeUSD += other.totalFeeUSD - self.token0TradeVolume += other.token0TradeVolume - self.token1TradeVolume += other.token1TradeVolume - self.token0TradeVolumeUSD += other.token0TradeVolumeUSD - self.token1TradeVolumeUSD += other.token1TradeVolumeUSD - return self - - def __sub__(self, other: 'trade_data') -> 'trade_data': - self.totalTradesUSD -= other.totalTradesUSD - self.totalFeeUSD -= other.totalFeeUSD - self.token0TradeVolume -= other.token0TradeVolume - self.token1TradeVolume -= other.token1TradeVolume - self.token0TradeVolumeUSD -= other.token0TradeVolumeUSD - self.token1TradeVolumeUSD -= other.token1TradeVolumeUSD - return self - - def __abs__(self) -> 'trade_data': - self.totalTradesUSD = abs(self.totalTradesUSD) - self.totalFeeUSD = abs(self.totalFeeUSD) - self.token0TradeVolume = abs(self.token0TradeVolume) - self.token1TradeVolume = abs(self.token1TradeVolume) - self.token0TradeVolumeUSD = abs(self.token0TradeVolumeUSD) - self.token1TradeVolumeUSD = 
abs(self.token1TradeVolumeUSD) - return self - - -class event_trade_data(BaseModel): - logs: List[Dict] - trades: trade_data - - -class epoch_event_trade_data(BaseModel): - Swap: event_trade_data - Mint: event_trade_data - Burn: event_trade_data - Trades: trade_data diff --git a/snapshotter/modules/pooler/uniswapv2/utils/models/message_models.py b/snapshotter/modules/pooler/uniswapv2/utils/models/message_models.py deleted file mode 100644 index 08d0f763..00000000 --- a/snapshotter/modules/pooler/uniswapv2/utils/models/message_models.py +++ /dev/null @@ -1,115 +0,0 @@ -from typing import Dict -from typing import List - -from pydantic import BaseModel - -from snapshotter.utils.models.message_models import AggregateBase - - -class EpochBaseSnapshot(BaseModel): - begin: int - end: int - - -class SnapshotBase(BaseModel): - contract: str - chainHeightRange: EpochBaseSnapshot - timestamp: float - - -class UniswapPairTotalReservesSnapshot(SnapshotBase): - token0Reserves: Dict[ - str, - float, - ] # block number to corresponding total reserves - token1Reserves: Dict[ - str, - float, - ] # block number to corresponding total reserves - token0ReservesUSD: Dict[str, float] - token1ReservesUSD: Dict[str, float] - token0Prices: Dict[str, float] - token1Prices: Dict[str, float] - - -class logsTradeModel(BaseModel): - logs: List - trades: Dict[str, float] - - -class UniswapTradeEvents(BaseModel): - Swap: logsTradeModel - Mint: logsTradeModel - Burn: logsTradeModel - Trades: Dict[str, float] - - -class UniswapTradesSnapshot(SnapshotBase): - totalTrade: float # in USD - totalFee: float # in USD - token0TradeVolume: float # in token native decimals supply - token1TradeVolume: float # in token native decimals supply - token0TradeVolumeUSD: float - token1TradeVolumeUSD: float - events: UniswapTradeEvents - - -class UniswapTradesAggregateSnapshot(AggregateBase): - totalTrade: float = 0 # in USD - totalFee: float = 0 # in USD - token0TradeVolume: float = 0 # in token native decimals supply - token1TradeVolume: float = 0 # in token native decimals supply - token0TradeVolumeUSD: float = 0 - token1TradeVolumeUSD: float = 0 - complete: bool = True - - -class UniswapTopTokenSnapshot(BaseModel): - name: str - symbol: str - decimals: int - address: str - price: float - priceChange24h: float - volume24h: float - liquidity: float - - -class UniswapTopTokensSnapshot(AggregateBase): - tokens: List[UniswapTopTokenSnapshot] = [] - complete: bool = True - - -class UniswapTopPair24hSnapshot(BaseModel): - name: str - address: str - liquidity: float - volume24h: float - fee24h: float - - -class UniswapTopPairs24hSnapshot(AggregateBase): - pairs: List[UniswapTopPair24hSnapshot] = [] - complete: bool = True - - -class UniswapTopPair7dSnapshot(BaseModel): - name: str - address: str - volume7d: float - fee7d: float - - -class UniswapTopPairs7dSnapshot(AggregateBase): - pairs: List[UniswapTopPair7dSnapshot] = [] - complete: bool = True - - -class UniswapStatsSnapshot(AggregateBase): - volume24h: float = 0 - tvl: float = 0 - fee24h: float = 0 - volumeChange24h: float = 0 - tvlChange24h: float = 0 - feeChange24h: float = 0 - complete: bool = True diff --git a/snapshotter/modules/pooler/uniswapv2/utils/pricing.py b/snapshotter/modules/pooler/uniswapv2/utils/pricing.py deleted file mode 100644 index 71b03866..00000000 --- a/snapshotter/modules/pooler/uniswapv2/utils/pricing.py +++ /dev/null @@ -1,365 +0,0 @@ -import asyncio -import json - -from redis import asyncio as aioredis -from web3 import Web3 - -from ..redis_keys import ( - 
uniswap_pair_cached_block_height_token_price, -) -from ..redis_keys import ( - uniswap_token_derived_eth_cached_block_height, -) -from ..settings.config import settings as worker_settings -from .constants import factory_contract_obj -from .constants import pair_contract_abi -from .constants import router_contract_abi -from .constants import tokens_decimals -from .helpers import get_pair -from .helpers import get_pair_metadata -from snapshotter.utils.default_logger import logger -from snapshotter.utils.redis.redis_keys import source_chain_epoch_size_key -from snapshotter.utils.rpc import get_contract_abi_dict -from snapshotter.utils.rpc import RpcHelper -from snapshotter.utils.snapshot_utils import get_eth_price_usd - -pricing_logger = logger.bind(module='PowerLoom|Uniswap|Pricing') - - -async def get_token_pair_price_and_white_token_reserves( - pair_address, - from_block, - to_block, - pair_metadata, - white_token, - redis_conn, - rpc_helper: RpcHelper, -): - """ - Function to get: - 1. token price based on pair reserves of both token: token0Price = token1Price/token0Price - 2. whitelisted token reserves - - We can write different function for each value, but to optimize we are reusing reserves value - """ - token_price_dict = dict() - white_token_reserves_dict = dict() - - # get white - pair_abi_dict = get_contract_abi_dict(pair_contract_abi) - pair_reserves_list = await rpc_helper.batch_eth_call_on_block_range( - abi_dict=pair_abi_dict, - function_name='getReserves', - contract_address=pair_address, - from_block=from_block, - to_block=to_block, - redis_conn=redis_conn, - ) - - if len(pair_reserves_list) < to_block - (from_block - 1): - pricing_logger.trace( - ( - 'Unable to get pair price and white token reserves' - 'from_block: {}, to_block: {}, pair_reserves_list: {}' - ), - from_block, - to_block, - pair_reserves_list, - ) - - raise Exception( - 'Unable to get pair price and white token reserves' - f'from_block: {from_block}, to_block: {to_block}, ' - f'got result: {pair_reserves_list}', - ) - - index = 0 - for block_num in range(from_block, to_block + 1): - token_price = 0 - - pair_reserve_token0 = pair_reserves_list[index][0] / 10 ** int( - pair_metadata['token0']['decimals'], - ) - pair_reserve_token1 = pair_reserves_list[index][1] / 10 ** int( - pair_metadata['token1']['decimals'], - ) - - if float(pair_reserve_token0) == float(0) or float( - pair_reserve_token1, - ) == float(0): - token_price_dict[block_num] = token_price - white_token_reserves_dict[block_num] = 0 - elif ( - Web3.toChecksumAddress(pair_metadata['token0']['address']) == - white_token - ): - token_price_dict[block_num] = float( - pair_reserve_token0 / pair_reserve_token1, - ) - white_token_reserves_dict[block_num] = pair_reserve_token0 - else: - token_price_dict[block_num] = float( - pair_reserve_token1 / pair_reserve_token0, - ) - white_token_reserves_dict[block_num] = pair_reserve_token1 - - index += 1 - - return token_price_dict, white_token_reserves_dict - - -async def get_token_derived_eth( - from_block, - to_block, - token_metadata, - redis_conn, - rpc_helper: RpcHelper, -): - token_derived_eth_dict = dict() - token_address = Web3.toChecksumAddress( - token_metadata['address'], - ) - if token_address == Web3.toChecksumAddress(worker_settings.contract_addresses.WETH): - # set derived eth as 1 if token is weth - for block_num in range(from_block, to_block + 1): - token_derived_eth_dict[block_num] = 1 - - return token_derived_eth_dict - - cached_derived_eth_dict = await redis_conn.zrangebyscore( - 
name=uniswap_token_derived_eth_cached_block_height.format( - token_address, - ), - min=int(from_block), - max=int(to_block), - ) - if cached_derived_eth_dict and len(cached_derived_eth_dict) == to_block - ( - from_block - 1 - ): - token_derived_eth_dict = { - json.loads( - price.decode( - 'utf-8', - ), - )[ - 'blockHeight' - ]: json.loads(price.decode('utf-8'))['price'] - for price in cached_derived_eth_dict - } - return token_derived_eth_dict - # get white - router_abi_dict = get_contract_abi_dict(router_contract_abi) - token_derived_eth_list = await rpc_helper.batch_eth_call_on_block_range( - abi_dict=router_abi_dict, - function_name='getAmountsOut', - contract_address=worker_settings.contract_addresses.iuniswap_v2_router, - from_block=from_block, - to_block=to_block, - redis_conn=redis_conn, - params=[ - 10 ** int(token_metadata['decimals']), - [ - Web3.toChecksumAddress(token_metadata['address']), - Web3.toChecksumAddress(worker_settings.contract_addresses.WETH), - ], - ], - ) - - if len(token_derived_eth_list) < to_block - (from_block - 1): - pricing_logger.trace( - ( - 'Unable to get token derived eth' - 'from_block: {}, to_block: {}, token_derived_eth_list: {}' - ), - from_block, - to_block, - token_derived_eth_list, - ) - - raise Exception( - 'Unable to get token derived eth' - f'from_block: {from_block}, to_block: {to_block}, ' - f'got result: {token_derived_eth_list}', - ) - - index = 0 - for block_num in range(from_block, to_block + 1): - if not token_derived_eth_list[index]: - token_derived_eth_dict[block_num] = 0 - - _, derivedEth = token_derived_eth_list[index][0] - token_derived_eth_dict[block_num] = ( - derivedEth / 10 ** tokens_decimals['WETH'] if derivedEth != 0 else 0 - ) - index += 1 - - if ( - len(token_derived_eth_dict) > 0 - ): - redis_cache_mapping = { - json.dumps({'blockHeight': height, 'price': price}): int( - height, - ) - for height, price in token_derived_eth_dict.items() - } - source_chain_epoch_size = int(await redis_conn.get(source_chain_epoch_size_key())) - await asyncio.gather( - redis_conn.zadd( - name=uniswap_token_derived_eth_cached_block_height.format( - token_address, - ), - # timestamp so zset do not ignore same height on multiple heights - mapping=redis_cache_mapping, - ), - - redis_conn.zremrangebyscore( - name=uniswap_token_derived_eth_cached_block_height.format( - token_address, - ), - min=0, - max=int(from_block) - source_chain_epoch_size * 4, - ), - ) - - return token_derived_eth_dict - - -async def get_token_price_in_block_range( - token_metadata, - from_block, - to_block, - redis_conn: aioredis.Redis, - rpc_helper: RpcHelper, - debug_log=True, -): - """ - returns the price of a token at a given block range - """ - try: - token_price_dict = dict() - token_address = Web3.toChecksumAddress(token_metadata['address']) - # check if cahce exist for given epoch - cached_price_dict = await redis_conn.zrangebyscore( - name=uniswap_pair_cached_block_height_token_price.format( - token_address, - ), - min=int(from_block), - max=int(to_block), - ) - if cached_price_dict and len(cached_price_dict) == to_block - (from_block - 1): - price_dict = { - json.loads( - price.decode( - 'utf-8', - ), - )['blockHeight']: json.loads(price.decode('utf-8'))['price'] for price in cached_price_dict - } - return price_dict - - if token_address == Web3.toChecksumAddress(worker_settings.contract_addresses.WETH): - token_price_dict = await get_eth_price_usd( - from_block=from_block, to_block=to_block, - redis_conn=redis_conn, rpc_helper=rpc_helper, - ) - else: - 
token_eth_price_dict = dict() - - for white_token in worker_settings.uniswap_v2_whitelist: - white_token = Web3.toChecksumAddress(white_token) - pairAddress = await get_pair( - factory_contract_obj, white_token, token_metadata['address'], - redis_conn, rpc_helper, - ) - if pairAddress != '0x0000000000000000000000000000000000000000': - new_pair_metadata = await get_pair_metadata( - pair_address=pairAddress, - redis_conn=redis_conn, - rpc_helper=rpc_helper, - ) - white_token_metadata = new_pair_metadata['token0'] if white_token == new_pair_metadata[ - 'token0' - ]['address'] else new_pair_metadata['token1'] - - ( - white_token_price_dict, - white_token_reserves_dict, - ) = await get_token_pair_price_and_white_token_reserves( - pair_address=pairAddress, from_block=from_block, to_block=to_block, - pair_metadata=new_pair_metadata, white_token=white_token, redis_conn=redis_conn, - rpc_helper=rpc_helper, - ) - white_token_derived_eth_dict = await get_token_derived_eth( - from_block=from_block, to_block=to_block, token_metadata=white_token_metadata, - redis_conn=redis_conn, rpc_helper=rpc_helper, - ) - - less_than_minimum_liquidity = False - for block_num in range(from_block, to_block + 1): - - white_token_reserves = white_token_reserves_dict.get( - block_num, - ) * white_token_derived_eth_dict.get(block_num) - - # ignore if reservers are less than threshold - if white_token_reserves < 1: - less_than_minimum_liquidity = True - break - - # else store eth price in dictionary - token_eth_price_dict[block_num] = white_token_price_dict.get( - block_num, - ) * white_token_derived_eth_dict.get(block_num) - - # if reserves are less than threshold then try next whitelist token pair - if less_than_minimum_liquidity: - token_eth_price_dict = {} - continue - - break - - if len(token_eth_price_dict) > 0: - eth_usd_price_dict = await get_eth_price_usd( - from_block=from_block, to_block=to_block, redis_conn=redis_conn, - rpc_helper=rpc_helper, - ) - for block_num in range(from_block, to_block + 1): - token_price_dict[block_num] = token_eth_price_dict.get( - block_num, 0, - ) * eth_usd_price_dict.get(block_num, 0) - else: - for block_num in range(from_block, to_block + 1): - token_price_dict[block_num] = 0 - - if debug_log: - pricing_logger.debug( - f"{token_metadata['symbol']}: price is {token_price_dict}" - f' | its eth price is {token_eth_price_dict}', - ) - - # cache price at height - if len(token_price_dict) > 0: - redis_cache_mapping = { - json.dumps({'blockHeight': height, 'price': price}): int( - height, - ) for height, price in token_price_dict.items() - } - - await redis_conn.zadd( - name=uniswap_pair_cached_block_height_token_price.format( - Web3.toChecksumAddress(token_metadata['address']), - ), - mapping=redis_cache_mapping, # timestamp so zset do not ignore same height on multiple heights - ) - - return token_price_dict - - except Exception as err: - pricing_logger.opt(exception=True, lazy=True).trace( - ( - 'Error while calculating price of token:' - f" {token_metadata['symbol']} | {token_metadata['address']}|" - ' err: {err}' - ), - err=lambda: str(err), - ) - raise err diff --git a/snapshotter/process_hub_core.py b/snapshotter/process_hub_core.py index 923524d6..4a7ad327 100644 --- a/snapshotter/process_hub_core.py +++ b/snapshotter/process_hub_core.py @@ -1,10 +1,9 @@ -from datetime import datetime import json -import os +import resource import threading import time -from urllib.parse import urljoin import uuid +from datetime import datetime from multiprocessing import Process from signal import 
SIGCHLD from signal import SIGINT @@ -14,11 +13,13 @@ from threading import Thread from typing import Dict from typing import Optional -import httpx +from urllib.parse import urljoin +import httpx import psutil import pydantic import redis +from eth_utils.address import to_checksum_address from snapshotter.processor_distributor import ProcessorDistributor from snapshotter.settings.config import settings @@ -28,13 +29,20 @@ from snapshotter.utils.default_logger import logger from snapshotter.utils.delegate_worker import DelegateAsyncWorker from snapshotter.utils.exceptions import SelfExitException +from snapshotter.utils.file_utils import read_json_file from snapshotter.utils.helper_functions import cleanup_proc_hub_children -from snapshotter.utils.models.data_models import ProcessorWorkerDetails, SnapshotterIssue, SnapshotterReportState -from snapshotter.utils.models.data_models import SnapshotWorkerDetails +from snapshotter.utils.models.data_models import ProcessorWorkerDetails +from snapshotter.utils.models.data_models import SnapshotterIssue from snapshotter.utils.models.data_models import SnapshotterPing +from snapshotter.utils.models.data_models import SnapshotterReportState from snapshotter.utils.models.message_models import ProcessHubCommand from snapshotter.utils.rabbitmq_helpers import RabbitmqSelectLoopInteractor +from snapshotter.utils.redis.redis_conn import provide_async_redis_conn from snapshotter.utils.redis.redis_conn import provide_redis_conn +from snapshotter.utils.redis.redis_conn import provide_redis_conn_repsawning_thread +from snapshotter.utils.redis.redis_conn import REDIS_CONN_CONF +from snapshotter.utils.redis.redis_keys import process_hub_core_start_timestamp +from snapshotter.utils.rpc import RpcHelper from snapshotter.utils.snapshot_worker import SnapshotAsyncWorker PROC_STR_ID_TO_CLASS_MAP = { @@ -52,10 +60,22 @@ class ProcessHubCore(Process): + _anchor_rpc_helper: RpcHelper + _redis_connection_pool_sync: redis.BlockingConnectionPool + _redis_conn_sync: redis.Redis + def __init__(self, name, **kwargs): + """ + Initializes a new instance of the ProcessHubCore class. + + Args: + name (str): The name of the process. + **kwargs: Additional keyword arguments to pass to the Process constructor. + """ + Process.__init__(self, name=name, **kwargs) self._spawned_processes_map: Dict[str, Optional[int]] = dict() # process name to pid map - self._spawned_cb_processes_map: Dict[str, Dict[str, Optional[SnapshotWorkerDetails]]] = ( + self._spawned_cb_processes_map: Dict[str, Dict[str, Optional[ProcessorWorkerDetails]]] = ( dict() ) # separate map for callback worker spawns. 
unique ID -> dict(unique_name, pid) self._httpx_client = httpx.Client( @@ -67,125 +87,17 @@ def __init__(self, name, **kwargs): ), ) self._last_reporting_service_ping = 0 + self._source_chain_block_time = 0 + self._epoch_size = 0 self._thread_shutdown_event = threading.Event() self._shutdown_initiated = False def signal_handler(self, signum, frame): - if signum == SIGCHLD and not self._shutdown_initiated: - pid, status = os.waitpid( - -1, os.WNOHANG | os.WUNTRACED | os.WCONTINUED, - ) - if os.WIFCONTINUED(status) or os.WIFSTOPPED(status): - return - if os.WIFSIGNALED(status) or os.WIFEXITED(status): - self._logger.debug( - ( - 'Received process crash notification for child process' - ' PID: {}' - ), - pid, - ) - callback_worker_module_file = None - callback_worker_class = None - callback_worker_name = None - callback_worker_unique_id = None - for ( - cb_worker_type, - worker_unique_id_entries, - ) in self._spawned_cb_processes_map.items(): - for ( - unique_id, - worker_process_details, - ) in worker_unique_id_entries.items(): - if worker_process_details is not None and worker_process_details.pid == pid: - self._logger.debug( - ( - 'Found crashed child process PID in spawned' - ' callback workers | Callback worker class:' - ' {} | Unique worker identifier: {}' - ), - cb_worker_type, - worker_process_details.unique_name, - ) - callback_worker_name = worker_process_details.unique_name - callback_worker_unique_id = unique_id - callback_worker_class = cb_worker_type - - break - - if ( - callback_worker_name and - callback_worker_unique_id and callback_worker_class - ): - - if callback_worker_class == 'snapshot_workers': - worker_obj: Process = SnapshotAsyncWorker( - name=callback_worker_name, - ) - elif callback_worker_class == 'aggregation_workers': - worker_obj: Process = AggregationAsyncWorker( - name=callback_worker_name, - ) - - elif callback_worker_class == 'delegate_workers': - worker_obj: Process = DelegateAsyncWorker( - name=callback_worker_name, - ) - - worker_obj.start() - self._spawned_cb_processes_map[callback_worker_class][callback_worker_unique_id] = \ - SnapshotWorkerDetails(unique_name=callback_worker_unique_id, pid=worker_obj.pid) - self._logger.debug( - ( - 'Respawned callback worker class {} unique ID {}' - ' with PID {} after receiving crash signal against' - ' PID {}' - ), - callback_worker_class, - callback_worker_unique_id, - worker_obj.pid, - pid, - ) - if settings.reporting.service_url: - send_failure_notifications_sync( - client=self._httpx_client, - message=SnapshotterIssue( - instanceID=settings.instance_id, - issueType=SnapshotterReportState.CRASHED_CHILD_WORKER.value, - projectID='', - epochId='', - timeOfReporting=datetime.now().isoformat(), - extra=json.dumps( - { - 'worker_name': callback_worker_name, - 'pid': pid, - 'worker_class': callback_worker_class, - 'worker_unique_id': callback_worker_unique_id, - 'respawned_pid': worker_obj.pid, - } - ), - ) - ) - return - - for cb_worker_type, worker_pid in self._spawned_processes_map.items(): - if worker_pid is not None and worker_pid == pid: - self._logger.debug('RESPAWNING: process for {}', cb_worker_type) - proc_details: dict = PROC_STR_ID_TO_CLASS_MAP.get(cb_worker_type) - init_kwargs = dict(name=proc_details['name']) - if proc_details.get('class'): - proc_obj = proc_details['class'](**init_kwargs) - proc_obj.start() - else: - proc_obj = Process(target=proc_details['target']) - proc_obj.start() - self._logger.debug( - 'RESPAWNED: process for {} with PID: {}', - cb_worker_type, - proc_obj.pid, - ) - 
self._spawned_processes_map[cb_worker_type] = proc_obj.pid - elif signum in [SIGINT, SIGTERM, SIGQUIT]: + """ + Handles the specified signal by initiating a shutdown and sending a shutdown signal + to the reporting service. + """ + if signum in [SIGINT, SIGTERM, SIGQUIT]: self._shutdown_initiated = True if settings.reporting.service_url: self._logger.debug('Sending shutdown signal to reporting service') @@ -197,12 +109,21 @@ def signal_handler(self, signum, frame): projectID='', epochId='', timeOfReporting=datetime.now().isoformat(), - ) + ), ) self.rabbitmq_interactor.stop() # raise GenericExitOnSignal def kill_process(self, pid: int): + """ + Terminate a process with the given process ID (pid). + + Args: + pid (int): The process ID of the process to be terminated. + + Returns: + None + """ p = psutil.Process(pid) self._logger.debug( 'Attempting to send SIGTERM to process ID {} for following command', @@ -222,16 +143,25 @@ def kill_process(self, pid: int): for unique_worker_entry in v.values(): if unique_worker_entry is not None and unique_worker_entry.pid == pid: psutil.Process(pid).wait() + break for k, v in self._spawned_processes_map.items(): if v is not None and v == pid: self._logger.debug('Waiting for process ID {} to join...', pid) psutil.Process(pid).wait() self._logger.debug('Process ID {} joined...', pid) + break - @provide_redis_conn + @provide_redis_conn_repsawning_thread def internal_state_reporter(self, redis_conn: redis.Redis = None): - while not self._thread_shutdown_event.wait(timeout=2): + """ + Internal state reporter function that periodically reports the state of spawned processes to Redis + and pings a reporting service. + + Args: + redis_conn (redis.Redis, optional): Redis connection object. Defaults to None. + """ + while not self._thread_shutdown_event.wait(timeout=1): proc_id_map = dict() for k, v in self._spawned_processes_map.items(): if v: @@ -266,6 +196,7 @@ def internal_state_reporter(self, redis_conn: redis.Redis = None): mapping=proc_id_map, ) if settings.reporting.service_url and int(time.time()) - self._last_reporting_service_ping >= 30: + self._last_reporting_service_ping = int(time.time()) try: self._httpx_client.post( url=urljoin(settings.reporting.service_url, '/ping'), @@ -273,12 +204,11 @@ def internal_state_reporter(self, redis_conn: redis.Redis = None): ) except Exception as e: if settings.logs.trace_enabled: - self._logger.opt(exception=True).error('Error while pinging reporting service: {}', e,) + self._logger.opt(exception=True).error('Error while pinging reporting service: {}', e) else: self._logger.error( 'Error while pinging reporting service: {}', e, ) - self._last_reporting_service_ping = int(time.time()) self._logger.error( ( 'Caught thread shutdown notification event. Deleting process' @@ -289,13 +219,85 @@ def internal_state_reporter(self, redis_conn: redis.Redis = None): f'powerloom:snapshotter:{settings.namespace}:{settings.instance_id}:Processes', ) - @cleanup_proc_hub_children - def run(self) -> None: - self._logger = logger.bind(module='Powerloom|ProcessHub|Core') - - for signame in [SIGINT, SIGTERM, SIGQUIT, SIGCHLD]: - signal(signame, self.signal_handler) - + def _kill_all_children(self, core_workers=True): + """ + Terminate all the child processes spawned by the current process. + + Args: + core_workers (bool): If True, terminate all the core workers as well. 
+ """ + self._logger.error('Waiting on spawned callback workers to join...') + for ( + worker_class_name, + unique_worker_entries, + ) in self._spawned_cb_processes_map.items(): + procs = [] + for ( + worker_unique_id, + worker_unique_process_details, + ) in unique_worker_entries.items(): + if worker_unique_process_details is not None and worker_unique_process_details.pid: + self._logger.error( + ( + 'Waiting on spawned callback worker {} | Unique' + ' ID {} | PID {} to join...' + ), + worker_class_name, + worker_unique_id, + worker_unique_process_details.pid, + ) + _ = psutil.Process(pid=worker_unique_process_details.pid) + procs.append(_) + _.terminate() + gone, alive = psutil.wait_procs(procs, timeout=3) + for p in alive: + self._logger.error( + 'Sending SIGKILL to spawned callback worker {} after not exiting on SIGTERM | PID {}', + worker_class_name, + p.pid, + ) + p.kill() + self._spawned_cb_processes_map = dict() + if core_workers: + logger.error( + 'Waiting on spawned core workers to join... {}', + self._spawned_processes_map, + ) + procs = [] + for ( + worker_class_name, + worker_pid, + ) in self._spawned_processes_map.items(): + self._logger.error( + 'spawned Process Pid to wait on {}', + worker_pid, + ) + if worker_pid is not None: + self._logger.error( + ( + 'Waiting on spawned core worker {} | PID {} to' + ' join...' + ), + worker_class_name, + worker_pid, + ) + _ = psutil.Process(worker_pid) + procs.append(_) + _.terminate() + gone, alive = psutil.wait_procs(procs, timeout=3) + for p in alive: + self._logger.error( + 'Sending SIGKILL to spawned core worker after not exiting on SIGTERM | PID {}', + p.pid, + ) + p.kill() + self._spawned_processes_map = dict() + + def _launch_snapshot_cb_workers(self): + """ + Launches snapshot, aggregation and delegate workers based on the configuration specified in the settings. + Each worker is launched as a separate process and its details are stored in the `_spawned_cb_processes_map` dictionary. + """ self._logger.debug('=' * 80) self._logger.debug('Launching Workers') @@ -316,8 +318,8 @@ def run(self) -> None: ) self._logger.debug( ( - 'Process Hub Core launched process {} for snapshot' - ' worker {} with PID: {}' + 'Process Hub Core launched process {} for' + ' worker type {} with PID: {}' ), unique_name, 'snapshot_workers', @@ -341,7 +343,7 @@ def run(self) -> None: self._logger.debug( ( 'Process Hub Core launched process {} for' - ' worker {} with PID: {}' + ' worker type {} with PID: {}' ), unique_name, 'aggregation_workers', @@ -368,16 +370,135 @@ def run(self) -> None: self._logger.debug( ( 'Process Hub Core launched process {} for' - ' worker {} with PID: {}' + ' worker type {} with PID: {}' ), unique_name, 'delegate_workers', delegate_worker_obj.pid, ) + def _launch_core_worker(self, proc_name, proc_init_kwargs=dict()): + """ + Launches a core worker process with the given process name and initialization arguments. + + Args: + proc_name (str): The name of the process to launch. + proc_init_kwargs (dict): The initialization arguments for the process. 
+ + Returns: + None + """ + try: + proc_details: dict = PROC_STR_ID_TO_CLASS_MAP[proc_name] + init_kwargs = dict(name=proc_details['name']) + init_kwargs.update(proc_init_kwargs) + if proc_details.get('class'): + proc_obj = proc_details['class'](**init_kwargs) + proc_obj.start() + else: + proc_obj = Process( + target=proc_details['target'], + kwargs=proc_init_kwargs, + ) + proc_obj.start() + self._logger.debug( + 'Process Hub Core launched process for {} with PID: {}', + proc_name, + proc_obj.pid, + ) + self._spawned_processes_map[proc_name] = proc_obj.pid + except Exception as err: + self._logger.opt(exception=True).error( + 'Error while starting process {} | ' + '{}', + proc_name, + str(err), + ) + + def _respawn_all_children(self): + """ + Kills all existing child processes and launches new ones. + Resets the start time and last epoch processing health check. + """ + self._kill_all_children() + self._launch_all_children() + self._set_start_time() + + def _launch_all_children(self): + """ + Launches all the child processes for the process hub core. + """ + self._logger.debug('=' * 80) + self._logger.debug('Launching Core Workers') + self._launch_snapshot_cb_workers() + for proc_name in PROC_STR_ID_TO_CLASS_MAP.keys(): + self._launch_core_worker(proc_name) + self._launch_snapshot_cb_workers() + + def _set_start_time(self): + self._redis_conn_sync.set( + process_hub_core_start_timestamp(), + str(int(time.time())), + ) + + @cleanup_proc_hub_children + def run(self) -> None: + """ + Runs the Process Hub Core. + + Sets up signal handlers, resource limits, Redis connection pool, Anchor RPC helper, + Protocol State contract, source chain block time, epoch size, snapshot callback workers, + internal state reporter, RabbitMQ consumer, and raises a SelfExitException to exit the process. 
+ """ + self._logger = logger.bind(module='Powerloom|ProcessHub|Core') + + for signame in [SIGINT, SIGTERM, SIGQUIT, SIGCHLD]: + signal(signame, self.signal_handler) + + soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) + resource.setrlimit( + resource.RLIMIT_NOFILE, + (settings.rlimit.file_descriptors, hard), + ) + + self._redis_connection_pool_sync = redis.BlockingConnectionPool(**REDIS_CONN_CONF) + self._redis_conn_sync = redis.Redis(connection_pool=self._redis_connection_pool_sync) + self._anchor_rpc_helper = RpcHelper( + rpc_settings=settings.anchor_chain_rpc, + ) + self._anchor_rpc_helper._load_web3_providers_and_rate_limits() + protocol_abi = read_json_file(settings.protocol_state.abi, self._logger) + self._protocol_state_contract = self._anchor_rpc_helper.get_current_node()['web3_client'].eth.contract( + address=to_checksum_address( + settings.protocol_state.address, + ), + abi=protocol_abi, + ) + try: + source_block_time = self._protocol_state_contract.functions.SOURCE_CHAIN_BLOCK_TIME().call() + except Exception as e: + self._logger.exception( + 'Exception in querying protocol state for source chain block time: {}', + e, + ) + else: + self._source_chain_block_time = source_block_time / 10 ** 4 + self._logger.debug('Set source chain block time to {}', self._source_chain_block_time) + + try: + epoch_size = self._protocol_state_contract.functions.EPOCH_SIZE().call() + except Exception as e: + self._logger.exception( + 'Exception in querying protocol state for epoch size: {}', + e, + ) + else: + self._epoch_size = epoch_size + self._launch_snapshot_cb_workers() self._logger.debug( 'Starting Internal Process State reporter for Process Hub Core...', ) + self._set_start_time() self._reporter_thread = Thread(target=self.internal_state_reporter) self._reporter_thread.start() self._logger.debug('Starting Process Hub Core...') @@ -397,6 +518,10 @@ def run(self) -> None: raise SelfExitException def callback(self, dont_use_ch, method, properties, body): + """ + Callback function that is called when a message is received from the RabbitMQ queue. + Parses the message and performs the appropriate action based on the command received. 
+ """ self.rabbitmq_interactor._channel.basic_ack( delivery_tag=method.delivery_tag, ) @@ -443,7 +568,7 @@ def callback(self, dont_use_ch, method, properties, body): self.kill_process(worker_process_details.pid) self._spawned_cb_processes_map[ cb_worker_type - ][worker_unique_id] = SnapshotWorkerDetails( + ][worker_unique_id] = ProcessorWorkerDetails( unique_name=worker_unique_id, pid=None, ) self._logger.info( @@ -455,39 +580,25 @@ def callback(self, dont_use_ch, method, properties, body): self._spawned_processes_map[proc_str_id] = None elif cmd_json.command == 'start': - try: - self._logger.debug( - 'Process Hub Core received start command: {}', cmd_json, - ) - proc_name = cmd_json.proc_str_id - self._logger.debug( - 'Process Hub Core launching process for {}', proc_name, - ) - proc_details: dict = PROC_STR_ID_TO_CLASS_MAP.get(proc_name) - init_kwargs = dict(name=proc_details['name']) - init_kwargs.update(cmd_json.init_kwargs) - if proc_details.get('class'): - proc_obj = proc_details['class'](**init_kwargs) - proc_obj.start() - else: - proc_obj = Process( - target=proc_details['target'], - kwargs=cmd_json.init_kwargs, - ) - proc_obj.start() - self._logger.debug( - 'Process Hub Core launched process for {} with PID: {}', - proc_name, - proc_obj.pid, + self._logger.debug( + 'Process Hub Core received start command: {}', cmd_json, + ) + proc_name = cmd_json.proc_str_id + if not proc_name: + self._logger.error( + 'Received start command without process name', ) - self._spawned_processes_map[proc_name] = proc_obj.pid - except Exception as err: - self._logger.opt(exception=True).error( - ( - f'Error while starting a process:{cmd_json} |' - f' error_msg: {str(err)}' - ), + return + if proc_name not in PROC_STR_ID_TO_CLASS_MAP.keys(): + self._logger.error( + 'Received unrecognized process name to start: {}', proc_name, ) + return + self._logger.debug( + 'Process Hub Core launching process for {}', proc_name, + ) + self._launch_core_worker(proc_name, cmd_json.init_kwargs) + elif cmd_json.command == 'restart': try: self._logger.debug( @@ -508,6 +619,8 @@ def callback(self, dont_use_ch, method, properties, body): f' error_msg: {str(err)}' ), ) + elif cmd_json.command == 'respawn': + self._respawn_all_children() if __name__ == '__main__': diff --git a/snapshotter/processhub_cmd.py b/snapshotter/processhub_cmd.py index 14337330..e5131d04 100644 --- a/snapshotter/processhub_cmd.py +++ b/snapshotter/processhub_cmd.py @@ -14,50 +14,78 @@ app = typer.Typer() +def process_up(pid): + """ + Is the process up? 
+ :return: True if process is up + """ + p_ = psutil.Process(pid) + return p_.is_running() + # try: + # return os.waitpid(pid, os.WNOHANG) is not None + # except ChildProcessError: # no child processes + # return False + # try: + # call = subprocess.check_output("pidof '{}'".format(self.processName), shell=True) + # return True + # except subprocess.CalledProcessError: + # return False + + @app.command() -def pidStatus(connections: bool = False): - def print_formatted_status(process_name, pid): - try: - process = psutil.Process(pid=pid) - print(f'{pid} -') - print(f'\t name: {process.name()}') - print(f'\t status: {process.status()}') - print(f'\t threads: {process.num_threads()}') - print(f'\t file descriptors: {process.num_fds()}') - print(f'\t memory: {process.memory_info()}') - print(f'\t cpu: {process.cpu_times()}') - print( - f"\t number of connections: {len(process.connections(kind='inet'))}", - ) - if connections: - print( - f"\t number of connections: {process.connections(kind='inet')}", - ) - print('\n') - except Exception as err: - if type(err).__name__ == 'NoSuchProcess': - print(f'{pid} - NoSuchProcess') - print(f'\t name: {process_name}\n') +def processReport(): + """ + This function retrieves process details from Redis cache and prints their running status. + It prints the running status of System Event Detector, Processor Distributor, and Worker Processes. + """ + connection_pool = redis.BlockingConnectionPool(**REDIS_CONN_CONF) + redis_conn = redis.Redis(connection_pool=connection_pool) + map_raw = redis_conn.hgetall( + name=f'powerloom:snapshotter:{settings.namespace}:{settings.instance_id}:Processes', + ) + event_det_pid = map_raw[b'SystemEventDetector'] + print('\n' + '=' * 20 + 'System Event Detector' + '=' * 20) + try: + event_det_pid = int(event_det_pid) + except ValueError: + print('Event detector pid found in process map not a PID: ', event_det_pid) + else: + # event_detector_proc = psutil.Process(event_det_pid) + print('Event detector process running status: ', process_up(event_det_pid)) + + print('\n' + '=' * 20 + 'Worker Processor Distributor' + '=' * 20) + proc_dist_pid = map_raw[b'ProcessorDistributor'] + try: + proc_dist_pid = int(proc_dist_pid) + except ValueError: + print('Processor distributor pid found in process map not a PID: ', proc_dist_pid) + else: + # proc_dist_proc = psutil.Process(proc_dist_pid) + print('Processor distributor process running status: ', process_up(proc_dist_pid)) + + print('\n' + '=' * 20 + 'Worker Processes' + '=' * 20) + cb_worker_map = map_raw[b'callback_workers'] + try: + cb_worker_map = json.loads(cb_worker_map) + except json.JSONDecodeError: + print('Callback worker entries in cache corrupted...', cb_worker_map) + return + for worker_type, worker_details in cb_worker_map.items(): + section_name = worker_type.capitalize() + print('\n' + '*' * 10 + section_name + '*' * 10) + if not worker_details or not isinstance(worker_details, dict): + print(f'No {section_name} workers found in process map: ', worker_details) + continue + for short_id, worker_details in worker_details.items(): + print('\n' + '-' * 5 + short_id + '-' * 5) + proc_pid = worker_details['pid'] + try: + proc_pid = int(proc_pid) + except ValueError: + print(f'Process name {worker_details["id"]} pid found in process map not a PID: ', proc_pid) else: - print(f'Unknown Error: {str(err)}') - - r = redis.Redis(**REDIS_CONN_CONF, single_connection_client=True) - print('\n') - for k, v in r.hgetall( - 
name=f'powerloom:uniswap:{settings.namespace}:{settings.instance_id}:Processes', - ).items(): - key = k.decode('utf-8') - value = v.decode('utf-8') - - if key == 'callback_workers': - value = json.loads(value) - for i, j in value.items(): - print_formatted_status(j['id'], int(j['pid'])) - elif value.isdigit(): - print_formatted_status(key, int(value)) - else: - print(f'# Unknown type of key:{key}, value:{value}') - print('\n') + # proc = psutil.Process(proc_pid) + print('Process name ' + worker_details['id'] + ' running status: ', process_up(proc_pid)) # https://typer.tiangolo.com/tutorial/commands/context/#configuring-the-context @@ -65,6 +93,16 @@ def print_formatted_status(process_name, pid): context_settings={'allow_extra_args': True, 'ignore_unknown_options': True}, ) def start(ctx: typer.Context, process_str_id: str): + """ + Starts a process with the given process_str_id by sending a command to ProcessHubCore through RabbitMQ. + + Args: + - ctx: typer.Context object + - process_str_id: str, the identifier of the process to be started + + Returns: + - None + """ if process_str_id not in PROC_STR_ID_TO_CLASS_MAP.keys(): typer.secho( 'Unknown Process identifier supplied. Check list with listProcesses command', @@ -98,6 +136,16 @@ def stop( help='Using this flag allows you to pass a process ID instead of the name', ), ): + """ + Stop a process by sending a command to ProcessHubCore. + + Args: + process_str_id (str): The identifier of the process to stop. + pid (bool): If True, process_str_id is interpreted as a process ID instead of a name. + + Returns: + None + """ if not pid: if ( process_str_id not in PROC_STR_ID_TO_CLASS_MAP.keys() and @@ -143,5 +191,23 @@ def stop( ) +@app.command() +def respawn(): + """ + Sends a 'respawn' command to the ProcessHubCore via RabbitMQ. 
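All three CLI commands here (`start`, `stop`, `respawn`) publish a `ProcessHubCommand` message that the Process Hub Core's `callback` dispatches on `cmd_json.command`. A hedged sketch of what those payloads look like as plain JSON; only `command`, `proc_str_id` and `init_kwargs` are read in the code shown, and the example process name is illustrative:

```python
import json

# Illustrative payloads; field names mirror the attributes read in the core's
# callback (cmd_json.command, cmd_json.proc_str_id, cmd_json.init_kwargs).
start_cmd = {
    'command': 'start',
    'proc_str_id': 'SystemEventDetector',  # assumed key of PROC_STR_ID_TO_CLASS_MAP
    'init_kwargs': {},
}

respawn_cmd = {
    'command': 'respawn',  # kills and relaunches every child, as _respawn_all_children does
}

print(json.dumps(start_cmd))
print(json.dumps(respawn_cmd))
```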
+ """ + c = create_rabbitmq_conn() + typer.secho('Opening RabbitMQ channel...', fg=typer.colors.GREEN) + ch = c.channel() + proc_hub_cmd = ProcessHubCommand( + command='respawn', + ) + processhub_command_publish(ch, proc_hub_cmd.json()) + typer.secho( + f'Sent command to ProcessHubCore | Command: {proc_hub_cmd.json()}', + fg=typer.colors.YELLOW, + ) + + if __name__ == '__main__': app() diff --git a/snapshotter/processor_distributor.py b/snapshotter/processor_distributor.py index b7790088..b81ff989 100644 --- a/snapshotter/processor_distributor.py +++ b/snapshotter/processor_distributor.py @@ -1,12 +1,17 @@ import asyncio -import copy import importlib import json import multiprocessing import queue +import resource import time from collections import defaultdict +from datetime import datetime from functools import partial +from signal import SIGINT +from signal import signal +from signal import SIGQUIT +from signal import SIGTERM from typing import Awaitable from typing import Dict from typing import List @@ -33,12 +38,16 @@ from snapshotter.settings.config import settings from snapshotter.utils.callback_helpers import get_rabbitmq_channel from snapshotter.utils.callback_helpers import get_rabbitmq_robust_connection_async +from snapshotter.utils.callback_helpers import send_failure_notifications_async from snapshotter.utils.data_utils import get_projects_list from snapshotter.utils.data_utils import get_snapshot_submision_window from snapshotter.utils.data_utils import get_source_chain_epoch_size from snapshotter.utils.data_utils import get_source_chain_id from snapshotter.utils.default_logger import logger -from snapshotter.utils.models.data_models import PreloaderAsyncFutureDetails +from snapshotter.utils.file_utils import read_json_file +from snapshotter.utils.models.data_models import SnapshotterEpochProcessingReportItem +from snapshotter.utils.models.data_models import SnapshotterIssue +from snapshotter.utils.models.data_models import SnapshotterReportState from snapshotter.utils.models.data_models import SnapshotterStates from snapshotter.utils.models.data_models import SnapshotterStateUpdate from snapshotter.utils.models.data_models import SnapshottersUpdatedEvent @@ -49,21 +58,61 @@ from snapshotter.utils.models.message_models import PowerloomSnapshotFinalizedMessage from snapshotter.utils.models.message_models import PowerloomSnapshotProcessMessage from snapshotter.utils.models.message_models import PowerloomSnapshotSubmittedMessage +from snapshotter.utils.models.message_models import ProcessHubCommand from snapshotter.utils.models.settings_model import AggregateOn from snapshotter.utils.redis.redis_conn import RedisPoolCache from snapshotter.utils.redis.redis_keys import active_status_key from snapshotter.utils.redis.redis_keys import epoch_id_epoch_released_key from snapshotter.utils.redis.redis_keys import epoch_id_project_to_state_mapping +from snapshotter.utils.redis.redis_keys import last_epoch_detected_timestamp_key +from snapshotter.utils.redis.redis_keys import last_snapshot_processing_complete_timestamp_key +from snapshotter.utils.redis.redis_keys import process_hub_core_start_timestamp from snapshotter.utils.redis.redis_keys import project_finalized_data_zset +from snapshotter.utils.redis.redis_keys import project_last_finalized_epoch_key from snapshotter.utils.redis.redis_keys import snapshot_submission_window_key from snapshotter.utils.rpc import RpcHelper +# from snapshotter.utils.data_utils import build_projects_list_from_events class 
ProcessorDistributor(multiprocessing.Process): _aioredis_pool: RedisPoolCache _redis_conn: aioredis.Redis + _anchor_rpc_helper: RpcHelper + _async_transport: AsyncHTTPTransport + _client: AsyncClient def __init__(self, name, **kwargs): + """ + Initialize the ProcessorDistributor object. + + Args: + name (str): The name of the ProcessorDistributor. + **kwargs: Additional keyword arguments. + + Attributes: + _unique_id (str): The unique ID of the ProcessorDistributor. + _q (queue.Queue): The queue used for processing tasks. + _rabbitmq_interactor: The RabbitMQ interactor object. + _shutdown_initiated (bool): Flag indicating if shutdown has been initiated. + _rpc_helper: The RPC helper object. + _source_chain_id: The source chain ID. + _projects_list: The list of projects. + _consume_exchange_name (str): The name of the exchange for consuming events. + _consume_queue_name (str): The name of the queue for consuming events. + _initialized (bool): Flag indicating if the ProcessorDistributor has been initialized. + _consume_queue_routing_key (str): The routing key for consuming events. + _callback_exchange_name (str): The name of the exchange for callbacks. + _payload_commit_exchange_name (str): The name of the exchange for payload commits. + _payload_commit_routing_key (str): The routing key for payload commits. + _upcoming_project_changes (defaultdict): Dictionary of upcoming project changes. + _preload_completion_conditions (defaultdict): Dictionary of preload completion conditions. + _newly_added_projects (set): Set of newly added projects. + _shutdown_initiated (bool): Flag indicating if shutdown has been initiated. + _all_preload_tasks (set): Set of all preload tasks. + _project_type_config_mapping (dict): Dictionary mapping project types to their configurations. + _last_epoch_processing_health_check (int): Timestamp of the last epoch processing health check. + _preloader_compute_mapping (dict): Dictionary mapping preloader tasks to compute resources. + """ super(ProcessorDistributor, self).__init__(name=name, **kwargs) self._unique_id = f'{name}-' + keccak(text=str(uuid4())).hex()[:8] self._q = queue.Queue() @@ -76,6 +125,9 @@ def __init__(self, name, **kwargs): self._consume_queue_name = ( f'powerloom-event-detector:{settings.namespace}:{settings.instance_id}' ) + + # ... 
+ self._initialized = False self._consume_queue_routing_key = f'powerloom-event-detector:{settings.namespace}:{settings.instance_id}.*' self._callback_exchange_name = ( @@ -84,48 +136,64 @@ def __init__(self, name, **kwargs): self._payload_commit_exchange_name = ( f'{settings.rabbitmq.setup.commit_payload.exchange}:{settings.namespace}' ) - self._payload_commit_routing_key = f'powerloom-backend-commit-payload:{settings.namespace}:{settings.instance_id}.Finalized' + self._payload_commit_routing_key = ( + f'powerloom-backend-commit-payload:{settings.namespace}:{settings.instance_id}.Finalized' + ) - self.projects_config = copy.copy(projects_config) self._upcoming_project_changes = defaultdict(list) - self._preload_completion_conditions: Dict[ - int, Awaitable, - ] = defaultdict(dict) # epoch ID to preloading complete event + self._preload_completion_conditions: Dict[int, Dict] = defaultdict( + dict, + ) # epoch ID to preloading complete event + self._newly_added_projects = set() self._shutdown_initiated = False self._all_preload_tasks = set() self._project_type_config_mapping = dict() - for project_config in self.projects_config: + for project_config in projects_config: self._project_type_config_mapping[project_config.project_type] = project_config for proload_task in project_config.preload_tasks: self._all_preload_tasks.add(proload_task) - + self._last_epoch_processing_health_check = 0 self._preloader_compute_mapping = dict() - self._async_transport = AsyncHTTPTransport( - limits=Limits( - max_connections=100, - max_keepalive_connections=50, - keepalive_expiry=None, - ), - ) - self._client = AsyncClient( - base_url=settings.reporting.service_url, - timeout=Timeout(timeout=5.0), - follow_redirects=False, - transport=self._async_transport, - ) + + def _signal_handler(self, signum, frame): + """ + Signal handler method that cancels the core RMQ consumer when a SIGINT, SIGTERM, or SIGQUIT signal is received. + + Args: + signum (int): The signal number. + frame (frame): The current stack frame at the time the signal was received. + """ + + if signum in [SIGINT, SIGTERM, SIGQUIT]: + self._core_rmq_consumer.cancel() async def _init_redis_pool(self): + """ + Initializes the Redis connection pool and populates it with connections. + """ self._aioredis_pool = RedisPoolCache() await self._aioredis_pool.populate() self._redis_conn = self._aioredis_pool._aioredis_pool async def _init_rpc_helper(self): + """ + Initializes the RpcHelper instance if it is not already initialized. + """ if not self._rpc_helper: self._rpc_helper = RpcHelper() self._anchor_rpc_helper = RpcHelper(rpc_settings=settings.anchor_chain_rpc) async def _init_rabbitmq_connection(self): + """ + Initializes the RabbitMQ connection pool and channel pool. + + The RabbitMQ connection pool is used to manage a pool of connections to the RabbitMQ server, + while the channel pool is used to manage a pool of channels for each connection. + + Returns: + None + """ self._rmq_connection_pool = Pool( get_rabbitmq_robust_connection_async, max_size=20, loop=asyncio.get_event_loop(), @@ -135,7 +203,56 @@ async def _init_rabbitmq_connection(self): loop=asyncio.get_event_loop(), ) + async def _init_httpx_client(self): + """ + Initializes the HTTPX client with the specified settings. 
+ """ + self._async_transport = AsyncHTTPTransport( + limits=Limits( + max_connections=100, + max_keepalive_connections=50, + keepalive_expiry=None, + ), + ) + self._client = AsyncClient( + base_url=settings.reporting.service_url, + timeout=Timeout(timeout=5.0), + follow_redirects=False, + transport=self._async_transport, + ) + + async def _send_proc_hub_respawn(self): + """ + Sends a respawn command to the process hub. + + This method creates a ProcessHubCommand object with the command 'respawn', + acquires a channel from the channel pool, gets the exchange, and publishes + the command message to the exchange. + + Args: + None + + Returns: + None + """ + proc_hub_cmd = ProcessHubCommand( + command='respawn', + ) + async with self._rmq_channel_pool.acquire() as channel: + await channel.set_qos(10) + exchange = await channel.get_exchange( + name=f'{settings.rabbitmq.setup.core.exchange}:{settings.namespace}', + ) + await exchange.publish( + routing_key=f'processhub-commands:{settings.namespace}:{settings.instance_id}', + message=Message(proc_hub_cmd.json().encode('utf-8')), + ) + async def _init_preloader_compute_mapping(self): + """ + Initializes the preloader compute mapping by importing the preloader module and class and + adding it to the mapping dictionary. + """ if self._preloader_compute_mapping: return @@ -146,8 +263,13 @@ async def _init_preloader_compute_mapping(self): self._preloader_compute_mapping[preloader.task_type] = preloader_class async def init_worker(self): + """ + Initializes the worker by initializing the Redis pool, RPC helper, loading project metadata, + initializing the RabbitMQ connection, and initializing the preloader compute mapping. + """ if not self._initialized: await self._init_redis_pool() + await self._init_httpx_client() await self._init_rpc_helper() await self._load_projects_metadata() await self._init_rabbitmq_connection() @@ -155,6 +277,10 @@ async def init_worker(self): self._initialized = True async def _load_projects_metadata(self): + """ + Loads the metadata for the projects, including the source chain ID, the list of projects, and the submission window + for snapshots. It also updates the project type configuration mapping with the relevant projects. 
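The reporting client set up in `_init_httpx_client` above is a long-lived `httpx.AsyncClient` bound to the reporting service base URL, with connection limits configured on the transport rather than per request. A standalone equivalent of that setup; the URL and ping payload are placeholders:

```python
import asyncio

import httpx


async def main():
    transport = httpx.AsyncHTTPTransport(
        limits=httpx.Limits(
            max_connections=100,
            max_keepalive_connections=50,
            keepalive_expiry=None,
        ),
    )
    async with httpx.AsyncClient(
        base_url='https://reporting.example.invalid',  # placeholder for settings.reporting.service_url
        timeout=httpx.Timeout(timeout=5.0),
        follow_redirects=False,
        transport=transport,
    ) as client:
        try:
            # illustrative liveness ping; the real payload is built from settings
            await client.post(url='/ping', json={'instanceID': 'illustrative-instance-id'})
        except httpx.HTTPError as exc:
            print('reporting service unreachable:', exc)


asyncio.run(main())
```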
+ """ if not self._projects_list: with open(settings.protocol_state.abi, 'r') as f: abi_dict = json.load(f) @@ -181,6 +307,30 @@ async def _load_projects_metadata(self): state_contract_obj=protocol_state_contract, ) + # TODO: will be used after full project management overhaul + # using project set for now, keeping empty if not present in contract + + # self._projects_list = await build_projects_list_from_events( + # redis_conn=self._redis_conn, + # rpc_helper=self._anchor_rpc_helper, + # state_contract_obj=protocol_state_contract, + # ) + + # self._logger.info('Generated project list with {} projects', self._projects_list) + + # iterate over project list fetched + for project_type, project_config in self._project_type_config_mapping.items(): + project_type = project_config.project_type + if project_config.projects == []: + relevant_projects = set(filter(lambda x: project_type in x, self._projects_list)) + project_data = set() + for project in relevant_projects: + data_source = project.split(':')[-2] + project_data.add( + data_source, + ) + project_config.projects = list(project_data) + submission_window = await get_snapshot_submision_window( redis_conn=self._redis_conn, rpc_helper=self._anchor_rpc_helper, @@ -193,24 +343,205 @@ async def _load_projects_metadata(self): submission_window, ) - # iterate over project list fetched - for project_config in self.projects_config: - type_ = project_config.project_type - if project_config.projects == []: - relevant_projects = set(filter(lambda x: type_ in x, self._projects_list)) - project_data = [] - for project in relevant_projects: - data_source = project.split(':')[-2] - data_source = '_'.join(to_checksum_address(d) for d in data_source.split('_')) - project_data.append( - data_source, - ) - project_config.projects = project_data + async def _get_proc_hub_start_time(self) -> int: + """ + Retrieves the start time of the process hub core from Redis. + + Returns: + int: The start time of the process hub core, or 0 if not found. + """ + _ = await self._redis_conn.get(process_hub_core_start_timestamp()) + if _: + return int(_) + else: + return 0 + + async def _epoch_processing_health_check(self, current_epoch_id): + """ + Perform health check for epoch processing. + + Args: + current_epoch_id (int): The current epoch ID. + + Returns: + None + """ + # TODO: make the threshold values configurable. 
+ # Range of epochs to be checked, success percentage/criteria, offset from current epoch + if current_epoch_id < 5: + return + # get last set start time by proc hub core + start_time = await self._get_proc_hub_start_time() + + # only start if 5 minutes have passed since proc hub core start time + if int(time.time()) - start_time < 5 * 60: + self._logger.info( + 'Skipping epoch processing health check because 5 minutes have not passed since proc hub core start time', + ) + return + + if start_time == 0: + self._logger.info('Skipping epoch processing health check because proc hub start time is not set') + return + + # only runs once every minute + if self._last_epoch_processing_health_check != 0 and int(time.time()) - self._last_epoch_processing_health_check < 60: + self._logger.debug( + 'Skipping epoch processing health check because it was run less than a minute ago', + ) + return + + if not (self._source_chain_block_time != 0 and self._epoch_size != 0): + self._logger.info( + 'Skipping epoch processing health check because source chain block time or epoch size is not known | ' + 'Source chain block time: {} | Epoch size: {}', + self._source_chain_block_time, + self._epoch_size, + ) + return + self._last_epoch_processing_health_check = int(time.time()) + + last_epoch_detected = await self._redis_conn.get(last_epoch_detected_timestamp_key()) + last_snapshot_processed = await self._redis_conn.get(last_snapshot_processing_complete_timestamp_key()) + + if last_epoch_detected: + last_epoch_detected = int(last_epoch_detected) + + if last_snapshot_processed: + last_snapshot_processed = int(last_snapshot_processed) + + # if no epoch is detected for 30 epochs, report unhealthy and send respawn command + if last_epoch_detected and int(time.time()) - last_epoch_detected > 30 * self._source_chain_block_time * self._epoch_size: + self._logger.debug( + 'Sending unhealthy epoch report to reporting service due to no epoch detected for ~30 epochs', + ) + await send_failure_notifications_async( + client=self._client, + message=SnapshotterIssue( + instanceID=settings.instance_id, + issueType=SnapshotterReportState.UNHEALTHY_EPOCH_PROCESSING.value, + projectID='', + epochId='', + timeOfReporting=datetime.now().isoformat(), + extra=json.dumps( + { + 'last_epoch_detected': last_epoch_detected, + }, + ), + ), + ) + self._logger.info( + 'Sending respawn command for all process hub core children because no epoch was detected for ~30 epochs', + ) + await self._send_proc_hub_respawn() + + # if time difference between last epoch detected and last snapshot processed + # is more than 30 epochs, report unhealthy and send respawn command + if last_epoch_detected and last_snapshot_processed and \ + last_epoch_detected - last_snapshot_processed > 30 * self._source_chain_block_time * self._epoch_size: + self._logger.debug( + 'Sending unhealthy epoch report to reporting service due to no snapshot processing for ~30 epochs', + ) + await send_failure_notifications_async( + client=self._client, + message=SnapshotterIssue( + instanceID=settings.instance_id, + issueType=SnapshotterReportState.UNHEALTHY_EPOCH_PROCESSING.value, + projectID='', + epochId='', + timeOfReporting=datetime.now().isoformat(), + extra=json.dumps( + { + 'last_epoch_detected': last_epoch_detected, + 'last_snapshot_processed': last_snapshot_processed, + }, + ), + ), + ) + self._logger.info( + 'Sending respawn command for all process hub core children because no snapshot processing was done for ~30 epochs', + ) + await self._send_proc_hub_respawn() + + # 
check for epoch processing status + epoch_health = dict() + # check from previous epoch processing status until 2 further epochs + build_state_val = SnapshotterStates.SNAPSHOT_BUILD.value + for epoch_id in range(current_epoch_id - 1, current_epoch_id - 3 - 1, -1): + epoch_specific_report = SnapshotterEpochProcessingReportItem.construct() + success_percentage = 0 + epoch_specific_report.epochId = epoch_id + state_report_entries = await self._redis_conn.hgetall( + name=epoch_id_project_to_state_mapping(epoch_id=epoch_id, state_id=build_state_val), + ) + if state_report_entries: + project_state_report_entries = { + project_id.decode('utf-8'): SnapshotterStateUpdate.parse_raw(project_state_entry) + for project_id, project_state_entry in state_report_entries.items() + } + epoch_specific_report.transitionStatus[build_state_val] = project_state_report_entries + success_percentage += len( + [ + project_state_report_entry + for project_state_report_entry in project_state_report_entries.values() + if project_state_report_entry.status == 'success' + ], + ) / len(project_state_report_entries) + + if any([x is None for x in epoch_specific_report.transitionStatus.values()]): + epoch_health[epoch_id] = False + self._logger.debug( + 'Marking epoch {} as unhealthy due to missing state reports against transitions {}', + epoch_id, + [x for x, y in epoch_specific_report.transitionStatus.items() if y is None], + ) + if success_percentage < 0.5 and success_percentage != 0: + epoch_health[epoch_id] = False + self._logger.debug( + 'Marking epoch {} as unhealthy due to low success percentage: {}', + epoch_id, + success_percentage, + ) + if len([epoch_id for epoch_id, healthy in epoch_health.items() if not healthy]) >= 2: + self._logger.debug( + 'Sending unhealthy epoch report to reporting service: {}', + epoch_health, + ) + await send_failure_notifications_async( + client=self._client, + message=SnapshotterIssue( + instanceID=settings.instance_id, + issueType=SnapshotterReportState.UNHEALTHY_EPOCH_PROCESSING.value, + projectID='', + epochId='', + timeOfReporting=datetime.now().isoformat(), + extra=json.dumps( + { + 'epoch_health': epoch_health, + }, + ), + ), + ) + self._logger.info( + 'Sending respawn command for all process hub core children because epochs were found unhealthy: {}', epoch_health, + ) + await self._send_proc_hub_respawn() async def _preloader_waiter( self, epoch: EpochBase, ): + """ + Wait for all preloading tasks to complete for the given epoch, and distribute snapshot build tasks if all preloading + dependencies are satisfied. + + Args: + epoch: The epoch for which to wait for preloading tasks to complete. + + Returns: + None + """ + preloader_types_l = list(self._preload_completion_conditions[epoch.epochId].keys()) conditions: List[Awaitable] = [ self._preload_completion_conditions[epoch.epochId][k] @@ -288,8 +619,13 @@ async def _exec_preloaders( self, msg_obj: EpochBase, ): """ - Functions to preload data points required by snapshot builders - This is to save on redundant RPC and cache calls + Executes preloading tasks for the given epoch object. + + Args: + msg_obj (EpochBase): The epoch object for which preloading tasks need to be executed. 
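The epoch health evaluation above reduces to a small decision rule: for each of the three most recently completed epochs, inspect the per-project `SNAPSHOT_BUILD` statuses, mark the epoch unhealthy when the build reports are missing or fewer than half of the projects succeeded, and request a respawn once two or more of those epochs are unhealthy. A simplified, dependency-free sketch of that logic (it collapses the "missing transition" case into an empty status map):

```python
from typing import Dict, List


def epoch_is_healthy(project_statuses: Dict[str, str]) -> bool:
    """project_statuses maps project_id -> 'success'/'failed' for the SNAPSHOT_BUILD state."""
    if not project_statuses:
        # no state report at all for this transition -> unhealthy
        return False
    success_ratio = sum(1 for s in project_statuses.values() if s == 'success') / len(project_statuses)
    # mirrors the check above: a non-zero ratio below 50% marks the epoch unhealthy
    return not (0 < success_ratio < 0.5)


def should_respawn(last_three_epochs: List[Dict[str, str]]) -> bool:
    unhealthy = sum(1 for statuses in last_three_epochs if not epoch_is_healthy(statuses))
    return unhealthy >= 2


# one epoch below 50% success and one with no build reports -> respawn
print(should_respawn([
    {'projA': 'success', 'projB': 'failed', 'projC': 'failed'},
    {'projA': 'success', 'projB': 'success'},
    {},
]))
```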
+ + Returns: + None """ # cleanup previous preloading complete tasks and events # start all preload tasks @@ -309,12 +645,13 @@ async def _exec_preloaders( ) f = preloader_obj.compute(**preloader_compute_kwargs) self._preload_completion_conditions[msg_obj.epochId][preloader.task_type] = f - for project_config in self.projects_config: + + for project_type, project_config in self._project_type_config_mapping.items(): if not project_config.preload_tasks: # release for snapshotting asyncio.ensure_future( self._distribute_callbacks_snapshotting( - project_config.project_type, msg_obj, + project_type, msg_obj, ), ) continue @@ -326,6 +663,12 @@ async def _exec_preloaders( ) async def _epoch_release_processor(self, message: IncomingMessage): + """ + This method is called when an epoch is released. It enables pending projects for the epoch and executes preloaders. + + Args: + message (IncomingMessage): The message containing the epoch information. + """ try: msg_obj: EpochBase = ( EpochBase.parse_raw(message.body) @@ -346,8 +689,19 @@ async def _epoch_release_processor(self, message: IncomingMessage): ) asyncio.ensure_future(self._exec_preloaders(msg_obj=msg_obj)) + asyncio.ensure_future(self._epoch_processing_health_check(msg_obj.epochId)) async def _distribute_callbacks_snapshotting(self, project_type: str, epoch: EpochBase): + """ + Distributes callbacks for snapshotting to the appropriate snapshotters based on the project type and epoch. + + Args: + project_type (str): The type of project. + epoch (EpochBase): The epoch to snapshot. + + Returns: + None + """ # send to snapshotters to get the balances of the addresses queuing_tasks = [] @@ -359,6 +713,28 @@ async def _distribute_callbacks_snapshotting(self, project_type: str, epoch: Epo project_config = self._project_type_config_mapping[project_type] + # handling bulk mode projects + if project_config.bulk_mode: + process_unit = PowerloomSnapshotProcessMessage( + begin=epoch.begin, + end=epoch.end, + epochId=epoch.epochId, + bulk_mode=True, + ) + + msg_body = Message(process_unit.json().encode('utf-8')) + await exchange.publish( + routing_key=f'powerloom-backend-callback:{settings.namespace}' + f':{settings.instance_id}:EpochReleased.{project_type}', + message=msg_body, + ) + + self._logger.debug( + 'Sent out message to be processed by worker' + f' {project_type} : {process_unit}', + ) + return + # handling projects with no data sources if project_config.projects is None: project_id = f'{project_type}:{settings.namespace}' if project_id.lower() in self._newly_added_projects: @@ -385,6 +761,7 @@ async def _distribute_callbacks_snapshotting(self, project_type: str, epoch: Epo ) return + # handling projects with data sources for project in project_config.projects: project_id = f'{project_type}:{project}:{settings.namespace}' @@ -433,31 +810,43 @@ async def _distribute_callbacks_snapshotting(self, project_type: str, epoch: Epo ) async def _enable_pending_projects_for_epoch(self, epoch_id) -> Set[str]: + """ + Enables pending projects for the given epoch ID and returns a set of project IDs that were allowed. + + :param epoch_id: The epoch ID for which to enable pending projects. + :type epoch_id: Any + :return: A set of project IDs that were allowed. 
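In bulk mode, `_distribute_callbacks_snapshotting` above publishes a single `PowerloomSnapshotProcessMessage` covering the whole epoch per project type, instead of one message per configured data source; either way the routing key encodes the namespace, instance ID and project type. A sketch of the routing-key scheme and the bulk-mode payload, with placeholder namespace, instance ID and project type:

```python
import json

namespace = 'UNISWAPV2'                      # placeholder for settings.namespace
instance_id = '0xabc...def'                  # placeholder for settings.instance_id
project_type = 'pairContract_trade_volume'   # illustrative project type

routing_key = (
    f'powerloom-backend-callback:{namespace}'
    f':{instance_id}:EpochReleased.{project_type}'
)

# bulk-mode process message: one unit of work for the whole epoch,
# mirroring PowerloomSnapshotProcessMessage(begin, end, epochId, bulk_mode=True)
bulk_process_unit = {
    'begin': 18_000_000,
    'end': 18_000_009,
    'epochId': 1234,
    'bulk_mode': True,
}

print(routing_key)
print(json.dumps(bulk_process_unit))
```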
+ :rtype: set + """ pending_project_msgs: List[PowerloomProjectsUpdatedMessage] = self._upcoming_project_changes.pop(epoch_id, []) if not pending_project_msgs: return set() else: for msg_obj in pending_project_msgs: # Update projects list - for project_config in self.projects_config: - type_ = project_config.project_type - if type_ in msg_obj.projectId: + for project_type, project_config in self._project_type_config_mapping.items(): + projects_set = set(project_config.projects) + if project_type in msg_obj.projectId: if project_config.projects is None: continue data_source = msg_obj.projectId.split(':')[-2] - data_source = '_'.join(to_checksum_address(d) for d in data_source.split('_')) if msg_obj.allowed: - project_config.projects.append(data_source) - project_config.projects = list(set(project_config.projects)) + projects_set.add(data_source) else: if data_source in project_config.projects: - items = set(project_config.projects) - items.remove(data_source) - project_config.projects = list(items) + projects_set.discard(data_source) + project_config.projects = list(projects_set) return set([msg.projectId.lower() for msg in pending_project_msgs if msg.allowed]) async def _update_all_projects(self, message: IncomingMessage): + """ + Updates all projects based on the incoming message. + + Args: + message (IncomingMessage): The incoming message containing the project updates. + """ + event_type = message.routing_key.split('.')[-1] if event_type == 'ProjectsUpdated': @@ -470,6 +859,15 @@ async def _update_all_projects(self, message: IncomingMessage): self._upcoming_project_changes[msg_obj.enableEpochId].append(msg_obj) async def _cache_and_forward_to_payload_commit_queue(self, message: IncomingMessage): + """ + Caches the snapshot data and forwards it to the payload commit queue. + + Args: + message (IncomingMessage): The incoming message containing the snapshot data. + + Returns: + None + """ event_type = message.routing_key.split('.')[-1] if event_type == 'SnapshotFinalized': @@ -479,6 +877,12 @@ async def _cache_and_forward_to_payload_commit_queue(self, message: IncomingMess else: return + # set project last finalized epoch in redis + await self._redis_conn.set( + project_last_finalized_epoch_key(msg_obj.projectId), + msg_obj.epochId, + ) + # Add to project finalized data zset await self._redis_conn.zadd( project_finalized_data_zset(project_id=msg_obj.projectId), @@ -522,6 +926,11 @@ async def _cache_and_forward_to_payload_commit_queue(self, message: IncomingMess ) async def _distribute_callbacks_aggregate(self, message: IncomingMessage): + """ + Distributes the callbacks for aggregation. + + :param message: IncomingMessage object containing the message to be processed. 
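The project-update handling in `_enable_pending_projects_for_epoch` above treats the configured data sources as a set keyed off the second-to-last `:`-separated segment of the project ID, adding or discarding entries based on the `allowed` flag. A minimal standalone version of that update step, with illustrative project IDs:

```python
def apply_project_update(current_sources, project_id, allowed):
    """Return the updated data-source list for one ProjectsUpdated message."""
    sources = set(current_sources)
    # data source is the second-to-last segment of the project ID,
    # e.g. '<type>:<data_source>:<namespace>'
    data_source = project_id.split(':')[-2]
    if allowed:
        sources.add(data_source)
    else:
        sources.discard(data_source)
    return list(sources)


print(apply_project_update(
    ['0xaaa...', '0xbbb...'],
    'pairContract_trade_volume:0xccc...:UNISWAPV2',  # illustrative project ID
    True,
))
```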
+ """ event_type = message.routing_key.split('.')[-1] try: if event_type != 'SnapshotSubmitted': @@ -551,7 +960,7 @@ async def _distribute_callbacks_aggregate(self, message: IncomingMessage): name=self._callback_exchange_name, ) for config in aggregator_config: - type_ = config.project_type + task_type = config.project_type if config.aggregate_on == AggregateOn.single_project: if config.filters.projectId not in process_unit.projectId: self._logger.trace(f'projectId mismatch {process_unit.projectId} {config.filters.projectId}') @@ -560,7 +969,7 @@ async def _distribute_callbacks_aggregate(self, message: IncomingMessage): rabbitmq_publish_tasks.append( exchange.publish( routing_key=f'powerloom-backend-callback:{settings.namespace}:' - f'{settings.instance_id}:CalculateAggregate.{type_}', + f'{settings.instance_id}:CalculateAggregate.{task_type}', message=Message(process_unit.json().encode('utf-8')), ), ) @@ -612,7 +1021,7 @@ async def _distribute_callbacks_aggregate(self, message: IncomingMessage): rabbitmq_publish_tasks.append( exchange.publish( routing_key=f'powerloom-backend-callback:{settings.namespace}' - f':{settings.instance_id}:CalculateAggregate.{type_}', + f':{settings.instance_id}:CalculateAggregate.{task_type}', message=Message(final_msg.json().encode('utf-8')), ), ) @@ -633,6 +1042,9 @@ async def _distribute_callbacks_aggregate(self, message: IncomingMessage): await asyncio.gather(*rabbitmq_publish_tasks, return_exceptions=True) async def _cleanup_older_epoch_status(self, epoch_id: int): + """ + Deletes the epoch status keys for the epoch that is 30 epochs older than the given epoch_id. + """ tasks = [self._redis_conn.delete(epoch_id_epoch_released_key(epoch_id - 30))] delete_keys = list() for state in SnapshotterStates: @@ -643,6 +1055,15 @@ async def _cleanup_older_epoch_status(self, epoch_id: int): await asyncio.gather(*tasks, return_exceptions=True) async def _on_rabbitmq_message(self, message: IncomingMessage): + """ + Callback function to handle incoming RabbitMQ messages. + + Args: + message (IncomingMessage): The incoming RabbitMQ message. + + Returns: + None + """ await message.ack() message_type = message.routing_key.split('.')[-1] @@ -684,9 +1105,9 @@ async def _on_rabbitmq_message(self, message: IncomingMessage): ) elif message_type == 'ProjectsUpdated': await self._update_all_projects(message) - elif message_type == 'SnapshottersUpdated': + elif message_type == 'allSnapshottersUpdated': msg_cast = SnapshottersUpdatedEvent.parse_raw(message.body) - if msg_cast.snapshotterAddress == settings.instance_id: + if msg_cast.snapshotterAddress == to_checksum_address(settings.instance_id): if self._redis_conn: await self._redis_conn.set( active_status_key, @@ -704,6 +1125,15 @@ async def _on_rabbitmq_message(self, message: IncomingMessage): await self._redis_conn.close() async def _rabbitmq_consumer(self, loop): + """ + Consume messages from a RabbitMQ queue. + + Args: + loop: The event loop to use for the consumer. + + Returns: + None + """ async with self._rmq_channel_pool.acquire() as channel: await channel.set_qos(10) exchange = await channel.get_exchange( @@ -720,10 +1150,52 @@ async def _rabbitmq_consumer(self, loop): await q_obj.consume(self._on_rabbitmq_message) def run(self) -> None: + """ + Runs the ProcessorDistributor by setting resource limits, registering signal handlers, + initializing the worker, starting the RabbitMQ consumer, and running the event loop. 
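Both the `allSnapshottersUpdated` handler above and `snapshotter_id_ping.py` now normalise the configured instance ID with `to_checksum_address` before comparing it with addresses returned by the contract, so a lowercase address in settings still matches the checksummed value on chain. A short illustration with an arbitrary address:

```python
from eth_utils.address import to_checksum_address

configured_instance_id = '0xab5801a7d398351b8be11c439e05c5b3259aec9b'  # illustrative lowercase address
on_chain_snapshotter = to_checksum_address(configured_instance_id)     # checksummed form, as returned on chain

# a naive string comparison is case-sensitive and fails here...
print(configured_instance_id == on_chain_snapshotter)
# ...while comparing checksummed forms matches reliably
print(to_checksum_address(configured_instance_id) == on_chain_snapshotter)
```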
+ """ self._logger = logger.bind( module=f'Powerloom|Callbacks|ProcessDistributor:{settings.namespace}-{settings.instance_id}', ) + soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) + resource.setrlimit( + resource.RLIMIT_NOFILE, + (settings.rlimit.file_descriptors, hard), + ) + for signame in [SIGINT, SIGTERM, SIGQUIT]: + signal(signame, self._signal_handler) asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) + self._anchor_rpc_helper = RpcHelper( + rpc_settings=settings.anchor_chain_rpc, + ) + self._anchor_rpc_helper._load_web3_providers_and_rate_limits() + protocol_abi = read_json_file(settings.protocol_state.abi, self._logger) + self._protocol_state_contract = self._anchor_rpc_helper.get_current_node()['web3_client'].eth.contract( + address=to_checksum_address( + settings.protocol_state.address, + ), + abi=protocol_abi, + ) + try: + source_block_time = self._protocol_state_contract.functions.SOURCE_CHAIN_BLOCK_TIME().call() + except Exception as e: + self._logger.exception( + 'Exception in querying protocol state for source chain block time: {}', + e, + ) + else: + self._source_chain_block_time = source_block_time / 10 ** 4 + self._logger.debug('Set source chain block time to {}', self._source_chain_block_time) + + try: + epoch_size = self._protocol_state_contract.functions.EPOCH_SIZE().call() + except Exception as e: + self._logger.exception( + 'Exception in querying protocol state for epoch size: {}', + e, + ) + else: + self._epoch_size = epoch_size ev_loop = asyncio.get_event_loop() ev_loop.run_until_complete(self.init_worker()) diff --git a/snapshotter/project_id_generator.py b/snapshotter/project_id_generator.py deleted file mode 100644 index b0c0a8ba..00000000 --- a/snapshotter/project_id_generator.py +++ /dev/null @@ -1,132 +0,0 @@ -import hashlib - -from snapshotter.settings.config import aggregator_config -from snapshotter.settings.config import projects_config -from snapshotter.settings.config import settings -from snapshotter.utils.models.settings_model import AggregateOn - - -def generate_base_project_id(project_type, contract): - """ - Generate the base project ID based on the project type and contract. - - Args: - project_type (str): The type of the project. - contract (str): The contract associated with the project. - - Returns: - str: The generated base project ID. - """ - project_id = f'{project_type}:{contract}:{settings.namespace}' - return project_id - - -def generate_aggregation_single_type_project_id(type_, underlying_project): - """ - Generate the project ID for single project aggregation based on the type and underlying project. - - Args: - type_ (str): The type of the aggregation project. - underlying_project (str): The underlying project for aggregation. - - Returns: - str: The generated project ID for single project aggregation. - """ - contract = underlying_project.split(':')[-2] - project_id = f'{type_}:{contract}:{settings.namespace}' - return project_id - - -def generate_aggregation_multiple_type_project_id(type_, underlying_projects): - """ - Generate the project ID for multiple project aggregation based on the type and underlying projects. - - Args: - type_ (str): The type of the aggregation project. - underlying_projects (list): The list of underlying projects for aggregation. - - Returns: - str: The generated project ID for multiple project aggregation. 
- """ - unique_project_id = ''.join(sorted(underlying_projects)) - project_hash = hashlib.sha3_256(unique_project_id.encode()).hexdigest() - project_id = f'{type_}:{project_hash}:{settings.namespace}' - return project_id - - -def generate_all_projects(): - """ - Generate all projects based on the configuration. - - Returns: - list: The list of all generated project IDs. - """ - base_projects = [] - - for project_config in projects_config: - project_type = project_config.project_type - contracts = project_config.projects - for contract in contracts: - base_projects.append(generate_base_project_id(project_type, contract.lower())) - - aggregate_projects = [] - - for config in aggregator_config: - type_ = config.project_type - - if config.aggregate_on == AggregateOn.single_project: - for underlying_project in base_projects: - if config.filters.projectId not in underlying_project: - continue - # Adding to base projects because other aggregates might filter based on these ids - base_projects.append(generate_aggregation_single_type_project_id(type_, underlying_project)) - - elif config.aggregate_on == AggregateOn.multi_project: - aggregate_projects.append(generate_aggregation_multiple_type_project_id(type_, config.projects_to_wait_for)) - - total_projects = base_projects + aggregate_projects - return total_projects - - -def generate_projects_string(project_ids): - """ - Generate a string representation of project IDs. - - Args: - project_ids (list): The list of project IDs. - - Returns: - str: The string representation of project IDs. - """ - projects_string = '[' + ','.join('"' + project + '"' for project in project_ids) + ']' - return projects_string - - -def generate_enable_string(project_ids): - """ - Generate a string representation of project enable status. - - Args: - project_ids (list): The list of project IDs. - - Returns: - str: The string representation of project enable status. - """ - enable_string = '[' + ','.join('true' for _ in project_ids) + ']' - return enable_string - - -if __name__ == '__main__': - # Generate all projects - all_projects = generate_all_projects() - - # Print the total number of projects - print(len(all_projects)) - - # Generate and print the projects string - projects_string = generate_projects_string(all_projects) - print(projects_string) - - # Generate and print the enable string - enable_string = generate_enable_string(all_projects) - print(enable_string) diff --git a/snapshotter/protocol_state_loader_exporter.py b/snapshotter/protocol_state_loader_exporter.py index 4eaea510..f055a11e 100644 --- a/snapshotter/protocol_state_loader_exporter.py +++ b/snapshotter/protocol_state_loader_exporter.py @@ -112,11 +112,23 @@ async def _load_finalized_cids_from_contract(self, project_id, epoch_id_list, se return eid_cid_map async def _init_redis_pool(self): + """ + Initializes the Redis connection pool and populates it with connections. + """ self._aioredis_pool = RedisPoolCache(pool_size=1000) await self._aioredis_pool.populate() self._redis_conn = self._aioredis_pool._aioredis_pool async def _init_rpc_helper(self): + """ + Initializes the RPC helper and sets the protocol state contract. + + This method creates an instance of RpcHelper using the specified RPC settings. + It also reads the protocol ABI from the settings and sets the protocol state contract. 
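Although `project_id_generator.py` is deleted in this change, the project-ID conventions it encoded are the ones the rest of the code assumes: base snapshots keyed as `type:contract:namespace`, and multi-project aggregates keyed by a SHA3-256 digest of the sorted, concatenated underlying project IDs. A compact restatement of those conventions for reference; the namespace and type names are placeholders:

```python
import hashlib

namespace = 'UNISWAPV2'  # placeholder for settings.namespace


def base_project_id(project_type, contract):
    return f'{project_type}:{contract}:{namespace}'


def multi_project_aggregate_id(agg_type, underlying_project_ids):
    # digest of the sorted, concatenated underlying project IDs, as the
    # deleted generate_aggregation_multiple_type_project_id did
    digest = hashlib.sha3_256(''.join(sorted(underlying_project_ids)).encode()).hexdigest()
    return f'{agg_type}:{digest}:{namespace}'


base = base_project_id('pairContract_trade_volume', '0xabc...')       # illustrative type/contract
print(base)
print(multi_project_aggregate_id('aggregate_24h_top_pairs', [base]))  # illustrative aggregate type
```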
+ + Returns: + None + """ self._anchor_rpc_helper = RpcHelper(rpc_settings=settings.anchor_chain_rpc) protocol_abi = read_json_file(settings.protocol_state.abi, self._logger) self._protocol_state_contract = self._anchor_rpc_helper.get_current_node()['web3_client'].eth.contract( @@ -127,6 +139,10 @@ async def _init_rpc_helper(self): ) async def init(self): + """ + Initializes the object by setting up the logger, initializing the Redis pool, + initializing the RPC helper, and creating a bounded semaphore for protocol state queries. + """ self._logger = logger.bind( module=f'Powerloom|ProtocolStateLoader|{settings.namespace}-{settings.instance_id[:5]}', ) @@ -135,6 +151,13 @@ async def init(self): self._protocol_state_query_semaphore = asyncio.BoundedSemaphore(10) async def prelim_load(self): + """ + Performs preliminary loading of protocol state data. + + Returns: + Tuple: A tuple containing the current epoch ID, a dictionary mapping project IDs to their first epoch IDs, + and a list of all project IDs. + """ await self.init() state_query_call_tasks = [] cur_epoch_id_task = self._protocol_state_contract.functions.currentEpoch() @@ -165,7 +188,18 @@ async def prelim_load(self): return cur_epoch_id, project_id_first_epoch_id_map, all_project_ids def _export_project_state(self, project_id, first_epoch_id, end_epoch_id, redis_conn: redis.Redis) -> ProjectSpecificState: + """ + Export the project state for a specific project. + + Args: + project_id (str): The ID of the project. + first_epoch_id (int): The ID of the first epoch. + end_epoch_id (int): The ID of the last epoch. + redis_conn (redis.Redis): The Redis connection. + Returns: + ProjectSpecificState: The exported project state. + """ self._logger.debug('Exporting project state for {}', project_id) project_state = ProjectSpecificState.construct() project_state.first_epoch_id = first_epoch_id @@ -190,6 +224,16 @@ def _export_project_state(self, project_id, first_epoch_id, end_epoch_id, redis_ return project_state def export(self): + """ + Export the protocol state to a compressed JSON file. + + This method runs the preliminary load, retrieves the current epoch ID, project ID mapping, + and all project IDs. It then exports the project-specific states and finalized CIDs for each project. + The exported state is saved as a compressed JSON file named 'state.json.bz2'. + + Returns: + None + """ asyncio.get_event_loop().run_until_complete(self.prelim_load()) state = ProtocolState.construct() r = redis.Redis(**REDIS_CONN_CONF, max_connections=20, decode_responses=True) @@ -238,6 +282,17 @@ def export(self): self._logger.info('Exported state.json.bz2') def _load_project_state(self, project_id, project_state: ProjectSpecificState, redis_conn: redis.Redis): + """ + Loads the project state for a specific project. + + Args: + project_id (str): The ID of the project. + project_state (ProjectSpecificState): The project-specific state object. + redis_conn (redis.Redis): The Redis connection object. + + Returns: + None + """ self._logger.debug('Loading project state for {}', project_id) redis_conn.hset(project_first_epoch_hmap(), project_id, project_state.first_epoch_id) self._logger.debug('Loaded first epoch ID {} for project {}', project_state.first_epoch_id, project_id) @@ -252,6 +307,12 @@ def _load_project_state(self, project_id, project_state: ProjectSpecificState, r self._logger.debug('Loaded {} finalized CIDs for project {}', s, project_id) def load(self, file_name='state.json.bz2'): + """ + Loads the protocol state from a file. 
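The exporter above writes the assembled `ProtocolState` as bz2-compressed JSON (`state.json.bz2`) and the loader reads it back before repopulating Redis. A minimal sketch of that file round trip using only the standard library; the payload shown is a stand-in for the real serialized state:

```python
import bz2
import json

state = {'illustrative_epoch_id': 1234, 'projects': {}}  # stand-in for the exported ProtocolState

# write: serialize to JSON, compress with bz2
with bz2.open('state.json.bz2', 'wt') as f:
    json.dump(state, f)

# read: decompress and parse back into a dict
with bz2.open('state.json.bz2', 'rt') as f:
    loaded = json.load(f)

print(loaded == state)
```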
+ + Args: + file_name (str): The name of the file to load the state from. Default is 'state.json.bz2'. + """ asyncio.get_event_loop().run_until_complete(self.init()) r = redis.Redis(**REDIS_CONN_CONF, max_connections=20, decode_responses=True) self._logger.debug('Loading state from file {}', file_name) diff --git a/snapshotter/snapshotter_id_ping.py b/snapshotter/snapshotter_id_ping.py index 9455a348..079afdf6 100644 --- a/snapshotter/snapshotter_id_ping.py +++ b/snapshotter/snapshotter_id_ping.py @@ -1,6 +1,9 @@ import asyncio import sys + +from eth_utils.address import to_checksum_address from web3 import Web3 + from snapshotter.auth.helpers.redis_conn import RedisPoolCache from snapshotter.settings.config import settings from snapshotter.utils.file_utils import read_json_file @@ -9,6 +12,11 @@ async def main(): + """ + Checks if snapshotting is allowed for the given instance ID by querying the protocol state contract. + If snapshotting is allowed, sets the active status key in Redis to True and exits with code 0. + If snapshotting is not allowed, sets the active status key in Redis to False and exits with code 1. + """ aioredis_pool = RedisPoolCache(pool_size=1000) await aioredis_pool.populate() redis_conn = aioredis_pool._aioredis_pool @@ -22,16 +30,16 @@ async def main(): ) snapshotters_arr_query = await anchor_rpc.web3_call( [ - protocol_state_contract.functions.getAllSnapshotters(), + protocol_state_contract.functions.getSnapshotters(), ], - redis_conn + redis_conn, ) allowed_snapshotters = snapshotters_arr_query[0] - if settings.instance_id in allowed_snapshotters: + if to_checksum_address(settings.instance_id) in allowed_snapshotters: print('Snapshotting allowed...') await redis_conn.set( active_status_key, - int(True) + int(True), ) sys.exit(0) else: diff --git a/snapshotter/static/abis/ProtocolContract.json b/snapshotter/static/abis/ProtocolContract.json index c1cd5170..9ff03992 100644 --- a/snapshotter/static/abis/ProtocolContract.json +++ b/snapshotter/static/abis/ProtocolContract.json @@ -15,6 +15,11 @@ "internalType": "uint256", "name": "sourceChainBlockTime", "type": "uint256" + }, + { + "internalType": "bool", + "name": "useBlockNumberAsEpochId", + "type": "bool" } ], "stateMutability": "nonpayable", @@ -110,24 +115,6 @@ "name": "EpochReleased", "type": "event" }, - { - "inputs": [ - { - "internalType": "string", - "name": "projectId", - "type": "string" - }, - { - "internalType": "uint256", - "name": "epochId", - "type": "uint256" - } - ], - "name": "forceCompleteConsensusSnapshot", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, { "anonymous": false, "inputs": [ @@ -172,31 +159,6 @@ "name": "ProjectsUpdated", "type": "event" }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "begin", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "end", - "type": "uint256" - } - ], - "name": "releaseEpoch", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "renounceOwnership", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, { "anonymous": false, "inputs": [ @@ -277,7 +239,7 @@ { "indexed": false, "internalType": "address", - "name": "snapshotterAddress", + "name": "validatorAddress", "type": "address" }, { @@ -287,175 +249,161 @@ "type": "bool" } ], - "name": "SnapshottersUpdated", + "name": "ValidatorsUpdated", "type": "event" }, { + "anonymous": false, "inputs": [ { - "internalType": "string", - "name": "snapshotCid", - "type": "string" 
- }, - { - "internalType": "uint256", - "name": "epochId", - "type": "uint256" - }, - { - "internalType": "string", - "name": "projectId", - "type": "string" - }, - { - "components": [ - { - "internalType": "uint256", - "name": "deadline", - "type": "uint256" - }, - { - "internalType": "string", - "name": "snapshotCid", - "type": "string" - }, - { - "internalType": "uint256", - "name": "epochId", - "type": "uint256" - }, - { - "internalType": "string", - "name": "projectId", - "type": "string" - } - ], - "internalType": "struct PowerloomProtocolState.Request", - "name": "request", - "type": "tuple" + "indexed": false, + "internalType": "address", + "name": "snapshotterAddress", + "type": "address" }, { - "internalType": "bytes", - "name": "signature", - "type": "bytes" + "indexed": false, + "internalType": "bool", + "name": "allowed", + "type": "bool" } ], - "name": "submitSnapshot", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" + "name": "allSnapshottersUpdated", + "type": "event" }, { + "anonymous": false, "inputs": [ { + "indexed": false, "internalType": "address", - "name": "newOwner", + "name": "snapshotterAddress", "type": "address" + }, + { + "indexed": false, + "internalType": "bool", + "name": "allowed", + "type": "bool" } ], - "name": "transferOwnership", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" + "name": "masterSnapshottersUpdated", + "type": "event" }, { + "anonymous": false, "inputs": [ { + "indexed": false, + "internalType": "string", + "name": "projectId", + "type": "string" + }, + { + "indexed": false, + "internalType": "bool", + "name": "allowed", + "type": "bool" + }, + { + "indexed": false, "internalType": "uint256", - "name": "_minSubmissionsForConsensus", + "name": "enableEpochId", "type": "uint256" } ], - "name": "updateMinSnapshottersForConsensus", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" + "name": "pretestProjectsUpdated", + "type": "event" }, { - "inputs": [ - { - "internalType": "string[]", - "name": "_projects", - "type": "string[]" - }, + "inputs": [], + "name": "EPOCH_SIZE", + "outputs": [ { - "internalType": "bool[]", - "name": "_status", - "type": "bool[]" + "internalType": "uint8", + "name": "", + "type": "uint8" } ], - "name": "updateProjects", - "outputs": [], - "stateMutability": "nonpayable", + "stateMutability": "view", "type": "function" }, { - "inputs": [ + "inputs": [], + "name": "SOURCE_CHAIN_BLOCK_TIME", + "outputs": [ { "internalType": "uint256", - "name": "newsnapshotSubmissionWindow", + "name": "", "type": "uint256" } ], - "name": "updateSnapshotSubmissionWindow", - "outputs": [], - "stateMutability": "nonpayable", + "stateMutability": "view", "type": "function" }, { - "inputs": [ + "inputs": [], + "name": "SOURCE_CHAIN_ID", + "outputs": [ { - "internalType": "address[]", - "name": "_snapshotters", - "type": "address[]" - }, + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "USE_BLOCK_NUMBER_AS_EPOCH_ID", + "outputs": [ { - "internalType": "bool[]", - "name": "_status", - "type": "bool[]" + "internalType": "bool", + "name": "", + "type": "bool" } ], - "name": "updateSnapshotters", - "outputs": [], - "stateMutability": "nonpayable", + "stateMutability": "view", "type": "function" }, { "inputs": [ { - "internalType": "address[]", - "name": "_validators", - "type": "address[]" - }, + "internalType": "string", + "name": "", + "type": "string" + } + ], + 
"name": "allProjects", + "outputs": [ { - "internalType": "bool[]", - "name": "_status", - "type": "bool[]" + "internalType": "bool", + "name": "", + "type": "bool" } ], - "name": "updateValidators", - "outputs": [], - "stateMutability": "nonpayable", + "stateMutability": "view", "type": "function" }, { - "anonymous": false, "inputs": [ { - "indexed": false, "internalType": "address", - "name": "validatorAddress", + "name": "", "type": "address" - }, + } + ], + "name": "allSnapshotters", + "outputs": [ { - "indexed": false, "internalType": "bool", - "name": "allowed", + "name": "", "type": "bool" } ], - "name": "ValidatorsUpdated", - "type": "event" + "stateMutability": "view", + "type": "function" }, { "inputs": [ @@ -505,29 +453,53 @@ "type": "function" }, { - "inputs": [], - "name": "eip712Domain", - "outputs": [ - { - "internalType": "bytes1", - "name": "fields", - "type": "bytes1" - }, - { - "internalType": "string", - "name": "name", - "type": "string" - }, + "inputs": [ { "internalType": "string", - "name": "version", + "name": "", "type": "string" }, { "internalType": "uint256", - "name": "chainId", + "name": "", "type": "uint256" - }, + } + ], + "name": "currentFinalizedSnapshot", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "eip712Domain", + "outputs": [ + { + "internalType": "bytes1", + "name": "fields", + "type": "bytes1" + }, + { + "internalType": "string", + "name": "name", + "type": "string" + }, + { + "internalType": "string", + "name": "version", + "type": "string" + }, + { + "internalType": "uint256", + "name": "chainId", + "type": "uint256" + }, { "internalType": "address", "name": "verifyingContract", @@ -548,13 +520,29 @@ "type": "function" }, { - "inputs": [], - "name": "EPOCH_SIZE", - "outputs": [ + "inputs": [ { - "internalType": "uint8", + "internalType": "uint256", "name": "", - "type": "uint8" + "type": "uint256" + } + ], + "name": "epochInfo", + "outputs": [ + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "blocknumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "epochEnd", + "type": "uint256" } ], "stateMutability": "view", @@ -562,36 +550,80 @@ }, { "inputs": [ + { + "internalType": "string", + "name": "projectId", + "type": "string" + }, { "internalType": "uint256", - "name": "", + "name": "epochId", "type": "uint256" } ], - "name": "epochInfo", - "outputs": [ + "name": "forceCompleteConsensusSnapshot", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ { "internalType": "uint256", - "name": "timestamp", + "name": "begin", "type": "uint256" }, { "internalType": "uint256", - "name": "blocknumber", + "name": "end", + "type": "uint256" + } + ], + "name": "forceSkipEpoch", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "getBlock", + "outputs": [ + { + "internalType": "uint256", + "name": "", "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "string", + "name": "projectId", + "type": "string" }, { "internalType": "uint256", - "name": "epochEnd", + "name": "epochId", "type": "uint256" } ], + "name": "getFinalizedSnapshot", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], "stateMutability": "view", "type": "function" }, { 
"inputs": [], - "name": "getAllSnapshotters", + "name": "getMasterSnapshotters", "outputs": [ { "internalType": "address[]", @@ -602,6 +634,19 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "getPretestProjects", + "outputs": [ + { + "internalType": "string[]", + "name": "", + "type": "string[]" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [], "name": "getProjects", @@ -615,6 +660,32 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "getSnapshotters", + "outputs": [ + { + "internalType": "address[]", + "name": "", + "type": "address[]" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getTotalMasterSnapshotterCount", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [], "name": "getTotalSnapshotterCount", @@ -660,6 +731,25 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "name": "masterSnapshotters", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { @@ -742,12 +832,12 @@ "type": "string" } ], - "name": "projectFirstEpochId", + "name": "pretestProjects", "outputs": [ { - "internalType": "uint256", + "internalType": "bool", "name": "", - "type": "uint256" + "type": "bool" } ], "stateMutability": "view", @@ -761,12 +851,12 @@ "type": "string" } ], - "name": "projects", + "name": "projectFirstEpochId", "outputs": [ { - "internalType": "bool", + "internalType": "uint256", "name": "", - "type": "bool" + "type": "uint256" } ], "stateMutability": "view", @@ -796,6 +886,73 @@ "stateMutability": "pure", "type": "function" }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "begin", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "end", + "type": "uint256" + } + ], + "name": "releaseEpoch", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "renounceOwnership", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + }, + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "name": "snapshotStatus", + "outputs": [ + { + "internalType": "bool", + "name": "finalized", + "type": "bool" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "snapshotSubmissionWindow", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [ { @@ -858,87 +1015,184 @@ "inputs": [ { "internalType": "string", - "name": "", + "name": "snapshotCid", "type": "string" }, { "internalType": "uint256", - "name": "", + "name": "epochId", "type": "uint256" - } - ], - "name": "snapshotStatus", - "outputs": [ + }, { - "internalType": "bool", - "name": "finalized", - "type": "bool" + "internalType": "string", + "name": "projectId", + "type": "string" }, { - "internalType": "uint256", - "name": "timestamp", - "type": "uint256" + "components": [ + { + "internalType": "uint256", + "name": "deadline", + "type": "uint256" + }, + { + "internalType": "string", + 
"name": "snapshotCid", + "type": "string" + }, + { + "internalType": "uint256", + "name": "epochId", + "type": "uint256" + }, + { + "internalType": "string", + "name": "projectId", + "type": "string" + } + ], + "internalType": "struct PowerloomProtocolState.Request", + "name": "request", + "type": "tuple" + }, + { + "internalType": "bytes", + "name": "signature", + "type": "bytes" } ], - "stateMutability": "view", + "name": "submitSnapshot", + "outputs": [], + "stateMutability": "nonpayable", "type": "function" }, { - "inputs": [], - "name": "snapshotSubmissionWindow", - "outputs": [ + "inputs": [ { - "internalType": "uint256", - "name": "", - "type": "uint256" + "internalType": "address", + "name": "newOwner", + "type": "address" } ], - "stateMutability": "view", + "name": "transferOwnership", + "outputs": [], + "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ { - "internalType": "address", - "name": "", - "type": "address" + "internalType": "string[]", + "name": "_projects", + "type": "string[]" + }, + { + "internalType": "bool[]", + "name": "_status", + "type": "bool[]" } ], - "name": "snapshotters", - "outputs": [ + "name": "updateAllProjects", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ { - "internalType": "bool", - "name": "", - "type": "bool" + "internalType": "address[]", + "name": "_snapshotters", + "type": "address[]" + }, + { + "internalType": "bool[]", + "name": "_status", + "type": "bool[]" } ], - "stateMutability": "view", + "name": "updateAllSnapshotters", + "outputs": [], + "stateMutability": "nonpayable", "type": "function" }, { - "inputs": [], - "name": "SOURCE_CHAIN_BLOCK_TIME", - "outputs": [ + "inputs": [ + { + "internalType": "address[]", + "name": "_snapshotters", + "type": "address[]" + }, + { + "internalType": "bool[]", + "name": "_status", + "type": "bool[]" + } + ], + "name": "updateMasterSnapshotters", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ { "internalType": "uint256", - "name": "", + "name": "_minSubmissionsForConsensus", "type": "uint256" } ], - "stateMutability": "view", + "name": "updateMinSnapshottersForConsensus", + "outputs": [], + "stateMutability": "nonpayable", "type": "function" }, { - "inputs": [], - "name": "SOURCE_CHAIN_ID", - "outputs": [ + "inputs": [ + { + "internalType": "string[]", + "name": "_projects", + "type": "string[]" + }, + { + "internalType": "bool[]", + "name": "_status", + "type": "bool[]" + } + ], + "name": "updatePretestProjects", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ { "internalType": "uint256", - "name": "", + "name": "newsnapshotSubmissionWindow", "type": "uint256" } ], - "stateMutability": "view", + "name": "updateSnapshotSubmissionWindow", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address[]", + "name": "_validators", + "type": "address[]" + }, + { + "internalType": "bool[]", + "name": "_status", + "type": "bool[]" + } + ], + "name": "updateValidators", + "outputs": [], + "stateMutability": "nonpayable", "type": "function" }, { diff --git a/snapshotter/static/docs/assets/OverallArchitecture.png b/snapshotter/static/docs/assets/OverallArchitecture.png index a962f729..ff4c22f0 100644 Binary files a/snapshotter/static/docs/assets/OverallArchitecture.png and b/snapshotter/static/docs/assets/OverallArchitecture.png differ diff --git 
a/snapshotter/static/docs/assets/SnapshotterArchitecture.png b/snapshotter/static/docs/assets/SnapshotterArchitecture.png new file mode 100644 index 00000000..cc35eae8 Binary files /dev/null and b/snapshotter/static/docs/assets/SnapshotterArchitecture.png differ diff --git a/snapshotter/static/docs/assets/SnapshotterSwaggerUI.png b/snapshotter/static/docs/assets/SnapshotterSwaggerUI.png new file mode 100644 index 00000000..e2e10d30 Binary files /dev/null and b/snapshotter/static/docs/assets/SnapshotterSwaggerUI.png differ diff --git a/snapshotter/system_event_detector.py b/snapshotter/system_event_detector.py index fdace168..e62b15a5 100644 --- a/snapshotter/system_event_detector.py +++ b/snapshotter/system_event_detector.py @@ -2,6 +2,7 @@ import json import multiprocessing import queue +import resource import signal import sys import threading @@ -25,15 +26,22 @@ from snapshotter.utils.rabbitmq_helpers import RabbitmqThreadedSelectLoopInteractor from snapshotter.utils.redis.redis_conn import RedisPoolCache from snapshotter.utils.redis.redis_keys import event_detector_last_processed_block +from snapshotter.utils.redis.redis_keys import last_epoch_detected_timestamp_key from snapshotter.utils.rpc import get_event_sig_and_abi from snapshotter.utils.rpc import RpcHelper def rabbitmq_and_redis_cleanup(fn): """ - A decorator that wraps the provided function and handles cleaning up RabbitMQ and Redis resources before exiting. - """ + A decorator function that wraps the given function and handles cleanup of RabbitMQ and Redis connections in case of + a GenericExitOnSignal or KeyboardInterrupt exception. + + Args: + fn: The function to be wrapped. + Returns: + The wrapped function. + """ @wraps(fn) def wrapper(self, *args, **kwargs): try: @@ -74,10 +82,28 @@ class EventDetectorProcess(multiprocessing.Process): def __init__(self, name, **kwargs): """ - Initializes a new instance of the `EpochDetectorProcess` class. - - Arguments: - name -- the name of the process + Initializes the SystemEventDetector class. + + Args: + name (str): The name of the process. + **kwargs: Additional keyword arguments to be passed to the multiprocessing.Process class. + + Attributes: + _rabbitmq_thread (threading.Thread): The RabbitMQ thread. + _rabbitmq_queue (queue.Queue): The RabbitMQ queue. + _shutdown_initiated (bool): A flag indicating whether shutdown has been initiated. + _logger (logging.Logger): The logger instance. + _exchange (str): The exchange name. + _routing_key_prefix (str): The routing key prefix. + _aioredis_pool (None): The aioredis pool. + _redis_conn (None): The redis connection. + _last_processed_block (None): The last processed block. + rpc_helper (RpcHelper): The RpcHelper instance. + contract_abi (dict): The contract ABI. + contract_address (str): The contract address. + contract (web3.eth.Contract): The contract instance. + event_sig (dict): The event signature. + event_abi (dict): The event ABI. 
""" multiprocessing.Process.__init__(self, name=name, **kwargs) self._rabbitmq_thread: threading.Thread @@ -111,23 +137,23 @@ def __init__(self, name, **kwargs): abi=self.contract_abi, ) -# event EpochReleased(uint256 indexed epochId, uint256 begin, uint256 end, uint256 timestamp); -# event SnapshotFinalized(uint256 indexed epochId, uint256 epochEnd, string projectId, -# string snapshotCid, uint256 timestamp); -# event ProjectsUpdated(string projectId, bool allowed); + # event EpochReleased(uint256 indexed epochId, uint256 begin, uint256 end, uint256 timestamp); + # event SnapshotFinalized(uint256 indexed epochId, uint256 epochEnd, string projectId, + # string snapshotCid, uint256 timestamp); + # event ProjectsUpdated(string projectId, bool allowed); EVENTS_ABI = { 'EpochReleased': self.contract.events.EpochReleased._get_event_abi(), 'SnapshotFinalized': self.contract.events.SnapshotFinalized._get_event_abi(), 'ProjectsUpdated': self.contract.events.ProjectsUpdated._get_event_abi(), - 'SnapshottersUpdated': self.contract.events.SnapshottersUpdated._get_event_abi(), + 'allSnapshottersUpdated': self.contract.events.allSnapshottersUpdated._get_event_abi(), } EVENT_SIGS = { 'EpochReleased': 'EpochReleased(uint256,uint256,uint256,uint256)', 'SnapshotFinalized': 'SnapshotFinalized(uint256,uint256,string,string,uint256)', 'ProjectsUpdated': 'ProjectsUpdated(string,bool,uint256)', - 'SnapshottersUpdated': 'SnapshottersUpdated(address,bool)', + 'allSnapshottersUpdated': 'allSnapshottersUpdated(address,bool)', } @@ -137,20 +163,26 @@ def __init__(self, name, **kwargs): ) async def _init_redis_pool(self): + """ + Initializes the Redis connection pool if it hasn't been initialized yet. + """ if not self._aioredis_pool: self._aioredis_pool = RedisPoolCache() await self._aioredis_pool.populate() self._redis_conn = self._aioredis_pool._aioredis_pool async def get_events(self, from_block: int, to_block: int): - """Get the events from the block range. + """ + Retrieves events from the blockchain for the given block range and returns them as a list of tuples. + Each tuple contains the event name and an object representing the event data. - Arguments: - int : from block - int: to block + Args: + from_block (int): The starting block number. + to_block (int): The ending block number. Returns: - list : (type, event) + List[Tuple[str, Any]]: A list of tuples, where each tuple contains the event name + and an object representing the event data. 
""" events_log = await self.rpc_helper.get_events_logs( **{ @@ -164,6 +196,7 @@ async def get_events(self, from_block: int, to_block: int): ) events = [] + new_epoch_detected = False for log in events_log: if log.event == 'EpochReleased': event = EpochReleasedEvent( @@ -172,6 +205,7 @@ async def get_events(self, from_block: int, to_block: int): epochId=log.args.epochId, timestamp=log.args.timestamp, ) + new_epoch_detected = True events.append((log.event, event)) elif log.event == 'SnapshotFinalized': @@ -191,7 +225,7 @@ async def get_events(self, from_block: int, to_block: int): timestamp=int(time.time()), ) events.append((log.event, event)) - elif log.event == 'SnapshottersUpdated': + elif log.event == 'allSnapshottersUpdated': event = SnapshottersUpdatedEvent( snapshotterAddress=log.args.snapshotterAddress, allowed=log.args.allowed, @@ -199,10 +233,22 @@ async def get_events(self, from_block: int, to_block: int): ) events.append((log.event, event)) + if new_epoch_detected: + await self._redis_conn.set( + last_epoch_detected_timestamp_key(), + int(time.time()), + ) + self._logger.info('Events: {}', events) return events def _interactor_wrapper(self, q: queue.Queue): # run in a separate thread + """ + A wrapper method that runs in a separate thread and initializes a RabbitmqThreadedSelectLoopInteractor object. + + Args: + - q: A queue.Queue object that is used to publish messages to RabbitMQ. + """ self._rabbitmq_interactor = RabbitmqThreadedSelectLoopInteractor( publish_queue=q, consumer_worker_name=self.name, @@ -210,6 +256,16 @@ def _interactor_wrapper(self, q: queue.Queue): # run in a separate thread self._rabbitmq_interactor.run() # blocking def _generic_exit_handler(self, signum, sigframe): + """ + Handles the generic exit signal and initiates shutdown. + + Args: + signum (int): The signal number. + sigframe (object): The signal frame. + + Raises: + GenericExitOnSignal: If the shutdown is initiated. + """ if ( signum in [SIGINT, SIGTERM, SIGQUIT] and not self._shutdown_initiated @@ -219,7 +275,13 @@ def _generic_exit_handler(self, signum, sigframe): raise GenericExitOnSignal def _broadcast_event(self, event_type: str, event: EventBase): - """Broadcast event to the RabbitMQ queue and save update in redis.""" + """ + Broadcasts the given event to the RabbitMQ queue. + + Args: + event_type (str): The type of the event being broadcasted. + event (EventBase): The event being broadcasted. + """ self._logger.info('Broadcasting event: {}', event) brodcast_msg = ( event.json().encode('utf-8'), @@ -229,6 +291,11 @@ def _broadcast_event(self, event_type: str, event: EventBase): self._rabbitmq_queue.put(brodcast_msg) async def _detect_events(self): + """ + Continuously detects events by fetching the current block and comparing it to the last processed block. + If the last processed block is too far behind the current block, it processes the current block and broadcasts the events. + The last processed block is saved in Redis for future reference. + """ while True: try: current_block = await self.rpc_helper.get_current_block(redis_conn=self._redis_conn) @@ -326,6 +393,19 @@ async def _detect_events(self): @rabbitmq_and_redis_cleanup def run(self): + """ + A class for detecting system events using RabbitMQ and Redis. + + Methods: + -------- + run() + Starts the event detection process. 
+ """ + soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) + resource.setrlimit( + resource.RLIMIT_NOFILE, + (settings.rlimit.file_descriptors, hard), + ) for signame in [signal.SIGINT, signal.SIGTERM, signal.SIGQUIT]: signal.signal(signame, self._generic_exit_handler) self._rabbitmq_thread = threading.Thread( diff --git a/snapshotter/tests/test_web3_async_provider.py b/snapshotter/tests/test_web3_async_provider.py index 6d6628b8..39e90f5f 100644 --- a/snapshotter/tests/test_web3_async_provider.py +++ b/snapshotter/tests/test_web3_async_provider.py @@ -4,7 +4,7 @@ from aiohttp import ClientSession from aiohttp import ClientTimeout from aiohttp import TCPConnector -from eth_utils import to_checksum_address +from eth_utils.address import to_checksum_address from web3 import AsyncHTTPProvider from web3 import HTTPProvider from web3 import Web3 diff --git a/snapshotter/utils/aggregation_worker.py b/snapshotter/utils/aggregation_worker.py index e75e3c05..bcaa72d7 100644 --- a/snapshotter/utils/aggregation_worker.py +++ b/snapshotter/utils/aggregation_worker.py @@ -32,6 +32,13 @@ class AggregationAsyncWorker(GenericAsyncWorker): _ipfs_reader_client: AsyncIPFSClient def __init__(self, name, **kwargs): + """ + Initializes an instance of AggregationAsyncWorker. + + Args: + name (str): The name of the worker. + **kwargs: Additional keyword arguments to be passed to the parent class constructor. + """ self._q = f'powerloom-backend-cb-aggregate:{settings.namespace}:{settings.instance_id}' self._rmq_routing = f'powerloom-backend-callback:{settings.namespace}' f':{settings.instance_id}:CalculateAggregate.*' @@ -41,7 +48,6 @@ def __init__(self, name, **kwargs): self._single_project_types = set() self._multi_project_types = set() self._task_types = set() - self._ipfs_singleton = None for config in aggregator_config: if config.aggregate_on == AggregateOn.single_project: @@ -50,35 +56,77 @@ def __init__(self, name, **kwargs): self._multi_project_types.add(config.project_type) self._task_types.add(config.project_type) - def _gen_single_type_project_id(self, type_, epoch): + def _gen_single_type_project_id(self, task_type, epoch): + """ + Generates a project ID for a single task type and epoch. + + Args: + task_type (str): The task type. + epoch (Epoch): The epoch object. + + Returns: + str: The generated project ID. + """ data_source = epoch.projectId.split(':')[-2] - project_id = f'{type_}:{data_source}:{settings.namespace}' + project_id = f'{task_type}:{data_source}:{settings.namespace}' return project_id - def _gen_multiple_type_project_id(self, type_, epoch): + def _gen_multiple_type_project_id(self, task_type, epoch): + """ + Generates a unique project ID based on the task type and epoch messages. + + Args: + task_type (str): The type of task. + epoch (Epoch): The epoch object containing messages. + Returns: + str: The generated project ID. 
+ """ underlying_project_ids = [project.projectId for project in epoch.messages] unique_project_id = ''.join(sorted(underlying_project_ids)) project_hash = hashlib.sha3_256(unique_project_id.encode()).hexdigest() - project_id = f'{type_}:{project_hash}:{settings.namespace}' + project_id = f'{task_type}:{project_hash}:{settings.namespace}' return project_id - def _gen_project_id(self, type_, epoch): - if type_ in self._single_project_types: - return self._gen_single_type_project_id(type_, epoch) - elif type_ in self._multi_project_types: - return self._gen_multiple_type_project_id(type_, epoch) + def _gen_project_id(self, task_type, epoch): + """ + Generates a project ID based on the given task type and epoch. + + Args: + task_type (str): The type of task. + epoch (int): The epoch number. + + Returns: + str: The generated project ID. + + Raises: + ValueError: If the task type is unknown. + """ + if task_type in self._single_project_types: + return self._gen_single_type_project_id(task_type, epoch) + elif task_type in self._multi_project_types: + return self._gen_multiple_type_project_id(task_type, epoch) else: - raise ValueError(f'Unknown project type {type_}') + raise ValueError(f'Unknown project type {task_type}') async def _processor_task( self, msg_obj: Union[PowerloomSnapshotSubmittedMessage, PowerloomCalculateAggregateMessage], task_type: str, ): - """Function used to process the received message object.""" + """ + Process the given message object and task type. + + Args: + msg_obj (Union[PowerloomSnapshotSubmittedMessage, PowerloomCalculateAggregateMessage]): + The message object to be processed. + task_type (str): The type of task to be performed. + + Returns: + None + """ self._logger.debug( 'Processing callback: {}', msg_obj, ) @@ -112,7 +160,7 @@ async def _processor_task( rpc_helper=self._rpc_helper, anchor_rpc_helper=self._anchor_rpc_helper, ipfs_reader=self._ipfs_reader_client, - protocol_state_contract=self.protocol_state_contract, + protocol_state_contract=self._protocol_state_contract, project_id=project_id, ) @@ -121,7 +169,7 @@ async def _processor_task( snapshot = each_lambda(snapshot, msg_obj) except Exception as e: - self._logger.opt(exception=True).error( + self._logger.opt(exception=settings.logs.trace_enabled).error( 'Exception processing callback for epoch: {}, Error: {},' 'sending failure notifications', msg_obj, e, ) @@ -148,26 +196,63 @@ async def _processor_task( }, ) else: - await self._send_payload_commit_service_queue( - type_=task_type, - project_id=project_id, - epoch=msg_obj, - snapshot=snapshot, - storage_flag=settings.web3storage.upload_aggregates, - ) - await self._redis_conn.hset( - name=epoch_id_project_to_state_mapping( - epoch_id=msg_obj.epochId, state_id=SnapshotterStates.SNAPSHOT_BUILD.value, - ), - mapping={ - project_id: SnapshotterStateUpdate( - status='success', timestamp=int(time.time()), - ).json(), - }, + if not snapshot: + await self._redis_conn.hset( + name=epoch_id_project_to_state_mapping( + epoch_id=msg_obj.epochId, state_id=SnapshotterStates.SNAPSHOT_BUILD.value, + ), + mapping={ + project_id: SnapshotterStateUpdate( + status='failed', timestamp=int(time.time()), error='Empty snapshot', + ).json(), + }, + ) + notification_message = SnapshotterIssue( + instanceID=settings.instance_id, + issueType=SnapshotterReportState.MISSED_SNAPSHOT.value, + projectID=project_id, + epochId=str(msg_obj.epochId), + timeOfReporting=str(time.time()), + extra=json.dumps({'issueDetails': 'Error : Empty snapshot'}), + ) + await 
send_failure_notifications_async( + client=self._client, message=notification_message, + ) + else: + await self._redis_conn.hset( + name=epoch_id_project_to_state_mapping( + epoch_id=msg_obj.epochId, state_id=SnapshotterStates.SNAPSHOT_BUILD.value, + ), + mapping={ + project_id: SnapshotterStateUpdate( + status='success', timestamp=int(time.time()), + ).json(), + }, + ) + await self._commit_payload( + task_type=task_type, + project_id=project_id, + epoch=msg_obj, + snapshot=snapshot, + storage_flag=settings.web3storage.upload_aggregates, + _ipfs_writer_client=self._ipfs_writer_client, + ) + self._logger.debug( + 'Updated epoch processing status in aggregation worker for project {} for transition {}', + project_id, SnapshotterStates.SNAPSHOT_BUILD.value, ) await self._redis_conn.close() async def _on_rabbitmq_message(self, message: IncomingMessage): + """ + Callback function to handle incoming RabbitMQ messages. + + Args: + message (IncomingMessage): The incoming RabbitMQ message. + + Returns: + None + """ task_type = message.routing_key.split('.')[-1] if task_type not in self._task_types: return @@ -180,11 +265,9 @@ async def _on_rabbitmq_message(self, message: IncomingMessage): # TODO: Update based on new single project based design if task_type in self._single_project_types: try: - msg_obj: PowerloomSnapshotSubmittedMessage = ( - PowerloomSnapshotSubmittedMessage.parse_raw(message.body) - ) + msg_obj: PowerloomSnapshotSubmittedMessage = PowerloomSnapshotSubmittedMessage.parse_raw(message.body) except ValidationError as e: - self._logger.opt(exception=True).error( + self._logger.opt(exception=settings.logs.trace_enabled).error( ( 'Bad message structure of callback processor. Error: {}' ), @@ -192,7 +275,7 @@ async def _on_rabbitmq_message(self, message: IncomingMessage): ) return except Exception as e: - self._logger.opt(exception=True).error( + self._logger.opt(exception=settings.logs.trace_enabled).error( ( 'Unexpected message structure of callback in processor. Error: {}' ), @@ -205,7 +288,7 @@ async def _on_rabbitmq_message(self, message: IncomingMessage): PowerloomCalculateAggregateMessage.parse_raw(message.body) ) except ValidationError as e: - self._logger.opt(exception=True).error( + self._logger.opt(exception=settings.logs.trace_enabled).error( ( 'Bad message structure of callback processor. Error: {}' ), @@ -213,7 +296,7 @@ async def _on_rabbitmq_message(self, message: IncomingMessage): ) return except Exception as e: - self._logger.opt(exception=True).error( + self._logger.opt(exception=settings.logs.trace_enabled).error( ( 'Unexpected message structure of callback in processor. Error: {}' ), @@ -228,6 +311,10 @@ async def _on_rabbitmq_message(self, message: IncomingMessage): asyncio.ensure_future(self._processor_task(msg_obj=msg_obj, task_type=task_type)) async def _init_project_calculation_mapping(self): + """ + Initializes the project calculation mapping by importing the processor module and class for each project type + specified in the aggregator and projects configuration. Raises an exception if a duplicate project type is found. 
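Editor's note: `_init_project_calculation_mapping`, documented above, builds a dictionary from project/aggregate type to an instantiated compute class by importing each configured module at runtime, the same pattern the delegate worker uses later in this diff. A minimal sketch of that dynamic-import pattern; `ProcessorConfig` and the config list are stand-ins for the real `aggregator_config`/`projects_config` models, and only the importlib mechanics are the point:

```python
# Minimal sketch of the dynamic processor mapping described above.
# ProcessorConfig is an illustrative stand-in for the real config models.
import importlib
from dataclasses import dataclass


@dataclass
class ProcessorConfig:
    project_type: str
    module: str       # e.g. 'snapshotter.modules.computes.trade_volume' (illustrative)
    class_name: str   # e.g. 'TradeVolumeProcessor' (illustrative)


def build_calculation_mapping(configs: list) -> dict:
    mapping = {}
    for cfg in configs:
        if cfg.project_type in mapping:
            raise ValueError(f'Duplicate project type {cfg.project_type}')
        module = importlib.import_module(cfg.module)      # import the compute module
        class_ = getattr(module, cfg.class_name)          # resolve the compute class
        mapping[cfg.project_type] = class_()               # one instance per project type
    return mapping
```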
+ """ if self._project_calculation_mapping is not None: return @@ -248,13 +335,18 @@ async def _init_project_calculation_mapping(self): self._project_calculation_mapping[key] = class_() async def _init_ipfs_client(self): - if not self._ipfs_singleton: - self._ipfs_singleton = AsyncIPFSClientSingleton(settings.ipfs) - await self._ipfs_singleton.init_sessions() - self._ipfs_writer_client = self._ipfs_singleton._ipfs_write_client - self._ipfs_reader_client = self._ipfs_singleton._ipfs_read_client + """ + Initializes the IPFS client and sets the write and read clients for the class. + """ + self._ipfs_singleton = AsyncIPFSClientSingleton(settings.ipfs) + await self._ipfs_singleton.init_sessions() + self._ipfs_writer_client = self._ipfs_singleton._ipfs_write_client + self._ipfs_reader_client = self._ipfs_singleton._ipfs_read_client async def init_worker(self): + """ + Initializes the worker by initializing project calculation mapping, IPFS client, and other necessary components. + """ if not self._initialized: await self._init_project_calculation_mapping() await self._init_ipfs_client() diff --git a/snapshotter/utils/callback_helpers.py b/snapshotter/utils/callback_helpers.py index 78dff227..40fc47ec 100644 --- a/snapshotter/utils/callback_helpers.py +++ b/snapshotter/utils/callback_helpers.py @@ -1,9 +1,9 @@ import asyncio +import functools from abc import ABC from abc import ABCMeta from abc import abstractmethod from abc import abstractproperty -import functools from typing import Any from typing import Dict from typing import Union @@ -31,6 +31,9 @@ async def get_rabbitmq_robust_connection_async(): + """ + Returns a robust connection to RabbitMQ server using the settings specified in the configuration file. + """ return await aio_pika.connect_robust( host=settings.rabbitmq.host, port=settings.rabbitmq.port, @@ -41,6 +44,11 @@ async def get_rabbitmq_robust_connection_async(): async def get_rabbitmq_basic_connection_async(): + """ + Returns an async connection to RabbitMQ using the settings specified in the config file. + + :return: An async connection to RabbitMQ. + """ return await aio_pika.connect( host=settings.rabbitmq.host, port=settings.rabbitmq.port, @@ -51,11 +59,29 @@ async def get_rabbitmq_basic_connection_async(): async def get_rabbitmq_channel(connection_pool) -> aio_pika.Channel: + """ + Acquires a connection from the connection pool and returns a channel object for RabbitMQ communication. + + Args: + connection_pool: An instance of `aio_pika.pool.Pool`. + + Returns: + An instance of `aio_pika.Channel`. + """ async with connection_pool.acquire() as connection: return await connection.channel() def misc_notification_callback_result_handler(fut: asyncio.Future): + """ + Handles the result of a callback or notification. + + Args: + fut (asyncio.Future): The future object representing the callback or notification. + + Returns: + None + """ try: r = fut.result() except Exception as e: @@ -70,6 +96,15 @@ def misc_notification_callback_result_handler(fut: asyncio.Future): def sync_notification_callback_result_handler(f: functools.partial): + """ + Handles the result of a synchronous notification callback. + + Args: + f (functools.partial): The function to handle. + + Returns: + None + """ try: result = f() except Exception as exc: @@ -84,6 +119,16 @@ def sync_notification_callback_result_handler(f: functools.partial): async def send_failure_notifications_async(client: AsyncClient, message: BaseModel): + """ + Sends failure notifications to the configured reporting services. 
+ + Args: + client (AsyncClient): The async HTTP client to use for sending notifications. + message (BaseModel): The message to send as notification. + + Returns: + None + """ if settings.reporting.service_url: f = asyncio.ensure_future( client.post( @@ -104,6 +149,16 @@ async def send_failure_notifications_async(client: AsyncClient, message: BaseMod def send_failure_notifications_sync(client: SyncClient, message: BaseModel): + """ + Sends failure notifications synchronously to the reporting service and/or Slack. + + Args: + client (SyncClient): The HTTP client to use for sending notifications. + message (BaseModel): The message to send as notification. + + Returns: + None + """ if settings.reporting.service_url: f = functools.partial( client.post, diff --git a/snapshotter/utils/data_utils.py b/snapshotter/utils/data_utils.py index 05ac5032..821c76a2 100644 --- a/snapshotter/utils/data_utils.py +++ b/snapshotter/utils/data_utils.py @@ -28,16 +28,39 @@ from snapshotter.utils.redis.redis_keys import source_chain_block_time_key from snapshotter.utils.redis.redis_keys import source_chain_epoch_size_key from snapshotter.utils.redis.redis_keys import source_chain_id_key +from snapshotter.utils.rpc import get_event_sig_and_abi logger = logger.bind(module='data_helper') def retry_state_callback(retry_state: tenacity.RetryCallState): + """ + Callback function to handle retry attempts for IPFS cat operation. + + Parameters: + retry_state (tenacity.RetryCallState): The current state of the retry call. + + Returns: + None + """ logger.warning(f'Encountered IPFS cat exception: {retry_state.outcome.exception()}') # TODO: warmup cache to reduce RPC calls overhead async def get_project_finalized_cid(redis_conn: aioredis.Redis, state_contract_obj, rpc_helper, epoch_id, project_id): + """ + Get the CID of the finalized data for a given project and epoch. + + Args: + redis_conn (aioredis.Redis): Redis connection object. + state_contract_obj: Contract object for the state contract. + rpc_helper: Helper object for making RPC calls. + epoch_id (int): Epoch ID for which to get the CID. + project_id (str): ID of the project for which to get the CID. + + Returns: + str: CID of the finalized data for the given project and epoch, or None if not found. + """ project_first_epoch = await get_project_first_epoch( redis_conn, state_contract_obj, rpc_helper, project_id, @@ -75,7 +98,21 @@ async def w3_get_and_cache_finalized_cid( epoch_id, project_id, ): - + """ + This function retrieves the consensus status and the max snapshot CID for a given project and epoch. + If the consensus status is True, the CID is added to a Redis sorted set with the epoch ID as the score. + If the consensus status is False, a null value is added to the sorted set with the epoch ID as the score. + + Args: + redis_conn (aioredis.Redis): Redis connection object + state_contract_obj: Contract object for the state contract + rpc_helper: Helper object for making web3 calls + epoch_id (int): Epoch ID + project_id (int): Project ID + + Returns: + Tuple[str, int]: The CID and epoch ID if the consensus status is True, or the null value and epoch ID if the consensus status is False. 
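Editor's note: `w3_get_and_cache_finalized_cid` above caches the finalized snapshot CID in a Redis sorted set scored by epoch ID, and stores a null sentinel when consensus has not been reached (later reads in this diff check `'null' not in cid`). A hedged sketch of that cache shape using `redis.asyncio`; the key helper and sentinel format below are assumptions for illustration, the real keys come from `snapshotter.utils.redis.redis_keys`:

```python
# Hedged sketch of caching a finalized CID in a Redis sorted set keyed by epoch ID.
# Key name and 'null_<epoch>' sentinel format are illustrative assumptions.
from typing import Optional

from redis import asyncio as aioredis


def project_finalized_cid_zset(project_id: str) -> str:
    return f'projectID:{project_id}:finalizedCids'  # hypothetical key name


async def cache_finalized_cid(
    redis_conn: aioredis.Redis, project_id: str, epoch_id: int, cid: Optional[str],
) -> None:
    # score every member (real CID or null sentinel) by its epoch ID
    member = cid if cid else f'null_{epoch_id}'
    await redis_conn.zadd(project_finalized_cid_zset(project_id), {member: epoch_id})


async def lookup_finalized_cid(
    redis_conn: aioredis.Redis, project_id: str, epoch_id: int,
) -> Optional[str]:
    # assumes a client created with decode_responses=True
    entries = await redis_conn.zrangebyscore(
        project_finalized_cid_zset(project_id), min=epoch_id, max=epoch_id,
    )
    if entries and 'null' not in entries[0]:
        return entries[0]
    return None
```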
+ """ tasks = [ state_contract_obj.functions.snapshotStatus(project_id, epoch_id), state_contract_obj.functions.maxSnapshotsCid(project_id, epoch_id), @@ -100,7 +137,18 @@ async def w3_get_and_cache_finalized_cid( # TODO: warmup cache to reduce RPC calls overhead async def get_project_first_epoch(redis_conn: aioredis.Redis, state_contract_obj, rpc_helper, project_id): - + """ + Get the first epoch for a given project ID. + + Args: + redis_conn (aioredis.Redis): Redis connection object. + state_contract_obj: Contract object for the state contract. + rpc_helper: RPC helper object. + project_id (str): ID of the project. + + Returns: + int: The first epoch for the given project ID. + """ first_epoch_data = await redis_conn.hget( project_first_epoch_hmap(), project_id, @@ -136,10 +184,32 @@ async def get_project_first_epoch(redis_conn: aioredis.Redis, state_contract_obj before_sleep=retry_state_callback, ) async def fetch_file_from_ipfs(ipfs_reader, cid): + """ + Fetches a file from IPFS using the given IPFS reader and CID. + + Args: + ipfs_reader: An IPFS reader object. + cid: The CID of the file to fetch. + + Returns: + The contents of the file as bytes. + """ return await ipfs_reader.cat(cid) async def get_submission_data(redis_conn: aioredis.Redis, cid, ipfs_reader, project_id: str) -> dict: + """ + Fetches submission data from cache or IPFS. + + Args: + redis_conn (aioredis.Redis): Redis connection object. + cid (str): IPFS content ID. + ipfs_reader (ipfshttpclient.client.Client): IPFS client object. + project_id (str): ID of the project. + + Returns: + dict: Submission data. + """ if not cid: return dict() @@ -166,12 +236,24 @@ async def get_submission_data(redis_conn: aioredis.Redis, cid, ipfs_reader, proj return submission_data -async def get_sumbmission_data_bulk( +async def get_submission_data_bulk( redis_conn: aioredis.Redis, cids: List[str], ipfs_reader, project_ids: List[str], ) -> List[dict]: + """ + Retrieves submission data for multiple submissions in bulk. + + Args: + redis_conn (aioredis.Redis): Redis connection object. + cids (List[str]): List of submission CIDs. + ipfs_reader: IPFS reader object. + project_ids (List[str]): List of project IDs. + + Returns: + List[dict]: List of submission data dictionaries. + """ batch_size = 10 all_snapshot_data = [] for i in range(0, len(cids), batch_size): @@ -191,6 +273,20 @@ async def get_sumbmission_data_bulk( async def get_project_epoch_snapshot( redis_conn: aioredis.Redis, state_contract_obj, rpc_helper, ipfs_reader, epoch_id, project_id, ) -> dict: + """ + Retrieves the epoch snapshot for a given project. + + Args: + redis_conn (aioredis.Redis): Redis connection object. + state_contract_obj: State contract object. + rpc_helper: RPC helper object. + ipfs_reader: IPFS reader object. + epoch_id (int): Epoch ID. + project_id (str): Project ID. + + Returns: + dict: The epoch snapshot data. + """ cid = await get_project_finalized_cid(redis_conn, state_contract_obj, rpc_helper, epoch_id, project_id) if cid: data = await get_submission_data(redis_conn, cid, ipfs_reader, project_id) @@ -200,7 +296,17 @@ async def get_project_epoch_snapshot( async def get_source_chain_id(redis_conn: aioredis.Redis, state_contract_obj, rpc_helper): + """ + Retrieves the source chain ID from Redis cache if available, otherwise fetches it from the state contract and caches it in Redis. + + Args: + redis_conn (aioredis.Redis): Redis connection object. + state_contract_obj: State contract object. + rpc_helper: RPC helper object. 
+ Returns: + int: The source chain ID. + """ source_chain_id_data = await redis_conn.get( source_chain_id_key(), ) @@ -221,17 +327,107 @@ async def get_source_chain_id(redis_conn: aioredis.Redis, state_contract_obj, rp return source_chain_id +async def build_projects_list_from_events(redis_conn: aioredis.Redis, state_contract_obj, rpc_helper): + """ + Builds a list of project IDs from the 'ProjectsUpdated' events emitted by the state contract. + + Args: + redis_conn (aioredis.Redis): Redis connection object. + state_contract_obj: Contract object of the state contract. + rpc_helper: Helper object for making RPC calls. + + Returns: + list: List of project IDs. + """ + EVENT_SIGS = { + 'ProjectsUpdated': 'ProjectsUpdated(string,bool,uint256)', + } + + EVENT_ABI = { + 'ProjectsUpdated': state_contract_obj.events.ProjectsUpdated._get_event_abi(), + } + + [start_block] = await rpc_helper.web3_call( + [state_contract_obj.functions.DeploymentBlockNumber()], + redis_conn=redis_conn, + ) + + current_block = await rpc_helper.get_current_block_number(redis_conn) + event_sig, event_abi = get_event_sig_and_abi(EVENT_SIGS, EVENT_ABI) + + # from start_block to current block, get all events in batches of 1000, 10 requests parallelly + request_task_batch_size = 10 + project_updates = set() + for cumulative_block_range in range(start_block, current_block, 1000 * request_task_batch_size): + # split into 10 requests + tasks = [] + for block_range in range( + cumulative_block_range, + min(current_block, cumulative_block_range + 1000 * request_task_batch_size), + 1000, + ): + tasks.append( + rpc_helper.get_events_logs( + **{ + 'contract_address': state_contract_obj.address, + 'to_block': min(current_block, block_range + 1000), + 'from_block': block_range, + 'topics': [event_sig], + 'event_abi': event_abi, + 'redis_conn': redis_conn, + }, + ), + ) + + block_range_event_logs = await asyncio.gather(*tasks) + + for event_logs in block_range_event_logs: + for event_log in event_logs: + if event_log.args.allowed: + project_updates.add(event_log.args.projectId) + else: + project_updates.discard(event_log.args.projectId) + + return list(project_updates) + + async def get_projects_list(redis_conn: aioredis.Redis, state_contract_obj, rpc_helper): - tasks = [ - state_contract_obj.functions.getProjects(), - ] + """ + Fetches the list of projects from the state contract. - [projects_list] = await rpc_helper.web3_call(tasks, redis_conn=redis_conn) + Args: + redis_conn (aioredis.Redis): Redis connection object. + state_contract_obj: Contract object for the state contract. + rpc_helper: RPC helper object. - return projects_list + Returns: + List: List of projects. + """ + try: + tasks = [ + state_contract_obj.functions.getProjects(), + ] + + [projects_list] = await rpc_helper.web3_call(tasks, redis_conn=redis_conn) + return projects_list + + except Exception as e: + logger.warning('Error while fetching projects list from contract', error=e) + return [] async def get_snapshot_submision_window(redis_conn: aioredis.Redis, state_contract_obj, rpc_helper): + """ + Get the snapshot submission window from the state contract. + + Args: + redis_conn (aioredis.Redis): Redis connection object. + state_contract_obj: State contract object. + rpc_helper: RPC helper object. + + Returns: + submission_window (int): The snapshot submission window. 
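Editor's note: `build_projects_list_from_events` above reconstructs the active project set by replaying `ProjectsUpdated` logs from the contract's deployment block to the current head, querying in 1000-block windows with up to 10 windows fetched concurrently. A simplified, synchronous sketch of just the window arithmetic, independent of the RPC helper:

```python
# Simplified sketch of the block-window batching used by build_projects_list_from_events.
# Only the range arithmetic is shown; the real code issues rpc_helper.get_events_logs
# per window and gathers each batch with asyncio.
def iter_block_windows(start_block: int, current_block: int,
                       window: int = 1000, parallel_windows: int = 10):
    """Yield lists of (from_block, to_block) windows, parallel_windows at a time."""
    stride = window * parallel_windows
    for batch_start in range(start_block, current_block, stride):
        batch = []
        for from_block in range(batch_start, min(current_block, batch_start + stride), window):
            batch.append((from_block, min(current_block, from_block + window)))
        yield batch


# Example: scan blocks 0..2500 in 1000-block windows, two windows per batch
for batch in iter_block_windows(0, 2500, window=1000, parallel_windows=2):
    print(batch)
# [(0, 1000), (1000, 2000)]
# [(2000, 2500)]
```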
+ """ tasks = [ state_contract_obj.functions.snapshotSubmissionWindow(), ] @@ -242,7 +438,17 @@ async def get_snapshot_submision_window(redis_conn: aioredis.Redis, state_contra async def get_source_chain_epoch_size(redis_conn: aioredis.Redis, state_contract_obj, rpc_helper): + """ + This function retrieves the epoch size of the source chain from the state contract. + + Args: + redis_conn (aioredis.Redis): Redis connection object. + state_contract_obj: Contract object for the state contract. + rpc_helper: Helper object for making RPC calls. + Returns: + int: The epoch size of the source chain. + """ source_chain_epoch_size_data = await redis_conn.get( source_chain_epoch_size_key(), ) @@ -265,6 +471,17 @@ async def get_source_chain_epoch_size(redis_conn: aioredis.Redis, state_contract async def get_source_chain_block_time(redis_conn: aioredis.Redis, state_contract_obj, rpc_helper): + """ + Get the block time of the source chain. + + Args: + redis_conn (aioredis.Redis): Redis connection object. + state_contract_obj: Contract object for the state contract. + rpc_helper: RPC helper object. + + Returns: + int: Block time of the source chain. + """ source_chain_block_time_data = await redis_conn.get( source_chain_block_time_key(), ) @@ -287,7 +504,6 @@ async def get_source_chain_block_time(redis_conn: aioredis.Redis, state_contract return source_chain_block_time -# calculate tail epoch_id given current epoch and time in seconds async def get_tail_epoch_id( redis_conn: aioredis.Redis, state_contract_obj, @@ -296,7 +512,20 @@ async def get_tail_epoch_id( time_in_seconds, project_id, ): - # Returns tail epoch_id and a boolean indicating if tail contains the full time window + """ + Returns the tail epoch_id and a boolean indicating if tail contains the full time window. + + Args: + redis_conn (aioredis.Redis): Redis connection object. + state_contract_obj: State contract object. + rpc_helper: RPC helper object. + current_epoch_id (int): Current epoch ID. + time_in_seconds (int): Time in seconds. + project_id (str): Project ID. + + Returns: + Tuple[int, bool]: Tail epoch ID and a boolean indicating if tail contains the full time window. + """ source_chain_epoch_size = await get_source_chain_epoch_size(redis_conn, state_contract_obj, rpc_helper) source_chain_block_time = await get_source_chain_block_time(redis_conn, state_contract_obj, rpc_helper) @@ -326,7 +555,21 @@ async def get_project_epoch_snapshot_bulk( epoch_id_max: int, project_id, ): - + """ + Fetches the snapshot data for a given project and epoch range. + + Args: + redis_conn (aioredis.Redis): Redis connection object. + state_contract_obj: State contract object. + rpc_helper: RPC helper object. + ipfs_reader: IPFS reader object. + epoch_id_min (int): Minimum epoch ID to fetch snapshot data for. + epoch_id_max (int): Maximum epoch ID to fetch snapshot data for. + project_id: ID of the project to fetch snapshot data for. + + Returns: + A list of snapshot data for the given project and epoch range. 
+ """ batch_size = 100 project_first_epoch = await get_project_first_epoch( @@ -368,7 +611,7 @@ async def get_project_epoch_snapshot_bulk( if cid and 'null' not in cid: valid_cid_data_with_epochs.append((cid, epoch_id)) - all_snapshot_data = await get_sumbmission_data_bulk( + all_snapshot_data = await get_submission_data_bulk( redis_conn, [cid for cid, _ in valid_cid_data_with_epochs], ipfs_reader, [ project_id, ] * len(valid_cid_data_with_epochs), @@ -377,8 +620,16 @@ async def get_project_epoch_snapshot_bulk( return all_snapshot_data -# get snapshotter high level status for all the projects async def get_snapshotter_status(redis_conn: aioredis.Redis): + """ + Returns the snapshotter status for all projects. + + Args: + redis_conn (aioredis.Redis): Redis connection object. + + Returns: + SnapshotterStatus: Object containing the snapshotter status for all projects. + """ status_keys = [] all_projects = await redis_conn.smembers('storedProjectIds') @@ -433,8 +684,18 @@ async def get_snapshotter_status(redis_conn: aioredis.Redis): return overall_status -# gets snapshotter status for a particular project async def get_snapshotter_project_status(redis_conn: aioredis.Redis, project_id: str, with_data: bool): + """ + Retrieves the snapshotter project status for a given project ID. + + Args: + redis_conn (aioredis.Redis): Redis connection object. + project_id (str): ID of the project to retrieve the status for. + with_data (bool): Whether to include snapshot data in the response. + + Returns: + SnapshotterProjectStatus: Object containing the project status. + """ reports = await redis_conn.hgetall(project_snapshotter_status_report_key(project_id)) reports = { diff --git a/snapshotter/utils/default_logger.py b/snapshotter/utils/default_logger.py index bd410cb9..1a66ad46 100644 --- a/snapshotter/utils/default_logger.py +++ b/snapshotter/utils/default_logger.py @@ -7,46 +7,118 @@ def trace_enabled(_): + """ + Returns the value of trace_enabled setting from the settings module. + + Args: + _: Unused argument. + + Returns: + bool: The value of trace_enabled setting. + """ return settings.logs.trace_enabled def logger_filter_trace(record): + """ + Filter function for logging records with level 'TRACE'. + + Args: + record (dict): The logging record to be filtered. + + Returns: + bool: True if the record's level is 'TRACE', False otherwise. + """ if record['level'].name == 'TRACE': return True return False def logger_filter_debug(record): + """ + Filter function to be used with Python's logging module to filter out log records with level lower than DEBUG. + + Args: + record (logging.LogRecord): The log record to be filtered. + + Returns: + bool: True if the log record's level is DEBUG, False otherwise. + """ if record['level'].name == 'DEBUG': return True return False def logger_filter_info(record): + """ + Filter function for logger to only allow INFO level logs. + + Args: + record (dict): The log record to be filtered. + + Returns: + bool: True if the log record is INFO level, False otherwise. + """ if record['level'].name == 'INFO': return True return False def logger_filter_success(record): + """ + Filter function to only allow records with level 'SUCCESS'. + + Args: + record (dict): The log record to filter. + + Returns: + bool: True if the record's level is 'SUCCESS', False otherwise. + """ if record['level'].name == 'SUCCESS': return True return False def logger_filter_warning(record): + """ + Filter function to only allow warning level logs through. 
+ + Args: + record (dict): The log record to filter. + + Returns: + bool: True if the log record is a warning level log, False otherwise. + """ if record['level'].name == 'WARNING': return True return False def logger_filter_error(record): + """ + Filter function to only allow ERROR level logs to be processed. + + Args: + record (dict): The log record to be filtered. + + Returns: + bool: True if the log record is an ERROR level log, False otherwise. + """ if record['level'].name == 'ERROR': return True return False def logger_filter_critical(record): + """ + Filter function to only allow records with CRITICAL level. + + Args: + record (dict): The log record to be filtered. + + Returns: + bool: True if the record's level is CRITICAL, False otherwise. + """ if record['level'].name == 'CRITICAL': return True return False diff --git a/snapshotter/utils/delegate_worker.py b/snapshotter/utils/delegate_worker.py index a92ed507..2f8a367b 100644 --- a/snapshotter/utils/delegate_worker.py +++ b/snapshotter/utils/delegate_worker.py @@ -21,19 +21,35 @@ class DelegateAsyncWorker(GenericAsyncWorker): def __init__(self, name, **kwargs): + """ + Initializes a new instance of the DelegateAsyncWorker class. + + Args: + name (str): The name of the worker. + **kwargs: Additional keyword arguments to pass to the base class constructor. + """ super(DelegateAsyncWorker, self).__init__(name=name, **kwargs) self._qos = 1 - self._exchange_name = f'{settings.rabbitmq.setup.delegated_worker.exchange}:{settings.namespace}' + self._exchange_name = f'{settings.rabbitmq.setup.delegated_worker.exchange}:Request:{settings.namespace}' + self._response_exchange_name = f'{settings.rabbitmq.setup.delegated_worker.exchange}:Response:{settings.namespace}' self._delegate_task_calculation_mapping = None self._task_types = [] for task in delegate_tasks: - type_ = task.task_type - self._task_types.append(type_) + task_type = task.task_type + self._task_types.append(task_type) self._q, self._rmq_routing = get_delegate_worker_request_queue_routing_key() async def _processor_task(self, msg_obj: PowerloomDelegateWorkerRequestMessage): - """Function used to process the received message object.""" + """ + Process a delegate task for the given message object. + + Args: + msg_obj (PowerloomDelegateWorkerRequestMessage): The message object containing the task to process. + + Returns: + None + """ self._logger.trace( 'Processing delegate task for {}', msg_obj, ) @@ -62,14 +78,13 @@ async def _processor_task(self, msg_obj: PowerloomDelegateWorkerRequestMessage): ) self._logger.trace('got result from delegate worker compute {}', result) - await self._send_delegate_worker_response_queue( request_msg=msg_obj, response_msg=result, ) except Exception as e: - self._logger.opt(exception=True).error( - 'Exception while processing tx receipt fetch for {}', msg_obj, + self._logger.opt(exception=settings.logs.trace_enabled).error( + 'Exception while processing tx receipt fetch for {}: {}', msg_obj, e, ) notification_message = DelegateTaskProcessorIssue( @@ -93,11 +108,20 @@ async def _send_delegate_worker_response_queue( request_msg: PowerloomDelegateWorkerRequestMessage, response_msg: BaseModel, ): + """ + Sends a response message to the delegate worker response queue. + Args: + request_msg (PowerloomDelegateWorkerRequestMessage): The request message that triggered the response. + response_msg (BaseModel): The response message to send. + + Raises: + Exception: If there was an error sending the message to the delegate worker response queue. 
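Editor's note: the delegate worker above now publishes responses on a dedicated `...:Response:<namespace>` exchange instead of reusing the request exchange, and (as the next hunk shows) routes each response with the request's `extra['unique_id']` rather than an `epochId_taskType` key. A hedged sketch of that naming scheme; the base exchange name, namespace, and routing-key pattern strings below are illustrative, not the actual values from `init_rabbitmq`:

```python
# Sketch of the request/response exchange split and per-request routing key
# introduced in this diff. Exchange, namespace, and pattern strings are illustrative.
import uuid


def delegate_worker_exchanges(base_exchange: str, namespace: str) -> tuple:
    request_exchange = f'{base_exchange}:Request:{namespace}'
    response_exchange = f'{base_exchange}:Response:{namespace}'
    return request_exchange, response_exchange


# the preloader stamps each request with a unique ID; the worker substitutes it
# into the '*' slot of the response routing key so replies reach the right waiter
response_routing_key_pattern = 'powerloom-delegated-worker-response:UNISWAPV2.*'  # illustrative
unique_id = str(uuid.uuid4())
response_routing_key = response_routing_key_pattern.replace('*', unique_id)

print(delegate_worker_exchanges('powerloom-backend-delegated-worker', 'UNISWAPV2'))
print(response_routing_key)
```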
+ """ response_queue_name, response_routing_key_pattern = get_delegate_worker_response_queue_routing_key_pattern() response_routing_key = response_routing_key_pattern.replace( - '*', f'{request_msg.epochId}_{request_msg.task_type}', + '*', request_msg.extra['unique_id'], ) # send through rabbitmq @@ -106,7 +130,7 @@ async def _send_delegate_worker_response_queue( # Prepare a message to send delegate_workers_response_exchange = await channel.get_exchange( # request and response payloads for delegate workers are sent through the same exchange - name=self._exchange_name, + name=self._response_exchange_name, ) message_data = response_msg.json().encode('utf-8') # Prepare a message to send @@ -117,7 +141,7 @@ async def _send_delegate_worker_response_queue( ) except Exception as e: - self._logger.opt(exception=True).error( + self._logger.opt(exception=settings.logs.trace_enabled).error( ( 'Exception sending message to delegate :' ' {} | dump: {}' @@ -127,7 +151,16 @@ async def _send_delegate_worker_response_queue( ) async def _on_rabbitmq_message(self, message: IncomingMessage): + """ + Callback function that is called when a message is received from RabbitMQ. + It processes the message and starts a new task to handle the message. + + Args: + message (IncomingMessage): The incoming message from RabbitMQ. + Returns: + None + """ if not self._initialized: await self.init_worker() @@ -160,11 +193,17 @@ async def _on_rabbitmq_message(self, message: IncomingMessage): asyncio.ensure_future(self._processor_task(msg_obj=msg_obj)) async def init_worker(self): + """ + Initializes the worker by calling the _init_delegate_task_calculation_mapping and init functions. + """ if not self._initialized: await self._init_delegate_task_calculation_mapping() await self.init() async def _init_delegate_task_calculation_mapping(self): + """ + Initializes the mapping of delegate tasks to their corresponding calculation classes. + """ if self._delegate_task_calculation_mapping is not None: return # Generate project function mapping @@ -175,7 +214,3 @@ async def _init_delegate_task_calculation_mapping(self): module = importlib.import_module(delegate_task.module) class_ = getattr(module, delegate_task.class_name) self._delegate_task_calculation_mapping[key] = class_() - - -# wkr = DelegateAsyncWorker("test") -# wkr.start() diff --git a/snapshotter/utils/exceptions.py b/snapshotter/utils/exceptions.py index a60f85a9..48596ac0 100644 --- a/snapshotter/utils/exceptions.py +++ b/snapshotter/utils/exceptions.py @@ -3,23 +3,39 @@ class SelfExitException( Exception, -): # used by process hub core to signal core exit +): + """ + Exception used by process hub core to signal core exit. + """ pass class GenericExitOnSignal(Exception): - # to be used whenever any other launched process/callback worker receives a signal to 'exit' - [INT, TERM, QUIT] + """Exception to be used whenever any other launched process/callback worker receives a signal to 'exit' - [INT, TERM, QUIT]""" pass class RPCException(Exception): def __init__(self, request, response, underlying_exception, extra_info): + """ + Initializes a new instance of the ApiException class. + + :param request: The request that caused the exception. + :type request: Any + :param response: The response received from the server. + :type response: Any + :param underlying_exception: The underlying exception that caused this exception. + :type underlying_exception: Exception + :param extra_info: Additional information about the exception. 
+ :type extra_info: Any + """ self.request = request self.response = response self.underlying_exception: Exception = underlying_exception self.extra_info = extra_info def __str__(self): + """Return a JSON string representation of the exception object.""" ret = { 'request': self.request, 'response': self.response, @@ -31,4 +47,7 @@ def __str__(self): return json.dumps(ret) def __repr__(self): + """ + Return a string representation of the exception. + """ return self.__str__() diff --git a/snapshotter/utils/file_utils.py b/snapshotter/utils/file_utils.py index 5dd10eed..f9e51554 100644 --- a/snapshotter/utils/file_utils.py +++ b/snapshotter/utils/file_utils.py @@ -13,7 +13,19 @@ def read_json_file( file_path: str, logger: logger = default_logger, ) -> dict: - """Read given json file and return its content as a dictionary.""" + """ + Read a JSON file and return its content as a dictionary. + + Args: + file_path (str): The path to the JSON file to read. + logger (logger, optional): The logger to use for logging. Defaults to default_logger. + + Returns: + dict: The content of the JSON file as a dictionary. + + Raises: + FileNotFoundError: If the specified file does not exist. + """ # check if file is present if not os.path.exists(file_path): raise FileNotFoundError(f'File {file_path} not found') @@ -40,6 +52,21 @@ def write_json_file( data: Any, logger: logger = logger, ) -> None: + """ + Write data to a JSON file at the specified directory with the specified file name. + + Args: + directory (str): The directory where the file will be created. + file_name (str): The name of the file to be created. + data (Any): The data to be written to the file. + logger (logger, optional): The logger object to be used for logging. Defaults to logger. + + Raises: + Exception: If there is an error while writing to the file. + + Returns: + None + """ try: file_path = os.path.join(directory, file_name) if not os.path.exists(directory): @@ -53,6 +80,20 @@ def write_json_file( def write_bytes_to_file(directory: str, file_name: str, data): + """ + Write bytes to a file in the specified directory. + + Args: + directory (str): The directory where the file will be written. + file_name (str): The name of the file to be written. + data (bytes): The bytes to be written to the file. + + Raises: + Exception: If the file cannot be opened. + + Returns: + None + """ try: file_path = directory + file_name if not os.path.exists(directory): @@ -68,7 +109,15 @@ def write_bytes_to_file(directory: str, file_name: str, data): def read_text_file(file_path: str): - """Read given file and return the read bytes in form of a string.""" + """ + Read the given file and return the contents as a string. + + Args: + file_path (str): The path to the file to be read. + + Returns: + str: The contents of the file as a string, or None if the file could not be read. 
+ """ try: file_obj = open(file_path, 'r', encoding='utf-8') except FileNotFoundError: diff --git a/snapshotter/utils/generic_delegator_preloader.py b/snapshotter/utils/generic_delegator_preloader.py index f6acb14c..e243bf02 100644 --- a/snapshotter/utils/generic_delegator_preloader.py +++ b/snapshotter/utils/generic_delegator_preloader.py @@ -1,4 +1,5 @@ import asyncio +import uuid from collections import defaultdict from typing import Any from typing import Dict @@ -7,6 +8,10 @@ import aiorwlock from aio_pika import Message from redis import asyncio as aioredis +from tenacity import retry +from tenacity import retry_if_exception_type +from tenacity import stop_after_attempt +from tenacity import wait_random_exponential from snapshotter.init_rabbitmq import get_delegate_worker_request_queue_routing_key from snapshotter.init_rabbitmq import get_delegate_worker_response_queue_routing_key_pattern @@ -20,14 +25,39 @@ class DelegatorPreloaderAsyncWorker(GenericDelegatorPreloader): def __init__(self, **kwargs): - self._qos = 10 - self._filter_worker_exchange_name = f'{settings.rabbitmq.setup.delegated_worker.exchange}:{settings.namespace}' + """ + Initializes the delegator preloader. + Args: + **kwargs: Arbitrary keyword arguments. + + Attributes: + _qos (int): Quality of service. + _filter_worker_request_exchange_name (str): Name of the exchange for worker requests. + _filter_worker_response_exchange_name (str): Name of the exchange for worker responses. + _filter_worker_request_queue (str): Name of the queue for worker requests. + _filter_worker_request_routing_key (str): Routing key for worker requests. + _awaited_delegated_response_ids (set): Set of request IDs for which responses are awaited. + _collected_response_objects (Dict[str, Dict[Any, Any]]): Dictionary of response objects collected so far. + _filter_worker_response_queue (str): Name of the queue for worker responses. + _filter_worker_response_routing_key (str): Routing key for worker responses. + _rw_lock (aiorwlock.RWLock): Read-write lock for synchronizing access to shared resources. + _preload_finished_event (asyncio.Event): Event that is set when preloading is complete. + _redis_conn: Redis connection object. + _channel: RabbitMQ channel object. + _q_obj: RabbitMQ queue object. + _unique_id: Unique identifier for this instance. + _response_exchange: RabbitMQ exchange object for worker responses. + """ + + self._qos = 1 + self._filter_worker_request_exchange_name = f'{settings.rabbitmq.setup.delegated_worker.exchange}:Request:{settings.namespace}' + self._filter_worker_response_exchange_name = f'{settings.rabbitmq.setup.delegated_worker.exchange}:Response:{settings.namespace}' self._filter_worker_request_queue, \ self._filter_worker_request_routing_key = get_delegate_worker_request_queue_routing_key() # request IDs on which responses are awaited. preloading is complete when this set is empty self._awaited_delegated_response_ids = set() # epoch ID -> task type/ID (for eg, txHash) -> response object on task (for. 
eg tx receipt against txHash) - self._collected_response_objects: Dict[int, Dict[str, Dict[Any, Any]]] = defaultdict(dict) + self._collected_response_objects: Dict[str, Dict[Any, Any]] = defaultdict(dict) self._filter_worker_response_queue = None self._filter_worker_response_routing_key = None self._rw_lock = aiorwlock.RWLock() @@ -35,8 +65,14 @@ def __init__(self, **kwargs): self._redis_conn = None self._channel = None self._q_obj = None + self._unique_id = None + self._response_exchange = None async def cleanup(self): + """ + Cleans up the resources used by the delegator preloader. + Closes the Redis connection and cancels the consumer tag. + """ if self._redis_conn: try: await self._redis_conn.close() @@ -48,6 +84,10 @@ async def cleanup(self): await self._channel.close() async def _periodic_awaited_responses_checker(self): + """ + Periodically checks for awaited delegated responses and triggers the on_delegated_responses_complete callback + when all responses have been received. + """ _running = True while _running: await asyncio.sleep(1) @@ -62,29 +102,95 @@ async def _on_filter_worker_response_message( self, message: aio_pika.abc.AbstractIncomingMessage, ): - if message.routing_key.split('.')[-1] != f'{self._epoch.epochId}_{self._task_type}': + """ + Callback function that is called when a response message is received from the filter worker. + + Args: + message (aio_pika.abc.AbstractIncomingMessage): The incoming message from the filter worker. + """ + if message.routing_key.split('.')[-1] != self._unique_id: await message.nack(requeue=True) return else: await message.ack() asyncio.ensure_future(self._handle_filter_worker_response_message(message.body)) - async def compute( + @retry( + reraise=True, + retry=retry_if_exception_type(Exception), + stop=stop_after_attempt(2), + wait=wait_random_exponential(multiplier=1, max=3), + + ) + async def compute_with_retry(self, epoch: EpochBase, redis_conn: aioredis.Redis, rpc_helper: RpcHelper): + """ + Compute with retry logic. + + Args: + epoch (EpochBase): The epoch to compute. + redis_conn (aioredis.Redis): The Redis connection. + rpc_helper (RpcHelper): The RPC helper. + + Returns: + The result of the computation. + """ + return await self.compute_with_delegate_workers(epoch=epoch, redis_conn=redis_conn, rpc_helper=rpc_helper) + + async def compute_with_delegate_workers( self, epoch: EpochBase, redis_conn: aioredis.Redis, rpc_helper: RpcHelper, ): + """ + Computes the delegated worker responses for the given epoch using RabbitMQ and Redis. + + Args: + epoch: An instance of EpochBase representing the epoch for which to compute the delegated worker responses. + redis_conn: An instance of aioredis.Redis representing the Redis connection to use. + rpc_helper: An instance of RpcHelper representing the RPC helper to use. + + Raises: + Exception: If the preloading task times out or if an exception occurs while waiting for preloading to complete. 
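Editor's note: `compute_with_retry` above wraps the delegated-worker computation in a tenacity policy: two attempts, randomized exponential backoff capped at a few seconds, and the final exception re-raised to the caller. A minimal runnable sketch of that decorator shape on a stand-in coroutine; the flaky call is simulated and does not reflect the project's actual workload.

```python
import asyncio
import random

from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_random_exponential


@retry(
    reraise=True,                                     # surface the last exception to the caller
    retry=retry_if_exception_type(Exception),         # retry on any failure
    stop=stop_after_attempt(2),                       # at most one retry
    wait=wait_random_exponential(multiplier=1, max=3),
)
async def flaky_fetch() -> str:
    # stand-in for compute_with_delegate_workers(); fails roughly half the time
    if random.random() < 0.5:
        raise ConnectionError('simulated transient failure')
    return 'ok'


if __name__ == '__main__':
    print(asyncio.run(flaky_fetch()))
```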
+ """ self._redis_conn = redis_conn self._awaited_delegated_response_ids = set(self._request_id_query_obj_map.keys()) + self._unique_id = str(uuid.uuid4()) async with await get_rabbitmq_basic_connection_async() as rmq_conn: self._channel = await rmq_conn.channel() - await self._channel.set_qos(10) + await self._channel.set_qos(self._qos) + + self._q_obj = await self._channel.declare_queue(exclusive=True) + + self._filter_worker_response_queue, \ + _filter_worker_response_routing_key_pattern = get_delegate_worker_response_queue_routing_key_pattern() + + self._filter_worker_response_routing_key = _filter_worker_response_routing_key_pattern.replace( + '*', self._unique_id, + ) + + self._logger.debug( + 'Consuming {} fetch response queue {} ' + 'in preloader, with routing key {}', + self._task_type, + self._filter_worker_response_queue, + self._filter_worker_response_routing_key, + ) + self._response_exchange = await self._channel.get_exchange( + name=self._filter_worker_response_exchange_name, + ) + await self._q_obj.bind(self._response_exchange, routing_key=self._filter_worker_response_routing_key) + self._consumer_tag = await self._q_obj.consume( + callback=self._on_filter_worker_response_message, + ) + asyncio.ensure_future(self._periodic_awaited_responses_checker()) + self._exchange = await self._channel.get_exchange( - name=self._filter_worker_exchange_name, + name=self._filter_worker_request_exchange_name, ) query_tasks = list() for query_obj in self._request_id_query_obj_map.values(): + query_obj.extra['unique_id'] = self._unique_id message_data = query_obj.json().encode('utf-8') # Prepare a message to send @@ -103,33 +209,11 @@ async def compute( ) asyncio.ensure_future(asyncio.gather(*query_tasks, return_exceptions=True)) - self._filter_worker_response_queue, \ - _filter_worker_response_routing_key_pattern = get_delegate_worker_response_queue_routing_key_pattern() - - self._filter_worker_response_routing_key = _filter_worker_response_routing_key_pattern.replace( - '*', f'{epoch.epochId}_{self._task_type}', - ) - - self._q_obj = await self._channel.declare_queue(exclusive=True) - - self._logger.debug( - 'Consuming {} fetch response queue {} ' - 'in preloader, with routing key {}', - self._task_type, - self._filter_worker_response_queue, - self._filter_worker_response_routing_key, - ) - await self._q_obj.bind(self._exchange, routing_key=self._filter_worker_response_routing_key) - self._consumer_tag = await self._q_obj.consume( - callback=self._on_filter_worker_response_message, - ) - asyncio.ensure_future(self._periodic_awaited_responses_checker()) - try: await asyncio.wait_for(self._preload_finished_event.wait(), timeout=preloader_config.timeout) await self.cleanup() except asyncio.TimeoutError: - self._logger.error( + self._logger.warning( 'Preloading task {} for epoch {} timed out after {} seconds', self._task_type, epoch.epochId, preloader_config.timeout, ) @@ -138,6 +222,6 @@ async def compute( f'Preloading task {self._task_type} for epoch {epoch.epochId} timed out after {preloader_config.timeout} seconds', ) except Exception as e: - self._logger.error('Exception while waiting for preloading to complete: {}', e) + self._logger.warning('Exception while waiting for preloading to complete: {}', e) await self.cleanup() raise e diff --git a/snapshotter/utils/generic_worker.py b/snapshotter/utils/generic_worker.py index e77d10fa..588a812e 100644 --- a/snapshotter/utils/generic_worker.py +++ b/snapshotter/utils/generic_worker.py @@ -1,12 +1,19 @@ import asyncio +import json import 
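Editor's note: `compute_with_delegate_workers` above declares an exclusive response queue, fans out one request per item, and then blocks on `asyncio.wait_for(self._preload_finished_event.wait(), ...)` while `_periodic_awaited_responses_checker` drains `_awaited_delegated_response_ids`. The sketch below reproduces that completion-tracking pattern with plain asyncio primitives only (no RabbitMQ); the ids, interval, and timeout are illustrative.

```python
import asyncio


class ResponseTracker:
    """Tracks outstanding request ids and signals when all responses have arrived."""

    def __init__(self, request_ids):
        self._awaited = set(request_ids)
        self._lock = asyncio.Lock()
        self.finished = asyncio.Event()

    async def on_response(self, request_id):
        async with self._lock:
            self._awaited.discard(request_id)

    async def periodic_checker(self, interval=0.1):
        while True:
            await asyncio.sleep(interval)
            async with self._lock:
                if not self._awaited:
                    self.finished.set()
                    return


async def main():
    tracker = ResponseTracker({'req-1', 'req-2', 'req-3'})
    asyncio.ensure_future(tracker.periodic_checker())

    # simulate responses trickling in from delegate workers
    for rid in ('req-1', 'req-2', 'req-3'):
        await asyncio.sleep(0.05)
        await tracker.on_response(rid)

    # equivalent of the preloader's wait on preloader_config.timeout
    await asyncio.wait_for(tracker.finished.wait(), timeout=2)
    print('all responses collected')


asyncio.run(main())
```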
multiprocessing import resource import time from functools import partial +from signal import SIGINT +from signal import signal +from signal import SIGQUIT +from signal import SIGTERM from typing import Dict from typing import Union from uuid import uuid4 +import httpx +import tenacity from aio_pika import IncomingMessage from aio_pika import Message from aio_pika.pool import Pool @@ -15,18 +22,27 @@ from httpx import AsyncHTTPTransport from httpx import Limits from httpx import Timeout +from ipfs_client.dag import IPFSAsyncClientError +from ipfs_client.main import AsyncIPFSClient from pydantic import BaseModel from redis import asyncio as aioredis +from tenacity import retry +from tenacity import stop_after_attempt +from tenacity import wait_random_exponential from web3 import Web3 from snapshotter.settings.config import settings from snapshotter.utils.callback_helpers import get_rabbitmq_channel from snapshotter.utils.callback_helpers import get_rabbitmq_robust_connection_async +from snapshotter.utils.callback_helpers import send_failure_notifications_async from snapshotter.utils.data_utils import get_source_chain_id from snapshotter.utils.default_logger import logger from snapshotter.utils.file_utils import read_json_file +from snapshotter.utils.models.data_models import SnapshotterIssue +from snapshotter.utils.models.data_models import SnapshotterReportState from snapshotter.utils.models.data_models import SnapshotterStates from snapshotter.utils.models.data_models import SnapshotterStateUpdate +from snapshotter.utils.models.data_models import UnfinalizedSnapshot from snapshotter.utils.models.message_models import AggregateBase from snapshotter.utils.models.message_models import PayloadCommitMessage from snapshotter.utils.models.message_models import PowerloomCalculateAggregateMessage @@ -34,9 +50,42 @@ from snapshotter.utils.models.message_models import PowerloomSnapshotSubmittedMessage from snapshotter.utils.redis.redis_conn import RedisPoolCache from snapshotter.utils.redis.redis_keys import epoch_id_project_to_state_mapping +from snapshotter.utils.redis.redis_keys import submitted_unfinalized_snapshot_cids from snapshotter.utils.rpc import RpcHelper +def web3_storage_retry_state_callback(retry_state: tenacity.RetryCallState): + """ + Callback function to handle retry attempts for web3 storage upload. + + Args: + retry_state (tenacity.RetryCallState): The current state of the retry call. + + Returns: + None + """ + if retry_state and retry_state.outcome.failed: + logger.warning( + f'Encountered web3 storage upload exception: {retry_state.outcome.exception()} | args: {retry_state.args}, kwargs:{retry_state.kwargs}', + ) + + +def ipfs_upload_retry_state_callback(retry_state: tenacity.RetryCallState): + """ + Callback function to handle retry attempts for IPFS uploads. + + Args: + retry_state (tenacity.RetryCallState): The current state of the retry attempt. 
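Editor's note: the `after=` callbacks above (`web3_storage_retry_state_callback`, `ipfs_upload_retry_state_callback`) run after every attempt and log only when the attempt actually failed, by inspecting `retry_state.outcome`. A compact sketch of that hook, wired to a throwaway function so it can be run directly; the function and log messages are placeholders.

```python
import logging

import tenacity
from tenacity import retry, stop_after_attempt, wait_fixed

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger('upload-retry')


def log_failed_attempt(retry_state: tenacity.RetryCallState) -> None:
    """after= hook: called after every attempt, logs only the failed ones."""
    if retry_state.outcome and retry_state.outcome.failed:
        logger.warning(
            'attempt %s failed: %s | args=%s kwargs=%s',
            retry_state.attempt_number,
            retry_state.outcome.exception(),
            retry_state.args,
            retry_state.kwargs,
        )


@retry(stop=stop_after_attempt(3), wait=wait_fixed(0.1), after=log_failed_attempt, reraise=True)
def always_fails(payload: bytes):
    raise RuntimeError('simulated upload error')


if __name__ == '__main__':
    try:
        always_fails(b'snapshot-bytes')
    except RuntimeError:
        pass   # three warning lines were logged, one per failed attempt
```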
+ + Returns: + None + """ + if retry_state and retry_state.outcome.failed: + logger.warning( + f'Encountered ipfs upload exception: {retry_state.outcome.exception()} | args: {retry_state.args}, kwargs:{retry_state.kwargs}', + ) + + class GenericAsyncWorker(multiprocessing.Process): _async_transport: AsyncHTTPTransport _rmq_connection_pool: Pool @@ -46,33 +95,227 @@ class GenericAsyncWorker(multiprocessing.Process): _rpc_helper: RpcHelper _anchor_rpc_helper: RpcHelper _httpx_client: AsyncClient + _web3_storage_upload_transport: AsyncHTTPTransport + _web3_storage_upload_client: AsyncClient def __init__(self, name, **kwargs): + """ + Initializes a GenericAsyncWorker instance. + + Args: + name (str): The name of the worker. + **kwargs: Additional keyword arguments to pass to the superclass constructor. + """ self._core_rmq_consumer: asyncio.Task self._exchange_name = f'{settings.rabbitmq.setup.callbacks.exchange}:{settings.namespace}' self._unique_id = f'{name}-' + keccak(text=str(uuid4())).hex()[:8] self._running_callback_tasks: Dict[str, asyncio.Task] = dict() super(GenericAsyncWorker, self).__init__(name=name, **kwargs) - self._logger = logger.bind(module=self.name) - self.protocol_state_contract = None - self._qos = 20 + self._protocol_state_contract = None + self._qos = 1 self._rate_limiting_lua_scripts = None - self.protocol_state_contract_abi = read_json_file( - settings.protocol_state.abi, - self._logger, - ) self.protocol_state_contract_address = settings.protocol_state.address self._commit_payload_exchange = ( f'{settings.rabbitmq.setup.commit_payload.exchange}:{settings.namespace}' ) + self._event_detector_exchange = f'{settings.rabbitmq.setup.event_detector.exchange}:{settings.namespace}' + self._event_detector_routing_key_prefix = f'powerloom-event-detector:{settings.namespace}:{settings.instance_id}.' self._commit_payload_routing_key = ( f'powerloom-backend-commit-payload:{settings.namespace}:{settings.instance_id}.Data' ) self._initialized = False + def _signal_handler(self, signum, frame): + """ + Signal handler function that cancels the core RMQ consumer when a SIGINT, SIGTERM or SIGQUIT signal is received. + + Args: + signum (int): The signal number. + frame (frame): The current stack frame at the time the signal was received. + """ + if signum in [SIGINT, SIGTERM, SIGQUIT]: + self._core_rmq_consumer.cancel() + + @retry( + wait=wait_random_exponential(multiplier=1, max=10), + stop=stop_after_attempt(5), + retry=tenacity.retry_if_not_exception_type(httpx.HTTPStatusError), + after=web3_storage_retry_state_callback, + ) + async def _upload_web3_storage(self, snapshot: bytes): + """ + Uploads the given snapshot to web3 storage. + + Args: + snapshot (bytes): The snapshot to upload. + + Returns: + None + + Raises: + HTTPError: If the upload fails. 
+ """ + web3_storage_settings = settings.web3storage + # if no api token is provided, skip + if not web3_storage_settings.api_token: + return + files = {'file': snapshot} + r = await self._web3_storage_upload_client.post( + url=f'{web3_storage_settings.url}{web3_storage_settings.upload_url_suffix}', + files=files, + ) + r.raise_for_status() + resp = r.json() + self._logger.info('Uploaded snapshot to web3 storage: {} | Response: {}', snapshot, resp) + + @retry( + wait=wait_random_exponential(multiplier=1, max=10), + stop=stop_after_attempt(5), + retry=tenacity.retry_if_not_exception_type(IPFSAsyncClientError), + after=ipfs_upload_retry_state_callback, + ) + async def _upload_to_ipfs(self, snapshot: bytes, _ipfs_writer_client: AsyncIPFSClient): + """ + Uploads a snapshot to IPFS using the provided AsyncIPFSClient. + + Args: + snapshot (bytes): The snapshot to upload. + _ipfs_writer_client (AsyncIPFSClient): The IPFS client to use for uploading. + + Returns: + str: The CID of the uploaded snapshot. + """ + snapshot_cid = await _ipfs_writer_client.add_bytes(snapshot) + return snapshot_cid + + async def _commit_payload( + self, + task_type: str, + _ipfs_writer_client: AsyncIPFSClient, + project_id: str, + epoch: Union[ + PowerloomSnapshotProcessMessage, + PowerloomSnapshotSubmittedMessage, + PowerloomCalculateAggregateMessage, + ], + snapshot: Union[BaseModel, AggregateBase], + storage_flag: bool, + ): + """ + Commits the given snapshot to IPFS and web3 storage (if enabled), and sends messages to the event detector and relayer + dispatch queues. + + Args: + task_type (str): The type of task being committed. + _ipfs_writer_client (AsyncIPFSClient): The IPFS client to use for uploading the snapshot. + project_id (str): The ID of the project the snapshot belongs to. + epoch (Union[PowerloomSnapshotProcessMessage, PowerloomSnapshotSubmittedMessage, PowerloomCalculateAggregateMessage]): The epoch the snapshot belongs to. + snapshot (Union[BaseModel, AggregateBase]): The snapshot to commit. + storage_flag (bool): Whether to upload the snapshot to web3 storage. 
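Editor's note: `_upload_web3_storage` above posts the snapshot bytes with a bearer token, calls `raise_for_status()`, and its retry policy deliberately excludes `httpx.HTTPStatusError` so 4xx/5xx responses are not retried blindly while transport errors are. A hedged sketch of that shape; the endpoint URL, token, and timeout are placeholders, not the project's configured `settings.web3storage` values.

```python
import asyncio

import httpx
import tenacity
from tenacity import retry, stop_after_attempt, wait_random_exponential

UPLOAD_URL = 'https://api.web3.storage/upload'   # placeholder endpoint
API_TOKEN = 'REPLACE_ME'                         # placeholder token


@retry(
    wait=wait_random_exponential(multiplier=1, max=10),
    stop=stop_after_attempt(5),
    # transport errors are retried, HTTP error responses are not
    retry=tenacity.retry_if_not_exception_type(httpx.HTTPStatusError),
    reraise=True,
)
async def upload_snapshot(client: httpx.AsyncClient, snapshot: bytes) -> dict:
    response = await client.post(UPLOAD_URL, files={'file': snapshot})
    response.raise_for_status()
    return response.json()


async def main():
    async with httpx.AsyncClient(
        headers={'Authorization': f'Bearer {API_TOKEN}'},
        timeout=httpx.Timeout(30.0),
    ) as client:
        # raises httpx.HTTPStatusError or a transport error on failure
        print(await upload_snapshot(client, b'{"example": "snapshot"}'))


if __name__ == '__main__':
    asyncio.run(main())
```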
+ + Returns: + None + """ + # payload commit sequence begins + # upload to IPFS + snapshot_json = json.dumps(snapshot.dict(by_alias=True), sort_keys=True, separators=(',', ':')) + snapshot_bytes = snapshot_json.encode('utf-8') + try: + snapshot_cid = await self._upload_to_ipfs(snapshot_bytes, _ipfs_writer_client) + except Exception as e: + self._logger.opt(exception=True).error( + 'Exception uploading snapshot to IPFS for epoch {}: {}, Error: {},' + 'sending failure notifications', epoch, snapshot, e, + ) + notification_message = SnapshotterIssue( + instanceID=settings.instance_id, + issueType=SnapshotterReportState.MISSED_SNAPSHOT.value, + projectID=project_id, + epochId=str(epoch.epochId), + timeOfReporting=str(time.time()), + extra=json.dumps({'issueDetails': f'Error : {e}'}), + ) + await send_failure_notifications_async( + client=self._client, message=notification_message, + ) + else: + # add to zset of unfinalized snapshot CIDs + unfinalized_entry = UnfinalizedSnapshot( + snapshotCid=snapshot_cid, + snapshot=snapshot.dict(by_alias=True), + ) + await self._redis_conn.zadd( + name=submitted_unfinalized_snapshot_cids(project_id), + mapping={unfinalized_entry.json(sort_keys=True): epoch.epochId}, + ) + # publish snapshot submitted event to event detector queue + snapshot_submitted_message = PowerloomSnapshotSubmittedMessage( + snapshotCid=snapshot_cid, + epochId=epoch.epochId, + projectId=project_id, + timestamp=int(time.time()), + ) + try: + async with self._rmq_connection_pool.acquire() as connection: + async with self._rmq_channel_pool.acquire() as channel: + # Prepare a message to send + commit_payload_exchange = await channel.get_exchange( + name=self._event_detector_exchange, + ) + message_data = snapshot_submitted_message.json().encode() + + # Prepare a message to send + message = Message(message_data) + + await commit_payload_exchange.publish( + message=message, + routing_key=self._event_detector_routing_key_prefix + 'SnapshotSubmitted', + ) + + self._logger.debug( + 'Sent snapshot submitted message to event detector queue | ' + 'Project: {} | Epoch: {} | Snapshot CID: {}', + project_id, epoch.epochId, snapshot_cid, + ) + + except Exception as e: + self._logger.opt(exception=True).error( + 'Exception sending snapshot submitted message to event detector queue: {} | Project: {} | Epoch: {} | Snapshot CID: {}', + e, project_id, epoch.epochId, snapshot_cid, + ) + + try: + await self._redis_conn.zremrangebyscore( + name=submitted_unfinalized_snapshot_cids(project_id), + min='-inf', + max=epoch.epochId - 32, + ) + except: + pass + # send to relayer dispatch queue + await self._send_payload_commit_service_queue( + task_type=task_type, + project_id=project_id, + epoch=epoch, + snapshot_cid=snapshot_cid, + ) + + # upload to web3 storage + if storage_flag: + asyncio.ensure_future(self._upload_web3_storage(snapshot_bytes)) + async def _rabbitmq_consumer(self, loop): + """ + Consume messages from a RabbitMQ queue. + + Args: + loop (asyncio.AbstractEventLoop): The event loop to use for the consumer. 
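Editor's note: `_commit_payload` above serializes the snapshot with `sort_keys=True` and compact separators so the byte stream, and therefore the IPFS CID, is deterministic, then parks the CID in a per-project Redis sorted set scored by epoch ID and trims entries older than 32 epochs. A sketch of those two steps with redis-py's asyncio client; the key name and the 32-epoch window follow the diff, everything else (CID, project id, local Redis) is illustrative.

```python
import asyncio
import json

from redis import asyncio as aioredis


def canonical_snapshot_bytes(snapshot: dict) -> bytes:
    """Deterministic serialization: same dict -> same bytes -> same IPFS CID."""
    return json.dumps(snapshot, sort_keys=True, separators=(',', ':')).encode('utf-8')


async def track_unfinalized(redis_conn, project_id: str, epoch_id: int, cid: str):
    key = f'projectID:{project_id}:unfinalizedSnapshots'
    # score each entry by epoch so old ones can be pruned by score range
    await redis_conn.zadd(key, {json.dumps({'snapshotCid': cid}): epoch_id})
    # keep roughly the last 32 epochs, as the worker in the diff does
    await redis_conn.zremrangebyscore(key, '-inf', epoch_id - 32)


async def main():
    redis_conn = aioredis.Redis()   # assumes a local Redis instance
    print(canonical_snapshot_bytes({'b': 2, 'a': 1}))   # b'{"a":1,"b":2}'
    await track_unfinalized(redis_conn, 'demo-project', epoch_id=120, cid='bafy-example-cid')
    await redis_conn.close()


if __name__ == '__main__':
    asyncio.run(main())
```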
+ + Returns: + None + """ self._rmq_connection_pool = Pool(get_rabbitmq_robust_connection_async, max_size=5, loop=loop) self._rmq_channel_pool = Pool( partial(get_rabbitmq_channel, self._rmq_connection_pool), max_size=20, @@ -95,126 +338,138 @@ async def _rabbitmq_consumer(self, loop): async def _send_payload_commit_service_queue( self, - type_: str, + task_type: str, project_id: str, epoch: Union[ PowerloomSnapshotProcessMessage, PowerloomSnapshotSubmittedMessage, PowerloomCalculateAggregateMessage, ], - snapshot: Union[BaseModel, AggregateBase, None], - storage_flag: bool, + snapshot_cid: str, ): + """ + Sends a commit payload message to the commit payload queue via RabbitMQ. - if not snapshot: - self._logger.info( - ( - 'No snapshot to commit or Construction of snapshot' - ' failed for {} against epoch {}' - ), - type_, - epoch, - ) - else: - try: - source_chain_details = await get_source_chain_id( - redis_conn=self._redis_conn, - rpc_helper=self._anchor_rpc_helper, - state_contract_obj=self.protocol_state_contract, - ) - except Exception as e: - self._logger.opt(exception=True).error( - 'Exception getting source chain id: {}', e, - ) - raise e - finally: - await self._redis_conn.close() + Args: + task_type (str): The type of task being performed. + project_id (str): The ID of the project. + epoch (Union[PowerloomSnapshotProcessMessage, PowerloomSnapshotSubmittedMessage, PowerloomCalculateAggregateMessage]): The epoch object. + snapshot_cid (str): The CID of the snapshot. - payload = snapshot.dict(by_alias=True) + Raises: + Exception: If there is an error getting the source chain ID or sending the message to the commit payload queue. - commit_payload = PayloadCommitMessage( - message=payload, - web3Storage=storage_flag, - sourceChainId=source_chain_details, - projectId=project_id, - epochId=epoch.epochId, + Returns: + None + """ + try: + source_chain_details = await get_source_chain_id( + redis_conn=self._redis_conn, + rpc_helper=self._anchor_rpc_helper, + state_contract_obj=self._protocol_state_contract, + ) + except Exception as e: + self._logger.opt(exception=True).error( + 'Exception getting source chain id: {}', e, ) + raise e + commit_payload = PayloadCommitMessage( + sourceChainId=source_chain_details, + projectId=project_id, + epochId=epoch.epochId, + snapshotCID=snapshot_cid, + ) - # send through rabbitmq - try: - async with self._rmq_connection_pool.acquire() as connection: - async with self._rmq_channel_pool.acquire() as channel: - # Prepare a message to send - commit_payload_exchange = await channel.get_exchange( - name=self._commit_payload_exchange, - ) - message_data = commit_payload.json().encode() + # send through rabbitmq + try: + async with self._rmq_connection_pool.acquire() as connection: + async with self._rmq_channel_pool.acquire() as channel: + # Prepare a message to send + commit_payload_exchange = await channel.get_exchange( + name=self._commit_payload_exchange, + ) + message_data = commit_payload.json().encode() - # Prepare a message to send - message = Message(message_data) + # Prepare a message to send + message = Message(message_data) - await commit_payload_exchange.publish( - message=message, - routing_key=self._commit_payload_routing_key, - ) + await commit_payload_exchange.publish( + message=message, + routing_key=self._commit_payload_routing_key, + ) - self._logger.info( - 'Sent message to commit payload queue: {}', commit_payload, - ) + self._logger.info( + 'Sent message to commit payload queue: {}', commit_payload, + ) - except Exception as e: - 
self._logger.opt(exception=True).error( - ( - 'Exception committing snapshot to commit payload queue:' - ' {} | dump: {}' - ), - snapshot, - e, - ) - await self._redis_conn.hset( - name=epoch_id_project_to_state_mapping( - epoch.epochId, SnapshotterStates.SNAPSHOT_SUBMIT_PAYLOAD_COMMIT.value, - ), - mapping={ - project_id: SnapshotterStateUpdate( - status='failed', error=str(e), timestamp=int(time.time()), - ).json(), - }, - ) - else: - await self._redis_conn.hset( - name=epoch_id_project_to_state_mapping( - epoch.epochId, SnapshotterStates.SNAPSHOT_SUBMIT_PAYLOAD_COMMIT.value, - ), - mapping={ - project_id: SnapshotterStateUpdate( - status='success', timestamp=int(time.time()), - ).json(), - }, - ) + except Exception as e: + self._logger.opt(exception=True).error( + ( + 'Exception committing snapshot CID {} to commit payload queue:' + ' {} | dump: {}' + ), + snapshot_cid, + e, + ) + await self._redis_conn.hset( + name=epoch_id_project_to_state_mapping( + epoch.epochId, SnapshotterStates.SNAPSHOT_SUBMIT_PAYLOAD_COMMIT.value, + ), + mapping={ + project_id: SnapshotterStateUpdate( + status='failed', error=str(e), timestamp=int(time.time()), + ).json(), + }, + ) + else: + await self._redis_conn.hset( + name=epoch_id_project_to_state_mapping( + epoch.epochId, SnapshotterStates.SNAPSHOT_SUBMIT_PAYLOAD_COMMIT.value, + ), + mapping={ + project_id: SnapshotterStateUpdate( + status='success', timestamp=int(time.time()), + ).json(), + }, + ) async def _on_rabbitmq_message(self, message: IncomingMessage): + """ + Callback function that is called when a message is received from RabbitMQ. + + :param message: The incoming message from RabbitMQ. + """ pass async def _init_redis_pool(self): + """ + Initializes the Redis connection pool and sets the `_redis_conn` attribute to the created connection pool. + """ self._aioredis_pool = RedisPoolCache() await self._aioredis_pool.populate() self._redis_conn = self._aioredis_pool._aioredis_pool async def _init_rpc_helper(self): + """ + Initializes the RpcHelper objects for the worker and anchor chain, and sets up the protocol state contract. + """ self._rpc_helper = RpcHelper(rpc_settings=settings.rpc) self._anchor_rpc_helper = RpcHelper(rpc_settings=settings.anchor_chain_rpc) - self.protocol_state_contract = self._anchor_rpc_helper.get_current_node()['web3_client'].eth.contract( + self._protocol_state_contract = self._anchor_rpc_helper.get_current_node()['web3_client'].eth.contract( address=Web3.toChecksumAddress( self.protocol_state_contract_address, ), - abi=self.protocol_state_contract_abi, + abi=read_json_file( + settings.protocol_state.abi, + self._logger, + ), ) - # cleaning up ABI - self.protocol_state_contract_abi = None async def _init_httpx_client(self): + """ + Initializes the HTTPX client and transport objects for making HTTP requests. 
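Editor's note: the hunk above records the outcome of the `SNAPSHOT_SUBMIT_PAYLOAD_COMMIT` transition per project in a Redis hash keyed by epoch and state, writing a small JSON status object either way (failed with the error, or success with a timestamp); the internal `epochProcessingStatus` API reads these hashes back. A minimal sketch of the write side, with the key layout taken from `epoch_id_project_to_state_mapping()` in the diff and the status model reduced to a plain dict.

```python
import asyncio
import json
import time

from redis import asyncio as aioredis


def epoch_state_key(epoch_id: int, state_id: str) -> str:
    # mirrors epoch_id_project_to_state_mapping() in snapshotter/utils/redis/redis_keys.py
    return f'epochID:{epoch_id}:stateID:{state_id}:processingStatus'


async def record_state(redis_conn, epoch_id: int, state_id: str,
                       project_id: str, status: str, error: str = ''):
    update = {'status': status, 'timestamp': int(time.time())}
    if error:
        update['error'] = error
    await redis_conn.hset(
        epoch_state_key(epoch_id, state_id),
        mapping={project_id: json.dumps(update)},
    )


async def main():
    redis_conn = aioredis.Redis()   # assumes a local Redis instance
    await record_state(redis_conn, 1234, 'SNAPSHOT_SUBMIT_PAYLOAD_COMMIT', 'demo-project', 'success')
    await record_state(redis_conn, 1234, 'SNAPSHOT_SUBMIT_PAYLOAD_COMMIT', 'other-project',
                       'failed', error='relayer unreachable')
    await redis_conn.close()


if __name__ == '__main__':
    asyncio.run(main())
```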
+ """ self._async_transport = AsyncHTTPTransport( limits=Limits( max_connections=200, @@ -227,20 +482,75 @@ async def _init_httpx_client(self): follow_redirects=False, transport=self._async_transport, ) + self._web3_storage_upload_transport = AsyncHTTPTransport( + limits=Limits( + max_connections=200, + max_keepalive_connections=settings.web3storage.max_idle_conns, + keepalive_expiry=settings.web3storage.idle_conn_timeout, + ), + ) + self._web3_storage_upload_client = AsyncClient( + timeout=Timeout(timeout=settings.web3storage.timeout), + follow_redirects=False, + transport=self._web3_storage_upload_transport, + headers={'Authorization': 'Bearer ' + settings.web3storage.api_token}, + ) + + async def _init_protocol_meta(self): + # TODO: combine these into a single call + try: + source_block_time = await self._anchor_rpc_helper.web3_call( + [self._protocol_state_contract.functions.SOURCE_CHAIN_BLOCK_TIME()], + redis_conn=self._redis_conn, + ) + # source_block_time = self._protocol_state_contract.functions.SOURCE_CHAIN_BLOCK_TIME().call() + except Exception as e: + self._logger.exception( + 'Exception in querying protocol state for source chain block time: {}', + e, + ) + else: + source_block_time = source_block_time[0] + self._source_chain_block_time = source_block_time / 10 ** 4 + self._logger.debug('Set source chain block time to {}', self._source_chain_block_time) + try: + epoch_size = await self._anchor_rpc_helper.web3_call( + [self._protocol_state_contract.functions.EPOCH_SIZE()], + redis_conn=self._redis_conn, + ) + except Exception as e: + self._logger.exception( + 'Exception in querying protocol state for epoch size: {}', + e, + ) + else: + self._epoch_size = epoch_size[0] + self._logger.debug('Set epoch size to {}', self._epoch_size) async def init(self): + """ + Initializes the worker by initializing the Redis pool, HTTPX client, and RPC helper. + """ if not self._initialized: await self._init_redis_pool() await self._init_httpx_client() await self._init_rpc_helper() + await self._init_protocol_meta() self._initialized = True def run(self) -> None: + """ + Runs the worker by setting resource limits, registering signal handlers, starting the RabbitMQ consumer, and + running the event loop until it is stopped. + """ + self._logger = logger.bind(module=self.name) soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) resource.setrlimit( resource.RLIMIT_NOFILE, (settings.rlimit.file_descriptors, hard), ) + for signame in [SIGINT, SIGTERM, SIGQUIT]: + signal(signame, self._signal_handler) ev_loop = asyncio.get_event_loop() self._logger.debug( f'Starting asynchronous callback worker {self._unique_id}...', diff --git a/snapshotter/utils/gunicorn.py b/snapshotter/utils/gunicorn.py index 112d0ea2..06919877 100644 --- a/snapshotter/utils/gunicorn.py +++ b/snapshotter/utils/gunicorn.py @@ -10,6 +10,10 @@ class InterceptHandler(logging.Handler): + """ + A custom logging handler that intercepts log records and forwards them to Loguru logger. + """ + def emit(self, record): # Get corresponding Loguru level if it exists try: @@ -30,6 +34,14 @@ def emit(self, record): class StubbedGunicornLogger(Logger): + """ + A custom logger for Gunicorn that stubs out the error and access loggers. + + This logger sets up a NullHandler for both the error and access loggers, effectively + disabling them. It also sets the log level to the value of LOG_LEVEL, which is defined + elsewhere in the codebase. 
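Editor's note: `run()` above raises the process's open-file limit and installs SIGINT/SIGTERM/SIGQUIT handlers before starting the event loop, so a shutdown signal cancels the consumer task instead of killing the worker mid-callback. A stripped-down sketch of that startup sequence; the limit value, worker body, and loop wiring are placeholders rather than the worker's actual configuration.

```python
import asyncio
import resource
import signal


class DemoWorker:
    """Stand-in for a callback worker process; body and limits are illustrative."""

    def __init__(self):
        self._consumer_task = None

    def _signal_handler(self, signum, frame):
        # INT/TERM/QUIT: cancel the long-running consumer instead of dying mid-callback
        if signum in (signal.SIGINT, signal.SIGTERM, signal.SIGQUIT) and self._consumer_task:
            self._consumer_task.cancel()

    async def _consume(self):
        try:
            while True:
                await asyncio.sleep(1)   # stand-in for consuming RabbitMQ messages
        except asyncio.CancelledError:
            print('consumer cancelled, shutting down cleanly')

    def run(self):
        _, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
        # raise the soft fd limit (4096 is an arbitrary demo value), never above the hard limit
        target = 4096 if hard == resource.RLIM_INFINITY else min(4096, hard)
        resource.setrlimit(resource.RLIMIT_NOFILE, (target, hard))
        for signame in (signal.SIGINT, signal.SIGTERM, signal.SIGQUIT):
            signal.signal(signame, self._signal_handler)
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        self._consumer_task = loop.create_task(self._consume())
        loop.run_until_complete(self._consumer_task)


if __name__ == '__main__':
    DemoWorker().run()   # Ctrl+C exits through the cancellation path
```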
+ """ + def setup(self, cfg): handler = logging.NullHandler() self.error_logger = logging.getLogger('gunicorn.error') @@ -41,14 +53,32 @@ def setup(self, cfg): class StandaloneApplication(BaseApplication): - """Our Gunicorn application.""" + """ + A standalone Gunicorn application that can be run without a Gunicorn server. + """ def __init__(self, app, options=None): + """ + Initialize the Gunicorn server with the given app and options. + + :param app: The WSGI application to run. + :type app: callable + :param options: Optional dictionary of configuration options. + :type options: dict + """ self.options = options or {} self.application = app super().__init__() def load_config(self): + """ + Load the configuration for the Gunicorn server. + + This function loads the configuration for the Gunicorn server from the options + provided by the user. It sets the configuration values in the `cfg` object. + + :return: None + """ config = { key: value for key, value in self.options.items() @@ -58,4 +88,7 @@ def load_config(self): self.cfg.set(key.lower(), value) def load(self): + """ + Load the application and return it. + """ return self.application diff --git a/snapshotter/utils/helper_functions.py b/snapshotter/utils/helper_functions.py index dcf473b6..bbc5cc70 100644 --- a/snapshotter/utils/helper_functions.py +++ b/snapshotter/utils/helper_functions.py @@ -2,7 +2,6 @@ import sys from functools import wraps -import psutil import web3.datastructures from snapshotter.settings.config import settings @@ -14,6 +13,16 @@ def cleanup_proc_hub_children(fn): + """ + A decorator that wraps a function and handles cleanup of any child processes + spawned by the function in case of an exception. + + Args: + fn (function): The function to be wrapped. + + Returns: + function: The wrapped function. + """ @wraps(fn) def wrapper(self, *args, **kwargs): try: @@ -33,49 +42,7 @@ def wrapper(self, *args, **kwargs): # for p in alive: # logger.error(f'killing process: {p.name()}') # p.kill() - logger.error('Waiting on spawned callback workers to join...') - for ( - worker_class_name, - unique_worker_entries, - ) in self._spawned_cb_processes_map.items(): - for ( - worker_unique_id, - worker_unique_process_details, - ) in unique_worker_entries.items(): - if worker_unique_process_details is not None and worker_unique_process_details.pid: - logger.error( - ( - 'Waiting on spawned callback worker {} | Unique' - ' ID {} | PID {} to join...' - ), - worker_class_name, - worker_unique_id, - worker_unique_process_details.pid, - ) - psutil.Process(pid=worker_unique_process_details.pid).wait() - - logger.error( - 'Waiting on spawned core workers to join... {}', - self._spawned_processes_map, - ) - for ( - worker_class_name, - worker_pid, - ) in self._spawned_processes_map.items(): - logger.error( - 'spawned Process Pid to wait on {}', - worker_pid, - ) - if worker_pid is not None: - logger.error( - ( - 'Waiting on spawned core worker {} | PID {} to' - ' join...' - ), - worker_class_name, - worker_pid, - ) - psutil.Process(worker_pid).wait() + self._kill_all_children() logger.error('Finished waiting for all children...now can exit.') finally: logger.error('Finished waiting for all children...now can exit.') @@ -86,6 +53,15 @@ def wrapper(self, *args, **kwargs): def acquire_threading_semaphore(fn): + """ + A decorator function that acquires a threading semaphore before executing the decorated function and releases it after execution. + + Args: + fn (function): The function to be decorated. 
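Editor's note: `StandaloneApplication` above follows Gunicorn's documented custom-application pattern: options are passed as a dict, filtered into `cfg` in `load_config()`, and `load()` returns the app object. A short usage sketch, assuming a FastAPI app is available to serve; the bind address, worker count, and worker class are illustrative and not the repo's actual entrypoint wiring.

```python
# Hypothetical launcher; the real entrypoint wiring lives elsewhere in the repo.
from fastapi import FastAPI

from snapshotter.utils.gunicorn import StandaloneApplication
from snapshotter.utils.gunicorn import StubbedGunicornLogger

app = FastAPI()


@app.get('/health')
async def health():
    return {'status': 'ok'}


if __name__ == '__main__':
    options = {
        'bind': '0.0.0.0:8002',                           # illustrative port
        'workers': 2,
        'worker_class': 'uvicorn.workers.UvicornWorker',  # ASGI worker for FastAPI
        'logger_class': StubbedGunicornLogger,            # route gunicorn logs through loguru
    }
    StandaloneApplication(app, options).run()
```

Keys not recognized by Gunicorn (or set to `None`) are silently dropped by `load_config()`, which keeps the options dict forgiving.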
+ + Returns: + function: The decorated function. + """ @wraps(fn) def semaphore_wrapper(*args, **kwargs): semaphore = kwargs['semaphore'] @@ -105,6 +81,15 @@ def semaphore_wrapper(*args, **kwargs): def preloading_entry_exit_logger(fn): + """ + Decorator function to log entry and exit of preloading worker functions. + + Args: + fn (Callable): The function to be decorated. + + Returns: + Callable: The decorated function. + """ @wraps(fn) async def wrapper(self, *args, **kwargs): epoch: EpochBase = kwargs['epoch'] @@ -126,6 +111,15 @@ async def wrapper(self, *args, **kwargs): async def as_completed_async(futures): + """ + A coroutine that iterates over given futures and yields their results as they complete. + + Args: + futures (List[asyncio.Future]): A list of asyncio.Future objects. + + Yields: + The result of each completed future as it completes. + """ loop = asyncio.get_event_loop() wrappers = [] for fut in futures: @@ -144,6 +138,15 @@ async def as_completed_async(futures): def attribute_dict_to_dict(dictToParse: web3.datastructures.AttributeDict): + """ + Converts an AttributeDict object to a regular dictionary object. + + Args: + dictToParse (web3.datastructures.AttributeDict): The AttributeDict object to be converted. + + Returns: + dict: The converted dictionary object. + """ # convert any 'AttributeDict' type found to 'dict' parsedDict = dict(dictToParse) for key, val in parsedDict.items(): @@ -155,6 +158,17 @@ def attribute_dict_to_dict(dictToParse: web3.datastructures.AttributeDict): def _parse_value(val): + """ + Parses the given value and returns a string representation of it. + If the value is a nested dictionary, it is converted to a regular dictionary. + If the value is of type 'HexBytes', it is converted to a string. + + Args: + val: The value to be parsed. + + Returns: + A string representation of the given value. 
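Editor's note: `attribute_dict_to_dict` and `_parse_value` above flatten web3's `AttributeDict`/`HexBytes` structures (transaction receipts, logs) into JSON-serializable primitives before they are cached. A self-contained sketch of the same recursive conversion written against plain Python types, so it runs without web3 installed; with web3 present the same function handles receipts because `AttributeDict` is a `Mapping` and `HexBytes` is a `bytes` subclass.

```python
import json
from collections.abc import Mapping


def to_plain(value):
    """Recursively convert Mapping/bytes/list structures into JSON-friendly types."""
    if isinstance(value, Mapping):               # covers dict and web3's AttributeDict
        return {key: to_plain(val) for key, val in value.items()}
    if isinstance(value, (bytes, bytearray)):    # covers HexBytes fields like blockHash
        return '0x' + value.hex()
    if isinstance(value, (list, tuple)):
        return [to_plain(item) for item in value]
    return value


# dummy receipt-shaped structure for demonstration
receipt_like = {
    'transactionHash': bytes.fromhex('ab' * 32),
    'status': 1,
    'logs': [{'data': b'\x00' * 4, 'topics': [bytes.fromhex('cd' * 32)]}],
}
print(json.dumps(to_plain(receipt_like), indent=2))
```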
+ """ # check for nested dict structures to iterate through if 'dict' in str(type(val)).lower(): return attribute_dict_to_dict(val) diff --git a/snapshotter/utils/ipfs/__init__.py b/snapshotter/utils/ipfs/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/snapshotter/utils/ipfs/dag_utils.py b/snapshotter/utils/ipfs/dag_utils.py deleted file mode 100644 index 3563daf4..00000000 --- a/snapshotter/utils/ipfs/dag_utils.py +++ /dev/null @@ -1,52 +0,0 @@ -import io -import json -from typing import Optional - -from httpx import AsyncClient -from ipfs_client.main import AsyncIPFSClient - -from snapshotter.settings.config import settings -from snapshotter.utils.default_logger import logger -from snapshotter.utils.file_utils import read_text_file -from snapshotter.utils.file_utils import write_bytes_to_file - - -async def send_commit_callback(httpx_session: AsyncClient, url, payload): - if type(url) is bytes: - url = url.decode('utf-8') - resp = await httpx_session.post(url=url, json=payload) - json_response = resp.json() - return json_response - - -async def get_dag_block(dag_cid: str, project_id: str, ipfs_read_client: AsyncIPFSClient) -> Optional[dict]: - dag_ipfs_fetch = False - dag = read_text_file(settings.ipfs.local_cache_path + '/' + project_id + '/' + dag_cid + '.json') - try: - dag_json = json.loads(dag) - except: - dag_ipfs_fetch = True - else: - return dag_json - if dag_ipfs_fetch: - dag = await ipfs_read_client.dag.get(dag_cid) - # TODO: should be aiofiles - write_bytes_to_file( - settings.ipfs.local_cache_path + '/' + project_id, - '/' + dag_cid + '.json', str(dag).encode('utf-8'), - ) - return dag.as_json() - - -async def put_dag_block(dag_json: str, project_id: str, ipfs_write_client: AsyncIPFSClient): - dag_json = dag_json.encode('utf-8') - out = await ipfs_write_client.dag.put(io.BytesIO(dag_json), pin=True) - dag_cid = out['Cid']['/'] - try: - write_bytes_to_file(f'{settings.ipfs.local_cache_path}/project_id', f'/{dag_cid}.json', dag_json) - except Exception as exc: - logger.opt(exception=True).error( - 'Failed to write dag-block {} for project {} to local cache due to exception {}', - dag_json, project_id, exc, - ) - return dag_cid diff --git a/snapshotter/utils/ipfs/retrieval_utils.py b/snapshotter/utils/ipfs/retrieval_utils.py deleted file mode 100644 index 6b16b2b0..00000000 --- a/snapshotter/utils/ipfs/retrieval_utils.py +++ /dev/null @@ -1,152 +0,0 @@ -import json -from typing import Union - -from httpx import _exceptions as httpx_exceptions -from ipfs_client.main import AsyncIPFSClient -from tenacity import retry -from tenacity import stop_after_attempt -from tenacity import wait_random_exponential - -from snapshotter.settings.config import settings -from snapshotter.utils.default_logger import logger -from snapshotter.utils.file_utils import read_text_file -from snapshotter.utils.ipfs.dag_utils import get_dag_block -from snapshotter.utils.models.data_models import BlockRetrievalFlags -from snapshotter.utils.models.data_models import RetrievedDAGBlock -from snapshotter.utils.models.data_models import RetrievedDAGBlockPayload - - -@retry( - reraise=True, - wait=wait_random_exponential(multiplier=1, max=30), - stop=stop_after_attempt(3), -) -async def retrieve_block_data( - project_id: str, # only required for ease of local filesystem caching - block_dag_cid: str, - ipfs_read_client: AsyncIPFSClient, - data_flag: BlockRetrievalFlags = BlockRetrievalFlags.only_dag_block, -) -> Union[RetrievedDAGBlock, RetrievedDAGBlockPayload]: - """ - Get dag block 
from ipfs - Args: - block_dag_cid:str - The cid of the dag block that needs to be retrieved - data_flag:int - Refer enum data model `snapshotter.utils.data_models.BlockRetrievalFlags` - """ - block = await get_dag_block(block_dag_cid, project_id, ipfs_read_client=ipfs_read_client) - # handle case of no dag_block or null payload in dag_block - if not block: - if data_flag == BlockRetrievalFlags.only_dag_block or data_flag == BlockRetrievalFlags.dag_block_and_payload_data: - return RetrievedDAGBlock() - else: - return RetrievedDAGBlockPayload() - logger.trace('Retrieved dag block with CID %s: %s', block_dag_cid, block) - # the data field may not be present in the dag block because of the DAG finalizer omitting null fields in DAG block model while converting to JSON - if 'data' not in block.keys() or block['data'] is None: - if data_flag == BlockRetrievalFlags.dag_block_and_payload_data: - block['data'] = RetrievedDAGBlockPayload() - return RetrievedDAGBlock.parse_obj(block) - elif data_flag == BlockRetrievalFlags.only_dag_block: - return RetrievedDAGBlock.parse_obj(block) - else: - return RetrievedDAGBlockPayload() - if data_flag == BlockRetrievalFlags.only_dag_block: - return RetrievedDAGBlock.parse_obj(block) - else: - payload = dict() - payload_data = await retrieve_payload_data( - payload_cid=block['data']['cid']['/'], - project_id=project_id, - ipfs_read_client=ipfs_read_client, - ) - if payload_data: - try: - payload_data = json.loads(payload_data) - except json.JSONDecodeError: - logger.error( - 'Failed to JSON decode payload data for CID %s, project %s: %s', - block['data']['cid']['/'], project_id, payload_data, - ) - payload_data = None - payload['payload'] = payload_data - payload['cid'] = block['data']['cid']['/'] - - if data_flag == BlockRetrievalFlags.dag_block_and_payload_data: - block['data'] = RetrievedDAGBlockPayload.parse_obj(payload) - return RetrievedDAGBlock.parse_obj(block) - - if data_flag == BlockRetrievalFlags.only_payload_data: - return RetrievedDAGBlockPayload.parse_obj(payload) - - -async def retrieve_payload_data( - payload_cid, - ipfs_read_client: AsyncIPFSClient, - project_id, -): - """ - - Given a payload_cid, get its data from ipfs, at the same time increase its hit - """ - #payload_key = redis_keys.get_hits_payload_data_key() - # if writer_redis_conn: - # r = await writer_redis_conn.zincrby(payload_key, 1.0, payload_cid) - #retrieval_utils_logger.debug("Payload Data hit for: ") - # retrieval_utils_logger.debug(payload_cid) - payload_data = None - if project_id is not None: - payload_data = read_text_file(settings.ipfs.local_cache_path + '/' + project_id + '/' + payload_cid + '.json') - if payload_data is None: - logger.trace('Failed to read payload with CID %s for project %s from local cache ', payload_cid, project_id) - # Get the payload Data from ipfs - try: - _payload_data = await ipfs_read_client.cat(payload_cid) - except (httpx_exceptions.TransportError, httpx_exceptions.StreamError) as e: - logger.error('Failed to read payload with CID %s for project %s from IPFS : %s', payload_cid, project_id, e) - return None - else: - # retrieval_utils_logger.info("Successfully read payload with CID %s for project %s from IPFS: %s ", - # payload_cid,project_id, _payload_data) - if not isinstance(_payload_data, str): - return _payload_data.decode('utf-8') - else: - return _payload_data - else: - return payload_data - - -async def get_dag_chain(project_id: str, from_dag_cid: str, to_dag_cid: str, ipfs_read_client: AsyncIPFSClient): - chain = list() - cur_block: 
RetrievedDAGBlock = await retrieve_block_data( - project_id, - from_dag_cid, - ipfs_read_client, - BlockRetrievalFlags.dag_block_and_payload_data, - ) - cur_block_cid = from_dag_cid - chain.append(cur_block) - while True: - prev_dag_cid = cur_block.prevCid['/'] if cur_block.prevCid else None - if not prev_dag_cid: - if 'prevRoot' in cur_block and cur_block.prevRoot is not None: - prev_dag_cid = cur_block.prevRoot - if prev_dag_cid: - logger.trace( - 'Attempting to fetch prev DAG block at CID: {} | Current DAG block CID: {} height: {} ', - prev_dag_cid, cur_block_cid, cur_block.height, - ) - prev_dag_block: RetrievedDAGBlock = await retrieve_block_data( - project_id, - cur_block.prevCid['/'], - ipfs_read_client, - BlockRetrievalFlags.dag_block_and_payload_data, - ) - logger.trace( - 'Fetched prev DAG block at CID: {} height: {} | Current DAG block CID: {} height: {} | Payload: {}', - prev_dag_cid, prev_dag_block.height, cur_block_cid, cur_block.height, prev_dag_block.data.payload, - ) - chain.append(prev_dag_block) - if prev_dag_cid == to_dag_cid: - break - else: - break - return chain diff --git a/snapshotter/utils/models/data_models.py b/snapshotter/utils/models/data_models.py index dc7c7711..8d8dda51 100644 --- a/snapshotter/utils/models/data_models.py +++ b/snapshotter/utils/models/data_models.py @@ -11,11 +11,6 @@ from snapshotter.utils.callback_helpers import GenericPreloader -class SnapshotWorkerDetails(BaseModel): - unique_name: str - pid: Union[int, None] = None - - class ProcessorWorkerDetails(BaseModel): unique_name: str pid: Union[None, int] = None @@ -34,6 +29,8 @@ class SnapshotterReportState(Enum): SUBMITTED_INCORRECT_SNAPSHOT = 'SUBMITTED_INCORRECT_SNAPSHOT' SHUTDOWN_INITIATED = 'SHUTDOWN_INITIATED' CRASHED_CHILD_WORKER = 'CRASHED_CHILD_WORKER' + CRASHED_REPORTER_THREAD = 'CRASHED_REPORTER_THREAD' + UNHEALTHY_EPOCH_PROCESSING = 'UNHEALTHY_EPOCH_PROCESSING' class SnapshotterStates(Enum): @@ -52,9 +49,10 @@ class SnapshotterStateUpdate(BaseModel): class SnapshotterEpochProcessingReportItem(BaseModel): - epochId: int + epochId: int = 0 + epochEnd: int = 0 # map transition like EPOCH_RELEASED to its status - transitionStatus: Dict[str, Union[SnapshotterStateUpdate, None, Dict[str, SnapshotterStateUpdate]]] + transitionStatus: Dict[str, Union[SnapshotterStateUpdate, None, Dict[str, SnapshotterStateUpdate]]] = dict() class SnapshotterIssue(BaseModel): @@ -186,3 +184,13 @@ class SnapshotterIncorrectSnapshotSubmission(BaseModel): class SnapshotterProjectStatus(BaseModel): missedSubmissions: List[SnapshotterMissedSnapshotSubmission] incorrectSubmissions: List[SnapshotterIncorrectSnapshotSubmission] + + +class UnfinalizedSnapshot(BaseModel): + snapshotCid: str + snapshot: Dict[str, Any] + + +class TaskStatusRequest(BaseModel): + task_type: str + wallet_address: str diff --git a/snapshotter/utils/models/message_models.py b/snapshotter/utils/models/message_models.py index 26aa5000..9c7c83e9 100644 --- a/snapshotter/utils/models/message_models.py +++ b/snapshotter/utils/models/message_models.py @@ -47,6 +47,7 @@ class PowerloomSnapshotProcessMessage(EpochBase): data_source: Optional[str] = None primary_data_source: Optional[str] = None genesis: Optional[bool] = False + bulk_mode: Optional[bool] = False class PowerloomSnapshotFinalizedMessage(BaseModel): @@ -104,11 +105,10 @@ class AggregateBase(BaseModel): class PayloadCommitMessage(BaseModel): - message: Dict[Any, Any] - web3Storage: bool sourceChainId: int projectId: str epochId: int + snapshotCID: str class 
PayloadCommitFinalizedMessage(BaseModel): diff --git a/snapshotter/utils/models/settings_model.py b/snapshotter/utils/models/settings_model.py index a9e9d9d0..7c3cc46c 100644 --- a/snapshotter/utils/models/settings_model.py +++ b/snapshotter/utils/models/settings_model.py @@ -3,6 +3,7 @@ from typing import Optional from typing import Union +from ipfs_client.settings.data_models import IPFSConfig from pydantic import BaseModel from pydantic import Field @@ -128,20 +129,16 @@ class ExternalAPIAuth(BaseModel): apiSecret: str = '' -class IPFSconfig(BaseModel): - url: str - url_auth: Optional[ExternalAPIAuth] = None - reader_url: str - reader_url_auth: Optional[ExternalAPIAuth] = None - write_rate_limit: IPFSWriterRateLimit - timeout: int - local_cache_path: str - connection_limits: ConnectionLimits - - class Web3Storage(BaseModel): upload_snapshots: bool upload_aggregates: bool + url: str + upload_url_suffix: str + api_token: str + timeout: int + max_idle_conns: int + idle_conn_timeout: int + # rate_limit: Optional[IPFSWriterRateLimit] = None class Settings(BaseModel): @@ -161,7 +158,7 @@ class Settings(BaseModel): aggregator_config_path: str protocol_state: EventContract callback_worker_config: CallbackWorkerConfig - ipfs: IPFSconfig + ipfs: IPFSConfig web3storage: Web3Storage anchor_chain_rpc: RPCConfigBase @@ -177,6 +174,7 @@ class ProjectConfig(BaseModel): projects: Optional[List[str]] = None processor: ProcessorConfig preload_tasks: List[str] + bulk_mode: Optional[bool] = False class ProjectsConfig(BaseModel): diff --git a/snapshotter/utils/preloaders/tx_receipts/preloader.py b/snapshotter/utils/preloaders/tx_receipts/preloader.py index 6b88dc96..af4ebe90 100644 --- a/snapshotter/utils/preloaders/tx_receipts/preloader.py +++ b/snapshotter/utils/preloaders/tx_receipts/preloader.py @@ -2,7 +2,7 @@ from pydantic import ValidationError from redis import asyncio as aioredis - +from snapshotter.settings.config import settings from snapshotter.utils.default_logger import logger from snapshotter.utils.generic_delegator_preloader import DelegatorPreloaderAsyncWorker from snapshotter.utils.helper_functions import preloading_entry_exit_logger @@ -18,7 +18,6 @@ class TxPreloadWorker(DelegatorPreloaderAsyncWorker): def __init__(self) -> None: super(TxPreloadWorker, self).__init__() self._task_type = 'txreceipt' - self._logger = logger.bind(module='TxPreloadWorker') async def _handle_filter_worker_response_message(self, message: bytes): try: @@ -26,7 +25,7 @@ async def _handle_filter_worker_response_message(self, message: bytes): PowerloomDelegateTxReceiptWorkerResponseMessage.parse_raw(message) ) except ValidationError: - self._logger.opt(exception=True).error( + self._logger.opt(exception=settings.logs.trace_enabled).error( 'Bad message structure of txreceiptResponse', ) return @@ -53,22 +52,23 @@ async def _handle_filter_worker_response_message(self, message: bytes): return async with self._rw_lock.writer_lock: self._awaited_delegated_response_ids.remove(msg_obj.requestId) - self._collected_response_objects[msg_obj.epochId].update( + self._collected_response_objects.update( {msg_obj.txHash: msg_obj.txReceipt}, ) async def _on_delegated_responses_complete(self): - if self._collected_response_objects[self._epoch.epochId]: + if self._collected_response_objects: await self._redis_conn.hset( name=epoch_txs_htable(epoch_id=self._epoch.epochId), mapping={ k: json.dumps(v) - for k, v in self._collected_response_objects[self._epoch.epochId].items() + for k, v in self._collected_response_objects.items() }, ) 
@preloading_entry_exit_logger async def compute(self, epoch: EpochBase, redis_conn: aioredis.Redis, rpc_helper: RpcHelper): + self._logger = logger.bind(module='TxPreloadWorker') self._epoch = epoch self._redis_conn = redis_conn @@ -96,4 +96,4 @@ async def compute(self, epoch: EpochBase, redis_conn: aioredis.Redis, rpc_helper msg_obj.requestId: msg_obj for msg_obj in tx_receipt_query_messages } - return await super(TxPreloadWorker, self).compute(epoch, redis_conn, rpc_helper) + return await super(TxPreloadWorker, self).compute_with_retry(epoch, redis_conn, rpc_helper) diff --git a/snapshotter/utils/rabbitmq_helpers.py b/snapshotter/utils/rabbitmq_helpers.py index 278628ae..3e65bf11 100644 --- a/snapshotter/utils/rabbitmq_helpers.py +++ b/snapshotter/utils/rabbitmq_helpers.py @@ -21,6 +21,15 @@ def log_retry_callback(retry_state: RetryCallState) -> bool: + """ + Logs the attempt number of the retry state and returns True if the exception is an AMQPError and the attempt number is less than 5. + + Args: + retry_state (RetryCallState): The retry state object. + + Returns: + bool: True if the exception is an AMQPError and the attempt number is less than 5, False otherwise. + """ print( 'In rabbitmq reconnection helper decorator. attempt number: ', retry_state.attempt_number, @@ -32,6 +41,15 @@ def log_retry_callback(retry_state: RetryCallState) -> bool: def resume_on_rabbitmq_fail(fn) -> Any: + """ + Decorator function that retries the wrapped function in case of RabbitMQ failure. + + Args: + fn: The function to be wrapped. + + Returns: + The wrapped function. + """ @wraps(fn) def wrapper(*args, **kwargs): ret = None diff --git a/snapshotter/utils/redis/rate_limiter.py b/snapshotter/utils/redis/rate_limiter.py index 7401ad08..695a84de 100644 --- a/snapshotter/utils/redis/rate_limiter.py +++ b/snapshotter/utils/redis/rate_limiter.py @@ -52,6 +52,15 @@ # needs to be run only once async def load_rate_limiter_scripts(redis_conn: aioredis.Redis): + """ + Load rate limiter scripts into Redis and return their SHA hashes. + + Args: + redis_conn (aioredis.Redis): Redis connection object. + + Returns: + dict: A dictionary containing the SHA hashes of the loaded scripts. + """ script_clear_keys_sha = await redis_conn.script_load(SCRIPT_CLEAR_KEYS) script_incr_expire = await redis_conn.script_load(SCRIPT_INCR_EXPIRE) return { @@ -68,7 +77,15 @@ async def generic_rate_limiter( limit_incr_by=1, ): """ - return: tuple of (can_request, retry_after in case of false can_request, violated rate limit string if applicable) + A generic rate limiter that uses Redis as a storage backend. + + :param parsed_limits: A list of RateLimitItem objects that define the rate limits. + :param key_bits: A list of key bits to be used as part of the Redis key. + :param redis_conn: An instance of aioredis.Redis that is used to connect to Redis. + :param rate_limit_lua_script_shas: A dictionary containing the SHA hashes of the Lua scripts used by the rate limiter. + :param limit_incr_by: The amount by which to increment the rate limit counter. + :return: A tuple containing a boolean indicating whether the rate limit check passed, the retry-after time in seconds, + and a string representation of the rate limit that was checked. """ if not rate_limit_lua_script_shas: rate_limit_lua_script_shas = await load_rate_limiter_scripts(redis_conn) @@ -110,7 +127,23 @@ async def check_rpc_rate_limit( limit_incr_by=1, ): """ - rate limiter for rpc calls + Check if the RPC rate limit has been exceeded for the given app_id and request_payload. 
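Editor's note: `generic_rate_limiter` and `check_rpc_rate_limit` above enforce per-node RPC quotas through Lua scripts (`SCRIPT_INCR_EXPIRE`, `SCRIPT_CLEAR_KEYS`) loaded once and invoked by SHA, on top of the async-limits parser. The sketch below shows only the underlying fixed-window idea with a plain INCR + EXPIRE pipeline, so treat it as a conceptual reduction rather than the project's limiter; key names, limits, and windows are illustrative.

```python
import asyncio
import time

from redis import asyncio as aioredis


async def allow_request(redis_conn, key_bits, limit, window_seconds):
    """Fixed-window limiter: at most `limit` hits per `window_seconds` per key."""
    window = int(time.time()) // window_seconds
    key = 'rate_limit:' + ':'.join(key_bits) + f':{window}'
    async with redis_conn.pipeline(transaction=True) as pipe:
        pipe.incr(key)                     # count this hit
        pipe.expire(key, window_seconds)   # let the window key clean itself up
        count, _ = await pipe.execute()
    return count <= limit


async def main():
    redis_conn = aioredis.Redis()   # assumes a local Redis instance
    for i in range(5):
        ok = await allow_request(redis_conn, ['node-0', 'eth_call'], limit=3, window_seconds=60)
        print(f'request {i}: {"allowed" if ok else "rate limited"}')
    await redis_conn.close()


if __name__ == '__main__':
    asyncio.run(main())
```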
+ + Args: + parsed_limits (list): List of parsed rate limit configurations. + app_id (str): The ID of the app making the request. + redis_conn (aioredis.Redis): The Redis connection object. + request_payload (dict): The payload of the request. + error_msg (str): The error message to include in the RPCException if the rate limit is exceeded. + logger (Logger): The logger object. + rate_limit_lua_script_shas (dict, optional): A dictionary of Lua script SHA1 hashes for rate limiting. + limit_incr_by (int, optional): The amount to increment the rate limit by. + + Returns: + bool: True if the rate limit has not been exceeded, False otherwise. + + Raises: + RPCException: If the rate limit has been exceeded. """ key_bits = [ app_id, diff --git a/snapshotter/utils/redis/redis_conn.py b/snapshotter/utils/redis/redis_conn.py index aab904b1..04fb9de0 100644 --- a/snapshotter/utils/redis/redis_conn.py +++ b/snapshotter/utils/redis/redis_conn.py @@ -1,5 +1,7 @@ import contextlib +from datetime import datetime from functools import wraps +from pydoc import cli import redis import redis.exceptions as redis_exc @@ -7,8 +9,13 @@ from redis import asyncio as aioredis from redis.asyncio.connection import ConnectionPool +from snapshotter.settings.config import settings from snapshotter.settings.config import settings as settings_conf +from snapshotter.utils.callback_helpers import send_failure_notifications_async +from snapshotter.utils.callback_helpers import send_failure_notifications_sync from snapshotter.utils.default_logger import logger +from snapshotter.utils.models.data_models import SnapshotterIssue +from snapshotter.utils.models.data_models import SnapshotterReportState # setup logging logger = logger.bind(module='Powerloom|RedisConn') @@ -23,6 +30,12 @@ def construct_redis_url(): + """ + Constructs a Redis URL based on the REDIS_CONN_CONF dictionary. + + Returns: + str: Redis URL constructed from REDIS_CONN_CONF dictionary. + """ if REDIS_CONN_CONF['password']: return ( f'redis://{REDIS_CONN_CONF["password"]}@{REDIS_CONN_CONF["host"]}:{REDIS_CONN_CONF["port"]}' @@ -35,6 +48,15 @@ def construct_redis_url(): async def get_aioredis_pool(pool_size=200): + """ + Returns an aioredis Redis connection pool. + + Args: + pool_size (int): Maximum number of connections to the Redis server. + + Returns: + aioredis.Redis: Redis connection pool. + """ pool = ConnectionPool.from_url( url=construct_redis_url(), retry_on_error=[redis.exceptions.ReadOnlyError], @@ -49,7 +71,16 @@ def create_redis_conn( connection_pool: redis.BlockingConnectionPool, ) -> redis.Redis: """ - Contextmanager that will create and teardown a session. + Context manager for creating a Redis connection using a connection pool. + + Args: + connection_pool (redis.BlockingConnectionPool): The connection pool to use. + + Yields: + redis.Redis: A Redis connection object. + + Raises: + redis_exc.RedisError: If there is an error connecting to Redis. """ try: redis_conn = redis.Redis(connection_pool=connection_pool) @@ -67,6 +98,11 @@ def create_redis_conn( reraise=True, ) def provide_redis_conn(fn): + """ + Decorator function that provides a Redis connection object to the decorated function. + If the decorated function already has a Redis connection object in its arguments or keyword arguments, + it will be used. Otherwise, a new connection object will be created and passed to the function. 
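Editor's note: `construct_redis_url` and `get_aioredis_pool` above build a single shared asyncio connection pool (with `retry_on_error` covering read-only replica errors) that every coroutine borrows from instead of opening its own connection. A compact sketch of the same setup; host, password, and pool size are placeholders rather than the repo's settings values.

```python
import asyncio

import redis
from redis import asyncio as aioredis

REDIS_HOST = 'localhost'   # placeholder connection details
REDIS_PORT = 6379
REDIS_DB = 0
REDIS_PASSWORD = ''


def construct_redis_url() -> str:
    if REDIS_PASSWORD:
        return f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
    return f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'


def get_pool(pool_size: int = 200) -> aioredis.Redis:
    pool = aioredis.ConnectionPool.from_url(
        url=construct_redis_url(),
        retry_on_error=[redis.exceptions.ReadOnlyError],   # tolerate replica failover
        max_connections=pool_size,
    )
    return aioredis.Redis(connection_pool=pool)


async def main():
    conn = get_pool()
    await conn.set('demo-key', 'demo-value')
    print(await conn.get('demo-key'))
    await conn.close()


if __name__ == '__main__':
    asyncio.run(main())
```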
+ """ @wraps(fn) def wrapper(*args, **kwargs): arg_conn = 'redis_conn' @@ -90,7 +126,58 @@ def wrapper(*args, **kwargs): return wrapper +def provide_redis_conn_repsawning_thread(fn): + @wraps(fn) + def wrapper(self, *args, **kwargs): + arg_conn = 'redis_conn' + func_params = fn.__code__.co_varnames + conn_in_args = arg_conn in func_params and func_params.index( + arg_conn, + ) < len(args) + conn_in_kwargs = arg_conn in kwargs + if conn_in_args or conn_in_kwargs: + return fn(*args, **kwargs) + else: + connection_pool = redis.BlockingConnectionPool(**REDIS_CONN_CONF) + while True: + try: + with create_redis_conn(connection_pool) as redis_obj: + kwargs[arg_conn] = redis_obj + logger.debug( + 'Returning after populating redis connection object', + ) + _ = fn(self, *args, **kwargs) + except Exception as e: + logger.opt(exception=True).error(e) + send_failure_notifications_sync( + client=self._httpx_client, + message=SnapshotterIssue( + instanceID=settings.instance_id, + issueType=SnapshotterReportState.CRASHED_REPORTER_THREAD.value, + projectID='', + epochId='', + timeOfReporting=datetime.now().isoformat(), + extra=str(e), + ), + ) + continue + # if no exception was caught and the thread returns normally, it is the sign of a shutdown event being set + else: + return _ + + return wrapper + + def provide_async_redis_conn(fn): + """ + Decorator function that provides an async Redis connection to the decorated function. + + Args: + fn: The function to be decorated. + + Returns: + The decorated function. + """ @wraps(fn) async def async_redis_conn_wrapper(*args, **kwargs): redis_conn_raw = await kwargs['request'].app.redis_pool.acquire() @@ -107,9 +194,17 @@ async def async_redis_conn_wrapper(*args, **kwargs): return async_redis_conn_wrapper -# TODO: check wherever this is used and instead -# attempt to supply the aioredis.Redis object from an instantiated connection pool def provide_async_redis_conn_insta(fn): + """ + A decorator function that provides an async Redis connection instance to the decorated function. + + Args: + fn: The function to be decorated. + + Returns: + The decorated function with an async Redis connection instance. + + """ @wraps(fn) async def wrapped(*args, **kwargs): arg_conn = 'redis_conn' @@ -155,10 +250,19 @@ async def wrapped(*args, **kwargs): class RedisPoolCache: def __init__(self, pool_size=2000): + """ + Initializes a Redis connection object with the specified connection pool size. + + Args: + pool_size (int): The maximum number of connections to keep in the pool. + """ self._aioredis_pool = None self._pool_size = pool_size async def populate(self): + """ + Populates the Redis connection pool with the specified number of connections. 
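Editor's note: `provide_redis_conn_repsawning_thread` above keeps a reporter thread alive: the wrapped function runs in a loop, every crash is reported as a `CRASHED_REPORTER_THREAD` issue and the loop continues, and only a clean return (the shutdown path) breaks out. A generic sketch of that keep-alive wrapper, with the failure notification reduced to a log call and the crashing worker simulated.

```python
import functools
import logging
import time

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('respawn')


def respawn_on_crash(fn):
    """Re-run fn forever on exceptions; return only when fn exits cleanly (shutdown)."""
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        while True:
            try:
                result = fn(*args, **kwargs)
            except Exception as exc:
                # stand-in for send_failure_notifications_sync(...)
                logger.exception('worker thread crashed, respawning: %s', exc)
                time.sleep(1)   # small backoff before the next attempt
                continue
            else:
                # a normal return signals an intentional shutdown
                return result
    return wrapper


_crash_count = {'n': 0}


@respawn_on_crash
def flaky_reporter():
    if _crash_count['n'] < 2:
        _crash_count['n'] += 1
        raise RuntimeError('simulated crash')
    return 'shutdown'


if __name__ == '__main__':
    print(flaky_reporter())   # two logged crashes, then a clean 'shutdown'
```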
+ """ if not self._aioredis_pool: self._aioredis_pool: aioredis.Redis = await get_aioredis_pool( self._pool_size, diff --git a/snapshotter/utils/redis/redis_keys.py b/snapshotter/utils/redis/redis_keys.py index 27f04ec7..329af3a0 100644 --- a/snapshotter/utils/redis/redis_keys.py +++ b/snapshotter/utils/redis/redis_keys.py @@ -28,6 +28,10 @@ 'rpc:blocknumber:' + settings.namespace + ':calls' ) +rpc_get_block_number_calls = ( + 'rpc:blockNumber:' + settings.namespace + ':calls' +) + rpc_get_transaction_receipt_calls = ( 'rpc:transactionReceipt:' + settings.namespace + ':calls' ) @@ -100,5 +104,21 @@ def epoch_id_project_to_state_mapping(epoch_id, state_id): return f'epochID:{epoch_id}:stateID:{state_id}:processingStatus' +def last_snapshot_processing_complete_timestamp_key(): + return f'lastSnapshotProcessingCompleteTimestamp:{settings.namespace}' + + +def last_epoch_detected_timestamp_key(): + return f'lastEpochDetectedTimestamp:{settings.namespace}' + + def submitted_base_snapshots_key(epoch_id, project_id): return f'submittedBaseSnapshots:{epoch_id}:{project_id}' + + +def submitted_unfinalized_snapshot_cids(project_id): + return f'projectID:{project_id}:unfinalizedSnapshots' + + +def process_hub_core_start_timestamp(): + return f'processHubCoreStartTimestamp:{settings.namespace}' diff --git a/snapshotter/utils/rpc.py b/snapshotter/utils/rpc.py index a60c29c7..801548f6 100644 --- a/snapshotter/utils/rpc.py +++ b/snapshotter/utils/rpc.py @@ -6,9 +6,6 @@ import eth_abi import tenacity -from aiohttp import ClientSession -from aiohttp import ClientTimeout -from aiohttp import TCPConnector from async_limits import parse_many as limit_parse_many from eth_abi.codec import ABICodec from eth_utils import keccak @@ -36,6 +33,7 @@ from snapshotter.utils.redis.rate_limiter import check_rpc_rate_limit from snapshotter.utils.redis.rate_limiter import load_rate_limiter_scripts from snapshotter.utils.redis.redis_keys import rpc_blocknumber_calls +from snapshotter.utils.redis.redis_keys import rpc_get_block_number_calls from snapshotter.utils.redis.redis_keys import rpc_get_event_logs_calls from snapshotter.utils.redis.redis_keys import rpc_get_transaction_receipt_calls from snapshotter.utils.redis.redis_keys import rpc_json_rpc_calls @@ -44,7 +42,13 @@ def get_contract_abi_dict(abi): """ - Create dictionary of ABI {function_name -> {signature, abi, input, output}} + Returns a dictionary of function signatures, inputs, outputs and full ABI for a given contract ABI. + + Args: + abi (list): List of dictionaries representing the contract ABI. + + Returns: + dict: Dictionary containing function signatures, inputs, outputs and full ABI. """ abi_dict = {} for abi_obj in [obj for obj in abi if obj['type'] == 'function']: @@ -63,7 +67,15 @@ def get_contract_abi_dict(abi): def get_encoded_function_signature(abi_dict, function_name, params: Union[List, None]): """ - get function encoded signature with params + Returns the encoded function signature for a given function name and parameters. + + Args: + abi_dict (dict): The ABI dictionary for the contract. + function_name (str): The name of the function. + params (list or None): The list of parameters for the function. + + Returns: + str: The encoded function signature. 
""" function_signature = abi_dict.get(function_name)['signature'] encoded_signature = '0x' + keccak(text=function_signature).hex()[:8] @@ -76,6 +88,11 @@ def get_encoded_function_signature(abi_dict, function_name, params: Union[List, def get_event_sig_and_abi(event_signatures, event_abis): + """ + Given a dictionary of event signatures and a dictionary of event ABIs, + returns a tuple containing a list of event signatures and a dictionary of + event ABIs keyed by their corresponding signature hash. + """ event_sig = [ '0x' + keccak(text=sig).hex() for name, sig in event_signatures.items() ] @@ -91,10 +108,15 @@ def get_event_sig_and_abi(event_signatures, event_abis): class RpcHelper(object): - _aiohttp_tcp_connector: TCPConnector - _web3_aiohttp_client: ClientSession def __init__(self, rpc_settings: RPCConfigBase = settings.rpc, archive_mode=False): + """ + Initializes an instance of the RpcHelper class. + + Args: + rpc_settings (RPCConfigBase, optional): The RPC configuration settings to use. Defaults to settings.rpc. + archive_mode (bool, optional): Whether to operate in archive mode. Defaults to False. + """ self._archive_mode = archive_mode self._rpc_settings = rpc_settings self._nodes = list() @@ -107,10 +129,18 @@ def __init__(self, rpc_settings: RPCConfigBase = settings.rpc, archive_mode=Fals self._client = None self._async_transport = None self._rate_limit_lua_script_shas = None - self._aiohttp_tcp_connector = None - self._web3_aiohttp_client = None async def _load_rate_limit_shas(self, redis_conn): + """ + Loads the rate limit Lua script SHA values from Redis if they haven't already been loaded. + + Args: + redis_conn: Redis connection object. + + Returns: + None + """ + if self._rate_limit_lua_script_shas is not None: return self._rate_limit_lua_script_shas = await load_rate_limiter_scripts( @@ -118,6 +148,13 @@ async def _load_rate_limit_shas(self, redis_conn): ) async def _init_http_clients(self): + """ + Initializes the HTTP clients for making RPC requests. + + If the client has already been initialized, this function returns immediately. + + :return: None + """ if self._client is not None: return self._async_transport = AsyncHTTPTransport( @@ -132,18 +169,12 @@ async def _init_http_clients(self): follow_redirects=False, transport=self._async_transport, ) - if self._aiohttp_tcp_connector is not None: - return - self._aiohttp_tcp_connector = TCPConnector( - keepalive_timeout=self._rpc_settings.connection_limits.keepalive_expiry, - limit=1000, - ) - self._web3_aiohttp_client = ClientSession( - connector=self._aiohttp_tcp_connector, - timeout=ClientTimeout(total=self._rpc_settings.request_time_out), - ) async def _load_async_web3_providers(self): + """ + Loads async web3 providers for each node in the list of nodes. + If a node already has a web3 client, it is skipped. + """ for node in self._nodes: if node['web3_client_async'] is not None: continue @@ -152,9 +183,19 @@ async def _load_async_web3_providers(self): modules={'eth': (AsyncEth,)}, middlewares=[], ) - await node['web3_client_async'].provider.cache_async_session(self._web3_aiohttp_client) async def init(self, redis_conn): + """ + Initializes the RPC client by loading web3 providers and rate limits, + loading rate limit SHAs, initializing HTTP clients, and loading async + web3 providers. + + Args: + redis_conn: Redis connection object. 
+ + Returns: + None + """ if not self._sync_nodes_initialized: self._load_web3_providers_and_rate_limits() self._sync_nodes_initialized = True @@ -164,6 +205,10 @@ async def init(self, redis_conn): self._initialized = True def _load_web3_providers_and_rate_limits(self): + """ + Load web3 providers and rate limits based on the archive mode. + If archive mode is True, load archive nodes, otherwise load full nodes. + """ if self._archive_mode: nodes = self._rpc_settings.archive_nodes else: @@ -180,8 +225,7 @@ def _load_web3_providers_and_rate_limits(self): }, ) except Exception as exc: - - self._logger.opt(exception=True).error( + self._logger.opt(exception=settings.logs.trace_enabled).error( ( 'Error while initialising one of the web3 providers,' f' err_msg: {exc}' @@ -192,6 +236,15 @@ def _load_web3_providers_and_rate_limits(self): self._node_count = len(self._nodes) def get_current_node(self): + """ + Returns the current node to use for RPC calls. + + If the sync nodes have not been initialized, it initializes them by loading web3 providers and rate limits. + If there are no full nodes available, it raises an exception. + + Returns: + The current node to use for RPC calls. + """ if not self._sync_nodes_initialized: self._load_web3_providers_and_rate_limits() self._sync_nodes_initialized = True @@ -201,6 +254,16 @@ def get_current_node(self): return self._nodes[self._current_node_index] def _on_node_exception(self, retry_state: tenacity.RetryCallState): + """ + Callback function to handle exceptions raised during RPC calls to nodes. + It updates the node index to retry the RPC call on the next node. + + Args: + retry_state (tenacity.RetryCallState): The retry state object containing information about the retry. + + Returns: + None + """ exc_idx = retry_state.kwargs['node_idx'] next_node_idx = (retry_state.kwargs.get('node_idx', 0) + 1) % self._node_count retry_state.kwargs['node_idx'] = next_node_idx @@ -211,9 +274,88 @@ def _on_node_exception(self, retry_state: tenacity.RetryCallState): next_node_idx, retry_state.outcome.exception(), ) + async def get_current_block_number(self, redis_conn): + """ + Returns the current block number of the Ethereum blockchain. + + Args: + redis_conn: Redis connection object. + + Returns: + The current block number of the Ethereum blockchain. + + Raises: + RPCException: If an error occurs while making the RPC call. 
+ """ + @retry( + reraise=True, + retry=retry_if_exception_type(RPCException), + wait=wait_random_exponential(multiplier=1, max=10), + stop=stop_after_attempt(settings.rpc.retry), + before_sleep=self._on_node_exception, + ) + async def f(node_idx): + if not self._initialized: + await self.init(redis_conn=redis_conn) + node = self._nodes[node_idx] + rpc_url = node.get('rpc_url') + web3_provider = node['web3_client_async'] + + await check_rpc_rate_limit( + parsed_limits=node.get('rate_limit', []), + app_id=rpc_url.split('/')[-1], + redis_conn=redis_conn, + request_payload='get_current_block_number', + error_msg={ + 'msg': 'exhausted_api_key_rate_limit inside get_current_blocknumber', + }, + logger=self._logger, + rate_limit_lua_script_shas=self._rate_limit_lua_script_shas, + limit_incr_by=1, + ) + try: + cur_time = time.time() + await asyncio.gather( + redis_conn.zadd( + name=rpc_get_block_number_calls, + mapping={ + json.dumps( + 'get_current_block_number', + ): cur_time, + }, + ), + redis_conn.zremrangebyscore( + name=rpc_get_block_number_calls, + min=0, + max=cur_time - 3600, + ), + ) + current_block = await web3_provider.eth.block_number + except Exception as e: + exc = RPCException( + request='get_current_block_number', + response=None, + underlying_exception=e, + extra_info=f'RPC_GET_CURRENT_BLOCKNUMBER ERROR: {str(e)}', + ) + self._logger.trace('Error in get_current_block_number, error {}', str(exc)) + raise exc + else: + return current_block + return await f(node_idx=0) + async def _async_web3_call(self, contract_function, redis_conn, from_address=None): - """Make async web3 call""" + """ + Executes a web3 call asynchronously. + + Args: + contract_function: The contract function to call. + redis_conn: The Redis connection object. + from_address: The address to send the transaction from. + Returns: + The result of the web3 call. + """ @retry( reraise=True, retry=retry_if_exception_type(RPCException), @@ -298,8 +440,7 @@ async def f(node_idx): underlying_exception=e, extra_info={'msg': str(e)}, ) - - self._logger.opt(lazy=True).trace( + self._logger.opt(exception=settings.logs.trace_enabled).error( ( 'Error while making web3 batch call' ), @@ -310,6 +451,20 @@ async def f(node_idx): return await f(node_idx=0) async def get_transaction_receipt(self, tx_hash, redis_conn): + """ + Retrieves the transaction receipt for a given transaction hash. + + Args: + tx_hash (str): The transaction hash for which to retrieve the receipt. + redis_conn: Redis connection object. + + Returns: + The transaction receipt details as a dictionary. + + Raises: + RPCException: If an error occurs while retrieving the transaction receipt. + """ + @retry( reraise=True, retry=retry_if_exception_type(RPCException), @@ -318,7 +473,7 @@ async def get_transaction_receipt(self, tx_hash, redis_conn): before_sleep=self._on_node_exception, ) async def f(node_idx): - if self._node_count == 0: + if not self._initialized: await self.init(redis_conn=redis_conn) node = self._nodes[node_idx] rpc_url = node.get('rpc_url') @@ -375,10 +530,18 @@ async def f(node_idx): return await f(node_idx=0) async def get_current_block(self, redis_conn, node_idx=0): - """Get the current block number. + """ + Returns the current block number from the Ethereum node at the specified index. + + Args: + redis_conn (redis.Redis): Redis connection object. + node_idx (int): Index of the Ethereum node to use. Defaults to 0. Returns: - int : the current block number + int: The current block number. 
+ + Raises: + ExhaustedApiKeyRateLimitError: If the API key rate limit for the node is exhausted. """ node = self._nodes[node_idx] rpc_url = node.get('rpc_url') @@ -416,7 +579,15 @@ async def get_current_block(self, redis_conn, node_idx=0): async def web3_call(self, tasks, redis_conn, from_address=None): """ - Call web3 functions in parallel + Calls the given tasks asynchronously using web3 and returns the response. + + Args: + tasks (list): List of contract functions to call. + redis_conn: Redis connection object. + from_address (str, optional): Address to use as the transaction sender. Defaults to None. + + Returns: + list: List of responses from the contract function calls. """ if not self._initialized: await self.init(redis_conn) @@ -433,8 +604,19 @@ async def web3_call(self, tasks, redis_conn, from_address=None): raise e async def _make_rpc_jsonrpc_call(self, rpc_query, redis_conn): - """Make a jsonrpc call to the given rpc_url""" + """ + Makes an RPC JSON-RPC call to a node in the pool. + + Args: + rpc_query (dict): The JSON-RPC query to be sent. + redis_conn (Redis): The Redis connection object. + + Returns: + dict: The JSON-RPC response data. + Raises: + RPCException: If there is an error in making the JSON-RPC call. + """ @retry( reraise=True, retry=retry_if_exception_type(RPCException), @@ -525,9 +707,17 @@ async def batch_eth_get_balance_on_block_range( to_block, ): """ - Batch call eth_getBalance for given block-range + Batch retrieves the Ethereum balance of an address for a range of blocks. + + Args: + address (str): The Ethereum address to retrieve the balance for. + redis_conn (redis.Redis): The Redis connection object. + from_block (int): The starting block number. + to_block (int): The ending block number. - RPC_BATCH: for_each_block -> eth_getBalance + Returns: + list: A list of Ethereum balances for each block in the range. If a balance could not be retrieved for a block, + None is returned in its place. """ if not self._initialized: await self.init(redis_conn) @@ -574,9 +764,20 @@ async def batch_eth_call_on_block_range( from_address=Web3.toChecksumAddress('0x0000000000000000000000000000000000000000'), ): """ - Batch call "single-function" on a contract for given block-range + Batch executes an Ethereum contract function call on a range of blocks. + + Args: + abi_dict (dict): The ABI dictionary of the contract. + function_name (str): The name of the function to call. + contract_address (str): The address of the contract. + redis_conn (redis.Redis): The Redis connection object. + from_block (int): The starting block number. + to_block (int): The ending block number. + params (list, optional): The list of parameters to pass to the function. Defaults to None. + from_address (str, optional): The address to use as the sender of the transaction. Defaults to '0x0000000000000000000000000000000000000000'. - RPC_BATCH: for_each_block -> call_function_x + Returns: + list: A list of decoded results from the function call. """ if not self._initialized: await self.init(redis_conn) @@ -625,9 +826,15 @@ async def batch_eth_call_on_block_range( async def batch_eth_get_block(self, from_block, to_block, redis_conn): """ - Batch call "eth_getBlockByNumber" in a range of block numbers + Batch retrieves Ethereum blocks using eth_getBlockByNumber JSON-RPC method. - RPC_BATCH: for_each_block -> eth_getBlockByNumber + Args: + from_block (int): The block number to start retrieving from. + to_block (int): The block number to stop retrieving at. 
+ redis_conn (redis.Redis): Redis connection object. + + Returns: + dict: A dictionary containing the response data from the JSON-RPC call. """ if not self._initialized: await self.init(redis_conn) @@ -655,6 +862,20 @@ async def batch_eth_get_block(self, from_block, to_block, redis_conn): async def get_events_logs( self, contract_address, to_block, from_block, topics, event_abi, redis_conn, ): + """ + Returns all events logs for a given contract address, within a specified block range and with specified topics. + + Args: + contract_address (str): The address of the contract to get events logs for. + to_block (int): The highest block number to retrieve events logs from. + from_block (int): The lowest block number to retrieve events logs from. + topics (List[str]): A list of topics to filter the events logs by. + event_abi (Dict): The ABI of the event to decode the logs with. + redis_conn (Redis): The Redis connection object to use for rate limiting. + + Returns: + List[Dict]: A list of dictionaries representing the decoded events logs. + """ if not self._initialized: await self.init(redis_conn) @@ -688,7 +909,7 @@ async def f(node_idx): }, logger=self._logger, rate_limit_lua_script_shas=self._rate_limit_lua_script_shas, - limit_incr_by=to_block - from_block + 1, + limit_incr_by=1, ) try: cur_time = time.time() diff --git a/snapshotter/utils/snapshot_utils.py b/snapshotter/utils/snapshot_utils.py index 194fdaf7..89aea32d 100644 --- a/snapshotter/utils/snapshot_utils.py +++ b/snapshotter/utils/snapshot_utils.py @@ -41,9 +41,17 @@ async def get_eth_price_usd( rpc_helper: RpcHelper, ): """ - returns the price of eth in usd at a given block height - """ + Fetches the ETH price in USD for a given block range using Uniswap DAI/ETH, USDC/ETH and USDT/ETH pairs. + + Args: + from_block (int): The starting block number. + to_block (int): The ending block number. + redis_conn (aioredis.Redis): The Redis connection object. + rpc_helper (RpcHelper): The RPC helper object. + Returns: + dict: A dictionary containing the ETH price in USD for each block in the given range. + """ try: eth_price_usd_dict = dict() redis_cache_mapping = dict() @@ -170,7 +178,7 @@ async def get_eth_price_usd( return eth_price_usd_dict except Exception as err: - snapshot_util_logger.opt(exception=True).error( + snapshot_util_logger.opt(exception=settings.logs.trace_enabled).error( f'RPC ERROR failed to fetch ETH price, error_msg:{err}', ) raise err @@ -183,8 +191,16 @@ async def get_block_details_in_block_range( rpc_helper: RpcHelper, ): """ - Fetch block-details for a range of block number or a single block + Fetches block details for a given range of block numbers. + Args: + from_block (int): The starting block number. + to_block (int): The ending block number. + redis_conn (aioredis.Redis): The Redis connection object. + rpc_helper (RpcHelper): The RPC helper object. + + Returns: + dict: A dictionary containing block details for each block number in the given range. """ try: cached_details = await redis_conn.zrangebyscore( @@ -249,7 +265,7 @@ async def get_block_details_in_block_range( return block_details_dict except Exception as e: - snapshot_util_logger.opt(exception=True, lazy=True).trace( + snapshot_util_logger.opt(exception=settings.logs.trace_enabled, lazy=True).trace( 'Unable to fetch block details, error_msg:{err}', err=lambda: str(e), ) @@ -264,10 +280,17 @@ async def warm_up_cache_for_snapshot_constructors( rpc_helper: RpcHelper, ): """ - This function warm-up cache for uniswap helper functions. 
Generated cache will be used across - snapshot constructors or in multiple pair-contract calculations. - : cache block details for epoch - : cache ETH USD price for epoch + Warms up the cache for snapshot constructors by fetching Ethereum price and block details + in the given block range. + + Args: + from_block (int): The starting block number. + to_block (int): The ending block number. + redis_conn (aioredis.Redis): The Redis connection object. + rpc_helper (RpcHelper): The RPC helper object. + + Returns: + None """ await asyncio.gather( get_eth_price_usd( diff --git a/snapshotter/utils/snapshot_worker.py b/snapshotter/utils/snapshot_worker.py index c48a1610..49ad65ca 100644 --- a/snapshotter/utils/snapshot_worker.py +++ b/snapshotter/utils/snapshot_worker.py @@ -2,8 +2,11 @@ import importlib import json import time +from typing import Optional from aio_pika import IncomingMessage +from ipfs_client.main import AsyncIPFSClient +from ipfs_client.main import AsyncIPFSClientSingleton from pydantic import ValidationError from snapshotter.settings.config import projects_config @@ -17,66 +20,77 @@ from snapshotter.utils.models.message_models import PowerloomSnapshotProcessMessage from snapshotter.utils.redis.rate_limiter import load_rate_limiter_scripts from snapshotter.utils.redis.redis_keys import epoch_id_project_to_state_mapping +from snapshotter.utils.redis.redis_keys import last_snapshot_processing_complete_timestamp_key from snapshotter.utils.redis.redis_keys import snapshot_submission_window_key from snapshotter.utils.redis.redis_keys import submitted_base_snapshots_key class SnapshotAsyncWorker(GenericAsyncWorker): + _ipfs_singleton: AsyncIPFSClientSingleton + _ipfs_writer_client: AsyncIPFSClient + _ipfs_reader_client: AsyncIPFSClient + def __init__(self, name, **kwargs): + """ + Initializes a SnapshotAsyncWorker object. + + Args: + name (str): The name of the worker. + **kwargs: Additional keyword arguments to be passed to the AsyncWorker constructor. + """ self._q = f'powerloom-backend-cb-snapshot:{settings.namespace}:{settings.instance_id}' self._rmq_routing = f'powerloom-backend-callback:{settings.namespace}:{settings.instance_id}:EpochReleased.*' super(SnapshotAsyncWorker, self).__init__(name=name, **kwargs) self._project_calculation_mapping = None self._task_types = [] for project_config in projects_config: - type_ = project_config.project_type - self._task_types.append(type_) + task_type = project_config.project_type + self._task_types.append(task_type) self._submission_window = None - def _gen_project_id(self, type_: str, epoch): - if not epoch.data_source: + def _gen_project_id(self, task_type: str, data_source: Optional[str] = None, primary_data_source: Optional[str] = None): + """ + Generates a project ID based on the given task type, data source, and primary data source. + + Args: + task_type (str): The type of task. + data_source (Optional[str], optional): The data source. Defaults to None. + primary_data_source (Optional[str], optional): The primary data source. Defaults to None. + + Returns: + str: The generated project ID. 
+ """ + if not data_source: # For generic use cases that don't have a data source like block details - project_id = f'{type_}:{settings.namespace}' + project_id = f'{task_type}:{settings.namespace}' else: - if epoch.primary_data_source: - project_id = f'{type_}:{epoch.primary_data_source}_{epoch.data_source}:{settings.namespace}' + if primary_data_source: + project_id = f'{task_type}:{primary_data_source.lower()}_{data_source.lower()}:{settings.namespace}' else: - project_id = f'{type_}:{epoch.data_source}:{settings.namespace}' + project_id = f'{task_type}:{data_source.lower()}:{settings.namespace}' return project_id - async def _processor_task(self, msg_obj: PowerloomSnapshotProcessMessage, task_type: str): - """Function used to process the received message object.""" - self._logger.debug( - 'Processing callback: {}', msg_obj, - ) - if task_type not in self._project_calculation_mapping: - self._logger.error( - ( - 'No project calculation mapping found for task type' - f' {task_type}. Skipping...' - ), - ) - return + async def _process_single_mode(self, msg_obj: PowerloomSnapshotProcessMessage, task_type: str): + """ + Processes a single mode snapshot task for a given message object and task type. - if not self._submission_window: - submission_window = await self._redis_conn.get( - name=snapshot_submission_window_key, - ) - if submission_window: - self._submission_window = int(submission_window) + Args: + msg_obj (PowerloomSnapshotProcessMessage): The message object containing the snapshot task details. + task_type (str): The type of task to be performed. - project_id = self._gen_project_id(type_=task_type, epoch=msg_obj) + Raises: + Exception: If an error occurs while processing the snapshot task. - try: - if not self._rate_limiting_lua_scripts: - self._rate_limiting_lua_scripts = await load_rate_limiter_scripts( - self._redis_conn, - ) - self._logger.debug( - 'Got epoch to process for {}: {}', - task_type, msg_obj, - ) + Returns: + None + """ + project_id = self._gen_project_id( + task_type=task_type, + data_source=msg_obj.data_source, + primary_data_source=msg_obj.primary_data_source, + ) + try: task_processor = self._project_calculation_mapping[task_type] snapshot = await task_processor.compute( @@ -85,12 +99,17 @@ async def _processor_task(self, msg_obj: PowerloomSnapshotProcessMessage, task_t rpc_helper=self._rpc_helper, ) - if task_processor.transformation_lambdas: + if snapshot is None: + self._logger.debug( + 'No snapshot data for: {}, skipping...', msg_obj, + ) + + if task_processor.transformation_lambdas and snapshot: for each_lambda in task_processor.transformation_lambdas: snapshot = each_lambda(snapshot, msg_obj.data_source, msg_obj.begin, msg_obj.end) except Exception as e: - self._logger.opt(exception=True).error( + self._logger.opt(exception=settings.logs.trace_enabled).error( 'Exception processing callback for epoch: {}, Error: {},' 'sending failure notifications', msg_obj, e, ) @@ -127,8 +146,8 @@ async def _processor_task(self, msg_obj: PowerloomSnapshotProcessMessage, task_t # block time is about 2 seconds on anchor chain, keeping it around ten times the submission window ex=self._submission_window * 10 * 2, ) - - await self._redis_conn.hset( + p = self._redis_conn.pipeline() + p.hset( name=epoch_id_project_to_state_mapping( epoch_id=msg_obj.epochId, state_id=SnapshotterStates.SNAPSHOT_BUILD.value, ), @@ -138,16 +157,204 @@ async def _processor_task(self, msg_obj: PowerloomSnapshotProcessMessage, task_t ).json(), }, ) - await self._send_payload_commit_service_queue( - 
type_=task_type, + + await self._redis_conn.set( + name=last_snapshot_processing_complete_timestamp_key(), + value=int(time.time()), + ) + + if not snapshot: + self._logger.debug( + 'No snapshot data for: {}, skipping...', msg_obj, + ) + return + + await p.execute() + await self._commit_payload( + task_type=task_type, + _ipfs_writer_client=self._ipfs_writer_client, project_id=project_id, epoch=msg_obj, snapshot=snapshot, storage_flag=settings.web3storage.upload_snapshots, ) + + async def _process_bulk_mode(self, msg_obj: PowerloomSnapshotProcessMessage, task_type: str): + """ + Processes the given PowerloomSnapshotProcessMessage object in bulk mode. + + Args: + msg_obj (PowerloomSnapshotProcessMessage): The message object to process. + task_type (str): The type of task to perform. + + Raises: + Exception: If an error occurs while processing the message. + + Returns: + None + """ + try: + task_processor = self._project_calculation_mapping[task_type] + + snapshots = await task_processor.compute( + epoch=msg_obj, + redis_conn=self._redis_conn, + rpc_helper=self._rpc_helper, + ) + + if not snapshots: + self._logger.debug( + 'No snapshot data for: {}, skipping...', msg_obj, + ) + + # No transformation lambdas in bulk mode for now. + # Planning to deprecate transformation lambdas in future. + # if task_processor.transformation_lambdas: + # for each_lambda in task_processor.transformation_lambdas: + # snapshot = each_lambda(snapshot, msg_obj.data_source, msg_obj.begin, msg_obj.end) + + except Exception as e: + self._logger.opt(exception=True).error( + 'Exception processing callback for epoch: {}, Error: {},' + 'sending failure notifications', msg_obj, e, + ) + + notification_message = SnapshotterIssue( + instanceID=settings.instance_id, + issueType=SnapshotterReportState.MISSED_SNAPSHOT.value, + projectID=f'{task_type}:{settings.namespace}', + epochId=str(msg_obj.epochId), + timeOfReporting=str(time.time()), + extra=json.dumps({'issueDetails': f'Error : {e}'}), + ) + + await send_failure_notifications_async( + client=self._client, message=notification_message, + ) + + await self._redis_conn.hset( + name=epoch_id_project_to_state_mapping( + epoch_id=msg_obj.epochId, state_id=SnapshotterStates.SNAPSHOT_BUILD.value, + ), + mapping={ + f'{task_type}:{settings.namespace}': SnapshotterStateUpdate( + status='failed', error=str(e), timestamp=int(time.time()), + ).json(), + }, + ) + else: + + await self._redis_conn.set( + name=last_snapshot_processing_complete_timestamp_key(), + value=int(time.time()), + ) + + if not snapshots: + self._logger.debug( + 'No snapshot data for: {}, skipping...', msg_obj, + ) + return + + self._logger.info('Sending snapshots to commit service: {}', snapshots) + + for project_data_source, snapshot in snapshots: + data_sources = project_data_source.split('_') + if len(data_sources) == 1: + data_source = data_sources[0] + primary_data_source = None + else: + primary_data_source, data_source = data_sources + + project_id = self._gen_project_id( + task_type=task_type, data_source=data_source, primary_data_source=primary_data_source, + ) + + await self._redis_conn.set( + name=submitted_base_snapshots_key( + epoch_id=msg_obj.epochId, project_id=project_id, + ), + value=snapshot.json(), + # block time is about 2 seconds on anchor chain, keeping it around ten times the submission window + ex=self._submission_window * 10 * 2, + ) + p = self._redis_conn.pipeline() + p.hset( + name=epoch_id_project_to_state_mapping( + epoch_id=msg_obj.epochId, 
state_id=SnapshotterStates.SNAPSHOT_BUILD.value, + ), + mapping={ + project_id: SnapshotterStateUpdate( + status='success', timestamp=int(time.time()), + ).json(), + }, + ) + await p.execute() + await self._commit_payload( + task_type=task_type, + _ipfs_writer_client=self._ipfs_writer_client, + project_id=project_id, + epoch=msg_obj, + snapshot=snapshot, + storage_flag=settings.web3storage.upload_snapshots, + ) + + async def _processor_task(self, msg_obj: PowerloomSnapshotProcessMessage, task_type: str): + """ + Process a PowerloomSnapshotProcessMessage object for a given task type. + + Args: + msg_obj (PowerloomSnapshotProcessMessage): The message object to process. + task_type (str): The type of task to perform. + + Returns: + None + """ + self._logger.debug( + 'Processing callback: {}', msg_obj, + ) + if task_type not in self._project_calculation_mapping: + self._logger.error( + ( + 'No project calculation mapping found for task type' + f' {task_type}. Skipping...' + ), + ) + return + + if not self._submission_window: + submission_window = await self._redis_conn.get( + name=snapshot_submission_window_key, + ) + if submission_window: + self._submission_window = int(submission_window) + + if not self._rate_limiting_lua_scripts: + self._rate_limiting_lua_scripts = await load_rate_limiter_scripts( + self._redis_conn, + ) + self._logger.debug( + 'Got epoch to process for {}: {}', + task_type, msg_obj, + ) + + # bulk mode + if msg_obj.bulk_mode: + await self._process_bulk_mode(msg_obj=msg_obj, task_type=task_type) + else: + await self._process_single_mode(msg_obj=msg_obj, task_type=task_type) await self._redis_conn.close() async def _on_rabbitmq_message(self, message: IncomingMessage): + """ + Callback function that is called when a message is received from RabbitMQ. + It processes the message and starts the processor task. + + Args: + message (IncomingMessage): The incoming message from RabbitMQ. + + Returns: + None + """ task_type = message.routing_key.split('.')[-1] if task_type not in self._task_types: return @@ -182,6 +389,13 @@ async def _on_rabbitmq_message(self, message: IncomingMessage): asyncio.ensure_future(self._processor_task(msg_obj=msg_obj, task_type=task_type)) async def _init_project_calculation_mapping(self): + """ + Initializes the project calculation mapping by generating a dictionary that maps project types to their corresponding + calculation classes. + + Raises: + Exception: If a duplicate project type is found in the projects configuration. + """ if self._project_calculation_mapping is not None: return # Generate project function mapping @@ -194,7 +408,21 @@ async def _init_project_calculation_mapping(self): class_ = getattr(module, project_config.processor.class_name) self._project_calculation_mapping[key] = class_() + async def _init_ipfs_client(self): + """ + Initializes the IPFS client by creating a singleton instance of AsyncIPFSClientSingleton + and initializing its sessions. The write and read clients are then assigned to instance variables. + """ + self._ipfs_singleton = AsyncIPFSClientSingleton(settings.ipfs) + await self._ipfs_singleton.init_sessions() + self._ipfs_writer_client = self._ipfs_singleton._ipfs_write_client + self._ipfs_reader_client = self._ipfs_singleton._ipfs_read_client + async def init_worker(self): + """ + Initializes the worker by initializing project calculation mapping, IPFS client, and other necessary components. 
+ """ if not self._initialized: await self._init_project_calculation_mapping() + await self._init_ipfs_client() await self.init() diff --git a/snapshotter/utils/utility_functions.py b/snapshotter/utils/utility_functions.py index 2392864c..1e75d2c2 100644 --- a/snapshotter/utils/utility_functions.py +++ b/snapshotter/utils/utility_functions.py @@ -1,11 +1,20 @@ import asyncio from functools import wraps -from math import floor from snapshotter.utils.default_logger import logger def acquire_bounded_semaphore(fn): + """ + A decorator function that acquires a bounded semaphore before executing the decorated function and releases it + after the function is executed. This decorator is intended to be used with async functions. + + Args: + fn: The async function to be decorated. + + Returns: + The decorated async function. + """ @wraps(fn) async def wrapped(self, *args, **kwargs): sem: asyncio.BoundedSemaphore = kwargs['semaphore'] diff --git a/snapshotter/worker_process_report.py b/snapshotter/worker_process_report.py new file mode 100644 index 00000000..6d4e9fef --- /dev/null +++ b/snapshotter/worker_process_report.py @@ -0,0 +1,92 @@ +import json + +import psutil +import redis + +from snapshotter.settings.config import settings +from snapshotter.utils.redis.redis_conn import REDIS_CONN_CONF + + +def process_up(pid): + """ + Check if a process with given PID is running or not. + + Args: + pid (int): Process ID to check. + + Returns: + bool: True if process is running, False otherwise. + """ + p_ = psutil.Process(pid) + return p_.is_running() + # try: + # return os.waitpid(pid, os.WNOHANG) is not None + # except ChildProcessError: # no child processes + # return False + # try: + # call = subprocess.check_output("pidof '{}'".format(self.processName), shell=True) + # return True + # except subprocess.CalledProcessError: + # return False + + +def main(): + """ + Retrieves process details from Redis cache and prints their running status. + + Retrieves process details from Redis cache and prints their running status. The process details include the System Event + Detector, Worker Processor Distributor, and Worker Processes. The running status of each process is determined using the + `process_up` function. 
+ """ + connection_pool = redis.BlockingConnectionPool(**REDIS_CONN_CONF) + redis_conn = redis.Redis(connection_pool=connection_pool) + map_raw = redis_conn.hgetall( + name=f'powerloom:snapshotter:{settings.namespace}:{settings.instance_id}:Processes', + ) + event_det_pid = map_raw[b'SystemEventDetector'] + print('\n' + '=' * 20 + 'System Event Detector' + '=' * 20) + try: + event_det_pid = int(event_det_pid) + except ValueError: + print('Event detector pid found in process map not a PID: ', event_det_pid) + else: + # event_detector_proc = psutil.Process(event_det_pid) + print('Event detector process running status: ', process_up(event_det_pid)) + + print('\n' + '=' * 20 + 'Worker Processor Distributor' + '=' * 20) + proc_dist_pid = map_raw[b'ProcessorDistributor'] + try: + proc_dist_pid = int(proc_dist_pid) + except ValueError: + print('Processor distributor pid found in process map not a PID: ', proc_dist_pid) + else: + # proc_dist_proc = psutil.Process(proc_dist_pid) + print('Processor distributor process running status: ', process_up(proc_dist_pid)) + + print('\n' + '=' * 20 + 'Worker Processes' + '=' * 20) + cb_worker_map = map_raw[b'callback_workers'] + try: + cb_worker_map = json.loads(cb_worker_map) + except json.JSONDecodeError: + print('Callback worker entries in cache corrupted...', cb_worker_map) + return + for worker_type, worker_details in cb_worker_map.items(): + section_name = worker_type.capitalize() + print('\n' + '*' * 10 + section_name + '*' * 10) + if not worker_details or not isinstance(worker_details, dict): + print(f'No {section_name} workers found in process map: ', worker_details) + continue + for short_id, worker_details in worker_details.items(): + print('\n' + '-' * 5 + short_id + '-' * 5) + proc_pid = worker_details['pid'] + try: + proc_pid = int(proc_pid) + except ValueError: + print(f'Process name {worker_details["id"]} pid found in process map not a PID: ', proc_pid) + else: + # proc = psutil.Process(proc_pid) + print('Process name ' + worker_details['id'] + ' running status: ', process_up(proc_pid)) + + +if __name__ == '__main__': + main() diff --git a/snapshotter_autofill.sh b/snapshotter_autofill.sh index 635e9736..88857985 100755 --- a/snapshotter_autofill.sh +++ b/snapshotter_autofill.sh @@ -42,17 +42,24 @@ if [ "$POWERLOOM_REPORTING_URL" ]; then echo "Found SLACK_REPORTING_URL ${POWERLOOM_REPORTING_URL}"; fi +if [ "$WEB3_STORAGE_TOKEN" ]; then + echo "Found WEB3_STORAGE_TOKEN ${WEB3_STORAGE_TOKEN}"; +fi + +if [ "$NAMESPACE" ]; then + echo "Found NAMESPACE ${NAMESPACE}"; +fi cp config/projects.example.json config/projects.json cp config/aggregator.example.json config/aggregator.json cp config/auth_settings.example.json config/auth_settings.json cp config/settings.example.json config/settings.json -export namespace=UNISWAPV2 - +export namespace="${NAMESPACE:-UNISWAPV2}" export ipfs_url="${IPFS_URL:-/dns/ipfs/tcp/5001}" export ipfs_api_key="${IPFS_API_KEY:-}" export ipfs_api_secret="${IPFS_API_SECRET:-}" +export web3_storage_token="${WEB3_STORAGE_TOKEN:-}" export slack_reporting_url="${SLACK_REPORTING_URL:-}" export powerloom_reporting_url="${POWERLOOM_REPORTING_URL:-}" @@ -72,6 +79,7 @@ echo "Using IPFS API KEY: ${ipfs_api_key}" echo "Using protocol state contract: ${PROTOCOL_STATE_CONTRACT}" echo "Using slack reporting url: ${slack_reporting_url}" echo "Using powerloom reporting url: ${powerloom_reporting_url}" +echo "Using web3 storage token: ${web3_storage_token}" sed -i'.backup' "s#relevant-namespace#$namespace#" config/settings.json @@ -81,6 +89,7 
@@
 sed -i'.backup' "s#https://rpc-url#$SOURCE_RPC_URL#" config/settings.json
 sed -i'.backup' "s#https://prost-rpc-url#$PROST_RPC_URL#" config/settings.json
+sed -i'.backup' "s#web3-storage-token#$web3_storage_token#" config/settings.json
 sed -i'.backup' "s#ipfs-writer-url#$ipfs_url#" config/settings.json
 sed -i'.backup' "s#ipfs-writer-key#$ipfs_api_key#" config/settings.json
 sed -i'.backup' "s#ipfs-writer-secret#$ipfs_api_secret#" config/settings.json
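
Reviewer note (not part of the patch): the new `provide_redis_conn_repsawning_thread` decorator in `redis_conn.py` keeps a thread entry point alive across crashes, reporting each crash as a `CRASHED_REPORTER_THREAD` issue through the instance's `_httpx_client` before retrying with a fresh connection, while a normal return is treated as a shutdown. The sketch below is a hypothetical consumer written only to show that contract; `ExampleDetector`, `_shutdown_event` and the heartbeat key are invented for illustration.

```python
# Hypothetical consumer of provide_redis_conn_repsawning_thread.
# Illustration only; ExampleDetector is not part of this patch.
import threading

import httpx

from snapshotter.utils.redis.redis_conn import provide_redis_conn_repsawning_thread


class ExampleDetector:
    def __init__(self):
        # the decorator uses this client to send failure notifications on crashes
        self._httpx_client = httpx.Client()
        self._shutdown_event = threading.Event()

    @provide_redis_conn_repsawning_thread
    def run(self, redis_conn=None):
        # any uncaught exception is reported and the body is re-entered with a
        # fresh Redis connection; returning normally signals a clean shutdown
        while not self._shutdown_event.is_set():
            redis_conn.set('example:heartbeat', 'ok')
            self._shutdown_event.wait(10)
```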
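Reviewer note (not part of the patch): a minimal usage sketch of the newly added `RpcHelper.get_current_block_number`, assuming the usual settings files are in place. Obtaining `redis_conn` from `RedisPoolCache` mirrors how the workers in this repo do it; the surrounding coroutine and variable names are illustrative only.

```python
# Illustrative sketch only -- not part of the patch. Assumes the snapshotter
# package is importable and Redis/RPC settings are configured as usual.
import asyncio

from snapshotter.utils.redis.redis_conn import RedisPoolCache
from snapshotter.utils.rpc import RpcHelper


async def print_latest_block():
    aioredis_pool = RedisPoolCache()
    await aioredis_pool.populate()
    redis_conn = aioredis_pool._aioredis_pool

    rpc_helper = RpcHelper()
    # init() loads providers, rate limit scripts and HTTP clients;
    # get_current_block_number() also calls it lazily if needed
    await rpc_helper.init(redis_conn=redis_conn)

    block_number = await rpc_helper.get_current_block_number(redis_conn=redis_conn)
    print('Current source chain block:', block_number)


if __name__ == '__main__':
    asyncio.run(print_latest_block())
```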
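Reviewer note (not part of the patch): the reworked `_gen_project_id` and the bulk-mode result handling in `snapshot_worker.py` share the same project ID and `<primary>_<data_source>` key conventions, which also show up in Redis keys such as `submittedBaseSnapshots:{epoch_id}:{project_id}`. The standalone sketch below restates that convention for quick reference; the `UNISWAPV2` namespace value is only an example.

```python
# Simplified restatement of the project ID convention used by
# SnapshotAsyncWorker._gen_project_id; the namespace value is hypothetical.
from typing import Optional, Tuple

NAMESPACE = 'UNISWAPV2'


def gen_project_id(task_type: str, data_source: Optional[str] = None,
                   primary_data_source: Optional[str] = None) -> str:
    if not data_source:
        # generic projects such as block details have no data source
        return f'{task_type}:{NAMESPACE}'
    if primary_data_source:
        return f'{task_type}:{primary_data_source.lower()}_{data_source.lower()}:{NAMESPACE}'
    return f'{task_type}:{data_source.lower()}:{NAMESPACE}'


def split_project_data_source(project_data_source: str) -> Tuple[Optional[str], str]:
    # bulk-mode computes return (project_data_source, snapshot) pairs where
    # project_data_source is either '<data_source>' or '<primary>_<data_source>'
    parts = project_data_source.split('_')
    if len(parts) == 1:
        return None, parts[0]
    primary_data_source, data_source = parts
    return primary_data_source, data_source
```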