diff --git a/.eslintrc.js b/.eslintrc.js index 44c1726af2..b70d80148e 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -118,14 +118,14 @@ module.exports = { { format: ['camelCase', 'PascalCase', 'UPPER_CASE'], leadingUnderscore: 'allow', - prefix: ['IS_', 'is', 'can', 'does', 'has', 'should', 'are', 'was', 'show', 'hide'], + prefix: ['IS_', 'is', 'can', 'does', 'did', 'has', 'should', 'are', 'was', 'show', 'hide'], selector: 'variable', types: ['boolean'], }, { format: ['camelCase', 'PascalCase', 'UPPER_CASE'], leadingUnderscore: 'allow', - prefix: ['IS_', 'is', 'can', 'does', 'has', 'should', 'are', 'was', 'show', 'hide'], + prefix: ['IS_', 'is', 'can', 'does', 'did', 'has', 'should', 'are', 'was', 'show', 'hide'], selector: 'parameter', types: ['boolean'], }, diff --git a/.github/workflows/ci-run.yml b/.github/workflows/ci-run.yml index 9b4468cdc8..5742dac959 100644 --- a/.github/workflows/ci-run.yml +++ b/.github/workflows/ci-run.yml @@ -45,6 +45,9 @@ jobs: GC_PASSPORT_SCORER_API_KEY: "${{ secrets.GITCOIN_SCORER_API_KEY }}" DELEGATION_SALT: "${{ secrets.DELEGATION_SALT }}" DELEGATION_SALT_PRIMARY: "${{ secrets.DELEGATION_SALT_PRIMARY }}" + SABLIER_MAINNET_SUBGRAPH_URL: "${{ secrets.SABLIER_MAINNET_SUBGRAPH_URL }}" + SABLIER_SEPOLIA_SUBGRAPH_URL: "${{ secrets.SABLIER_SEPOLIA_SUBGRAPH_URL }}" + SABLIER_SENDER_ADDRESS: "0x4BEbdb9792ed50eBB00611083ba384F05791a9FB" steps: - uses: actions/checkout@v4.1.0 - uses: actions/cache/restore@v4 diff --git a/backend/.env.template b/backend/.env.template index c01c25192e..143458ce2c 100644 --- a/backend/.env.template +++ b/backend/.env.template @@ -25,3 +25,5 @@ GLM_SENDER_ADDRESS= GLM_SENDER_PRIVATE_KEY= GLM_SENDER_NONCE= MAINNET_PROPOSAL_CIDS= + +SABLIER_MAINNET_SUBGRAPH_URL= diff --git a/backend/app/engine/epochs_settings.py b/backend/app/engine/epochs_settings.py index c944ce7812..1b87e68ad0 100644 --- a/backend/app/engine/epochs_settings.py +++ b/backend/app/engine/epochs_settings.py @@ -7,6 +7,7 @@ NotSupportedCFCalculator, ) from app.engine.octant_rewards.leftover.default import PreliminaryLeftover +from app.engine.octant_rewards.leftover.with_ppf import LeftoverWithPPF from app.engine.octant_rewards.matched.preliminary import ( PreliminaryMatchedRewards, ) @@ -23,12 +24,14 @@ from app.engine.projects.rewards.threshold.preliminary import ( PreliminaryProjectThreshold, ) +from app.engine.user import UserSettings from app.engine.user.budget.preliminary import PreliminaryUserBudget -from app.engine.user import UserSettings, DefaultWeightedAverageEffectiveDeposit +from app.engine.user.effective_deposit.weighted_average.default import ( + DefaultWeightedAverageEffectiveDeposit, +) from app.engine.user.effective_deposit.weighted_average.weights.timebased.default import ( DefaultTimebasedWeights, ) -from app.engine.octant_rewards.leftover.with_ppf import LeftoverWithPPF @dataclass @@ -76,7 +79,10 @@ def register_epoch_settings(): community_fund=NotSupportedCFCalculator(), leftover=PreliminaryLeftover(), ), - user=UserSettings(budget=PreliminaryUserBudget()), + user=UserSettings( + budget=PreliminaryUserBudget(), + effective_deposit=DefaultWeightedAverageEffectiveDeposit(), + ), project=ProjectSettings( rewards=PreliminaryProjectRewards( projects_threshold=PreliminaryProjectThreshold(2), @@ -86,6 +92,13 @@ def register_epoch_settings(): SETTINGS[3] = EpochSettings( octant_rewards=OctantRewardsSettings(leftover=LeftoverWithPPF()), + user=UserSettings(effective_deposit=DefaultWeightedAverageEffectiveDeposit()), 
project=ProjectSettings(rewards=PreliminaryProjectRewards()), ) - SETTINGS[4] = EpochSettings() + SETTINGS[4] = EpochSettings( + user=UserSettings(effective_deposit=DefaultWeightedAverageEffectiveDeposit()) + ) + SETTINGS[5] = EpochSettings( + user=UserSettings(effective_deposit=DefaultWeightedAverageEffectiveDeposit()) + ) + SETTINGS[6] = EpochSettings() diff --git a/backend/app/engine/user/__init__.py b/backend/app/engine/user/__init__.py index cf16de6551..4eaa5a52df 100644 --- a/backend/app/engine/user/__init__.py +++ b/backend/app/engine/user/__init__.py @@ -3,14 +3,14 @@ from app.engine.user.budget import UserBudget from app.engine.user.budget.with_ppf import UserBudgetWithPPF from app.engine.user.effective_deposit import UserEffectiveDeposit -from app.engine.user.effective_deposit.weighted_average.default import ( - DefaultWeightedAverageEffectiveDeposit, +from app.engine.user.effective_deposit.weighted_average.default_with_sablier_timebox import ( + DefaultWeightedAverageWithSablierTimebox, ) @dataclass class UserSettings: effective_deposit: UserEffectiveDeposit = field( - default_factory=DefaultWeightedAverageEffectiveDeposit + default_factory=DefaultWeightedAverageWithSablierTimebox ) budget: UserBudget = field(default_factory=UserBudgetWithPPF) diff --git a/backend/app/engine/user/effective_deposit/__init__.py b/backend/app/engine/user/effective_deposit/__init__.py index 33abf1e3b0..b82fd3ed2b 100644 --- a/backend/app/engine/user/effective_deposit/__init__.py +++ b/backend/app/engine/user/effective_deposit/__init__.py @@ -12,6 +12,17 @@ class EventType(StrEnum): UNLOCK = "Unlocked" +class SablierEventType(StrEnum): + CREATE = "Create" + WITHDRAW = "Withdraw" + CANCEL = "Cancel" + + +class DepositSource(StrEnum): + OCTANT = "Octant" + SABLIER = "Sablier" + + def _calculate_deposit_after_event( event_type: EventType, before: int, amount: int ) -> int: @@ -32,6 +43,8 @@ class DepositEvent: amount: int deposit_before: int deposit_after: int + source: DepositSource + mapped_event: Optional[SablierEventType] def __init__( self, @@ -40,6 +53,8 @@ def __init__( timestamp: int, amount: int, deposit_before: int, + source: DepositSource = DepositSource.OCTANT, + mapped_event: Optional[SablierEventType] = None, ): self.user = user self.type = type @@ -49,10 +64,17 @@ def __init__( self.deposit_after = _calculate_deposit_after_event( type, deposit_before, amount ) + self.source = source + self.mapped_event = mapped_event @staticmethod def from_dict(event: Dict): event_type = EventType(event["__typename"]) + source = DepositSource.OCTANT + mapped_event = None + if event.get("__source") == DepositSource.SABLIER: + mapped_event = event["type"] + source = DepositSource.SABLIER user = to_checksum_address(event["user"]) timestamp = int(event["timestamp"]) amount = int(event["amount"]) @@ -64,6 +86,8 @@ def from_dict(event: Dict): timestamp=timestamp, amount=amount, deposit_before=deposit_before, + source=source, + mapped_event=mapped_event, ) @@ -79,11 +103,14 @@ def __iter__(self): yield self.deposit +LockEventsByAddr = Dict[str, List[DepositEvent]] + + @dataclass class UserEffectiveDepositPayload: epoch_start: int = None epoch_end: int = None - lock_events_by_addr: Dict[str, List[DepositEvent]] = None + lock_events_by_addr: LockEventsByAddr = None @dataclass diff --git a/backend/app/engine/user/effective_deposit/weighted_average/default_with_sablier_timebox.py b/backend/app/engine/user/effective_deposit/weighted_average/default_with_sablier_timebox.py new file mode 100644 index 
0000000000..2c5d7964cc --- /dev/null +++ b/backend/app/engine/user/effective_deposit/weighted_average/default_with_sablier_timebox.py @@ -0,0 +1,68 @@ +from dataclasses import dataclass +from typing import Tuple, List + +from app.engine.user.effective_deposit import ( + UserEffectiveDepositPayload, + LockEventsByAddr, + DepositSource, + EventType, + UserDeposit, +) +from app.engine.user.effective_deposit.weighted_average.default import ( + DefaultWeightedAverageEffectiveDeposit, +) + + +@dataclass +class DefaultWeightedAverageWithSablierTimebox(DefaultWeightedAverageEffectiveDeposit): + def calculate_users_effective_deposits( + self, payload: UserEffectiveDepositPayload + ) -> Tuple[List[UserDeposit], int]: + payload.lock_events_by_addr = self._remove_unlock_and_lock_within_24_hours( + payload.lock_events_by_addr + ) + + return super().calculate_users_effective_deposits(payload) + + def _remove_unlock_and_lock_within_24_hours( + self, events: LockEventsByAddr + ) -> LockEventsByAddr: + """ + Removes the unlock event from Sablier if it is followed by a lock event in Octant within 24 hours. + """ + TWENTY_FOUR_HOURS_PERIOD = 24 * 60 * 60 + + for address, user_events in events.items(): + if not user_events: + continue + + filtered_events = [] + skip_next = False + + for prev_event, next_event in zip(user_events, user_events[1:]): + if skip_next: + # Skip adding the next_event as it was part of a pair that should be ignored. + skip_next = False + continue + + if ( + prev_event.source == DepositSource.SABLIER + and prev_event.type == EventType.UNLOCK + and next_event.source == DepositSource.OCTANT + and next_event.type == EventType.LOCK + and next_event.timestamp - prev_event.timestamp + < TWENTY_FOUR_HOURS_PERIOD + ): + # Skip both the unlock and the following lock. + skip_next = True + continue + + filtered_events.append(prev_event) + + # Add the last event if it was not skipped. 
+ if not skip_next: + filtered_events.append(user_events[-1]) + + events[address] = filtered_events + + return events diff --git a/backend/app/extensions.py b/backend/app/extensions.py index 8a749b7c8a..0ac92f5d1e 100644 --- a/backend/app/extensions.py +++ b/backend/app/extensions.py @@ -14,7 +14,7 @@ from app.infrastructure.contracts.erc20 import ERC20 from app.infrastructure.contracts.projects import Projects from app.infrastructure.contracts.vault import Vault -from app.infrastructure import GQLConnectionFactory +from app.infrastructure import GQLConnectionFactory, SubgraphEndpoints # Flask extensions api = Api( @@ -39,7 +39,8 @@ vault = Vault(abi=abi.VAULT) # GQL extensions -gql_factory = GQLConnectionFactory() +gql_octant_factory = GQLConnectionFactory() +gql_sablier_factory = GQLConnectionFactory() def init_web3(app): @@ -55,7 +56,8 @@ def init_web3(app): def init_subgraph(app): - gql_factory.set_url(app.config) + gql_octant_factory.set_url(app.config, SubgraphEndpoints.OCTANT_SUBGRAPH) + gql_sablier_factory.set_url(app.config, SubgraphEndpoints.SABLIER_SUBGRAPH) def init_scheduler(app): diff --git a/backend/app/infrastructure/__init__.py b/backend/app/infrastructure/__init__.py index 509cdecc49..f2e0b9bb02 100644 --- a/backend/app/infrastructure/__init__.py +++ b/backend/app/infrastructure/__init__.py @@ -22,6 +22,11 @@ } +class SubgraphEndpoints: + OCTANT_SUBGRAPH = "SUBGRAPH_ENDPOINT" + SABLIER_SUBGRAPH = "SABLIER_MAINNET_SUBGRAPH_URL" + + class OctantResource(Resource): def __init__(self, *args, **kwargs): Resource.__init__(self, *args, *kwargs) @@ -108,8 +113,8 @@ class GQLConnectionFactory: def __init__(self): self._url = None - def set_url(self, config: Config): - self._url = config["SUBGRAPH_ENDPOINT"] + def set_url(self, config: Config, key: SubgraphEndpoints): + self._url = config[key] def build(self): if not self._url: diff --git a/backend/app/infrastructure/graphql/epochs.py b/backend/app/infrastructure/graphql/epochs.py index 74770981c5..a5454554de 100644 --- a/backend/app/infrastructure/graphql/epochs.py +++ b/backend/app/infrastructure/graphql/epochs.py @@ -1,7 +1,7 @@ from flask import current_app as app from gql import gql -from app.extensions import gql_factory +from app.extensions import gql_octant_factory from app import exceptions @@ -9,23 +9,25 @@ def get_epoch_by_number(epoch_number): query = gql( """ -query GetEpoch($epochNo: Int!) { - epoches(where: {epoch: $epochNo}) { - epoch - fromTs - toTs - duration - decisionWindow - } -} - """ + query GetEpoch($epochNo: Int!) 
{ + epoches(where: {epoch: $epochNo}) { + epoch + fromTs + toTs + duration + decisionWindow + } + } + """ ) variables = {"epochNo": epoch_number} app.logger.debug( f"[Subgraph] Getting epoch properties for epoch number: {epoch_number}" ) - data = gql_factory.build().execute(query, variable_values=variables)["epoches"] + data = gql_octant_factory.build().execute(query, variable_values=variables)[ + "epoches" + ] if data: app.logger.debug(f"[Subgraph] Received epoch properties: {data[0]}") @@ -56,5 +58,5 @@ def get_epochs(): ) app.logger.debug("[Subgraph] Getting list of all epochs") - data = gql_factory.build().execute(query) + data = gql_octant_factory.build().execute(query) return data diff --git a/backend/app/infrastructure/graphql/info.py b/backend/app/infrastructure/graphql/info.py index 2ea3d39969..c9db09d5cf 100644 --- a/backend/app/infrastructure/graphql/info.py +++ b/backend/app/infrastructure/graphql/info.py @@ -1,6 +1,6 @@ from gql import gql -from app.extensions import gql_factory +from app.extensions import gql_octant_factory def get_indexed_block_num() -> int: @@ -15,7 +15,7 @@ def get_indexed_block_num() -> int: } """ ) - data = gql_factory.build().execute(query) + data = gql_octant_factory.build().execute(query) if data: return data["_meta"]["block"]["number"] else: diff --git a/backend/app/infrastructure/graphql/locks.py b/backend/app/infrastructure/graphql/locks.py index 12c4491467..f2c14efb8b 100644 --- a/backend/app/infrastructure/graphql/locks.py +++ b/backend/app/infrastructure/graphql/locks.py @@ -2,7 +2,7 @@ from flask import current_app as app from gql import gql -from app.extensions import gql_factory +from app.extensions import gql_octant_factory class LockEvent(TypedDict): @@ -43,9 +43,9 @@ def get_user_locks_history( } app.logger.debug(f"[Subgraph] Getting user {user_address} locks") - partial_result = gql_factory.build().execute(query, variable_values=variables)[ - "lockeds" - ] + partial_result = gql_octant_factory.build().execute( + query, variable_values=variables + )["lockeds"] result = [] @@ -90,7 +90,9 @@ def get_locks_by_timestamp_range(from_ts: int, to_ts: int) -> list[LockEvent]: "toTimestamp": to_ts, } app.logger.debug(f"[Subgraph] Getting locks in timestamp range {from_ts} - {to_ts}") - result = gql_factory.build().execute(query, variable_values=variables)["lockeds"] + result = gql_octant_factory.build().execute(query, variable_values=variables)[ + "lockeds" + ] app.logger.debug(f"[Subgraph] Received locks: {result}") return result @@ -124,7 +126,9 @@ def get_last_lock_before(user_address: str, before: int) -> LockEvent | None: app.logger.debug( f"[Subgraph] Getting user {user_address} last lock before {before}" ) - locks = gql_factory.build().execute(query, variable_values=variables)["lockeds"] + locks = gql_octant_factory.build().execute(query, variable_values=variables)[ + "lockeds" + ] app.logger.debug(f"[Subgraph] Received locks: {locks}") return locks[0] if locks else None @@ -160,7 +164,9 @@ def get_locks_by_address_and_timestamp_range( app.logger.debug( f"[Subgraph] Getting user {user_address} locks in timestamp range {from_ts} - {to_ts}" ) - result = gql_factory.build().execute(query, variable_values=variables)["lockeds"] + result = gql_octant_factory.build().execute(query, variable_values=variables)[ + "lockeds" + ] app.logger.debug(f"[Subgraph] Received locks: {result}") return result diff --git a/backend/app/infrastructure/graphql/merkle_roots.py b/backend/app/infrastructure/graphql/merkle_roots.py index 1c47aefbf5..d54d4f5b97 100644 --- 
a/backend/app/infrastructure/graphql/merkle_roots.py +++ b/backend/app/infrastructure/graphql/merkle_roots.py @@ -1,7 +1,7 @@ from flask import current_app as app from gql import gql -from app.extensions import gql_factory +from app.extensions import gql_octant_factory def get_all_vault_merkle_roots(): @@ -18,7 +18,7 @@ def get_all_vault_merkle_roots(): ) app.logger.debug("[Subgraph] Getting all vault merkle roots") - result = gql_factory.build().execute(query)["vaultMerkleRoots"] + result = gql_octant_factory.build().execute(query)["vaultMerkleRoots"] app.logger.debug(f"[Subgraph] Received merkle roots: {result}") return result diff --git a/backend/app/infrastructure/graphql/unlocks.py b/backend/app/infrastructure/graphql/unlocks.py index e51535cc1d..6b2cdc0813 100644 --- a/backend/app/infrastructure/graphql/unlocks.py +++ b/backend/app/infrastructure/graphql/unlocks.py @@ -1,8 +1,8 @@ -from typing import Literal, TypedDict +from typing import Literal, TypedDict, List from flask import current_app as app from gql import gql -from app.extensions import gql_factory +from app.extensions import gql_octant_factory class UnlockEvent(TypedDict): @@ -16,7 +16,7 @@ class UnlockEvent(TypedDict): def get_user_unlocks_history( user_address: str, from_timestamp: int, limit: int -) -> list[UnlockEvent]: +) -> List[UnlockEvent]: query = gql( """ query GetUnlocks($userAddress: Bytes!, $fromTimestamp: Int!, $limit: Int!) { @@ -44,9 +44,9 @@ def get_user_unlocks_history( } app.logger.debug(f"[Subgraph] Getting user {user_address} unlocks") - partial_result = gql_factory.build().execute(query, variable_values=variables)[ - "unlockeds" - ] + partial_result = gql_octant_factory.build().execute( + query, variable_values=variables + )["unlockeds"] result = [] @@ -94,7 +94,9 @@ def get_unlocks_by_timestamp_range(from_ts, to_ts) -> list[UnlockEvent]: app.logger.debug( f"[Subgraph] Getting unlocks in timestamp range {from_ts} - {to_ts}" ) - result = gql_factory.build().execute(query, variable_values=variables)["unlockeds"] + result = gql_octant_factory.build().execute(query, variable_values=variables)[ + "unlockeds" + ] app.logger.debug(f"[Subgraph] Received unlocks: {result}") return result @@ -130,7 +132,9 @@ def get_unlocks_by_address_and_timestamp_range( app.logger.debug( f"[Subgraph] Getting user {user_address} unlocks in timestamp range {from_ts} - {to_ts}" ) - result = gql_factory.build().execute(query, variable_values=variables)["unlockeds"] + result = gql_octant_factory.build().execute(query, variable_values=variables)[ + "unlockeds" + ] app.logger.debug(f"[Subgraph] Received unlocks: {result}") return result @@ -164,7 +168,9 @@ def get_last_unlock_before(user_address: str, before: int) -> UnlockEvent | None app.logger.debug( f"[Subgraph] Getting user {user_address} last unlock before {before}" ) - unlocks = gql_factory.build().execute(query, variable_values=variables)["unlockeds"] + unlocks = gql_octant_factory.build().execute(query, variable_values=variables)[ + "unlockeds" + ] app.logger.debug(f"[Subgraph] Received unlocks: {unlocks}") return unlocks[0] if unlocks else None diff --git a/backend/app/infrastructure/graphql/withdrawals.py b/backend/app/infrastructure/graphql/withdrawals.py index cf81133dba..de2f84b879 100644 --- a/backend/app/infrastructure/graphql/withdrawals.py +++ b/backend/app/infrastructure/graphql/withdrawals.py @@ -1,7 +1,7 @@ from flask import current_app as app from gql import gql -from app.extensions import gql_factory +from app.extensions import gql_octant_factory def 
get_user_withdrawals_history(user_address: str, from_timestamp: int, limit: int): @@ -32,9 +32,9 @@ def get_user_withdrawals_history(user_address: str, from_timestamp: int, limit: app.logger.debug( f"[Subgraph] Getting user {user_address} withdrawals before ts {from_timestamp}" ) - partial_result = gql_factory.build().execute(query, variable_values=variables)[ - "withdrawals" - ] + partial_result = gql_octant_factory.build().execute( + query, variable_values=variables + )["withdrawals"] result = [] @@ -81,7 +81,7 @@ def get_withdrawals_by_address_and_timestamp_range( f"[Subgraph] Getting user {user_address} withdrawals in timestamp range {from_timestamp} - {to_timestamp}" ) - result = gql_factory.build().execute(query, variable_values=variables)[ + result = gql_octant_factory.build().execute(query, variable_values=variables)[ "withdrawals" ] diff --git a/backend/app/infrastructure/routes/deposits.py b/backend/app/infrastructure/routes/deposits.py index 87a2adfd70..6ce30abc70 100644 --- a/backend/app/infrastructure/routes/deposits.py +++ b/backend/app/infrastructure/routes/deposits.py @@ -93,7 +93,7 @@ def get(self, epoch): @ns.route("/users//") @ns.doc( - description="Returns user's effective deposit for a finialized or pending epoch.", + description="Returns user's effective deposit for a finalized or pending epoch.", params={ "epoch": "Epoch number", "user_address": "User ethereum address in hexadecimal form (case-insensitive, prefixed with 0x)", diff --git a/backend/app/infrastructure/routes/history.py b/backend/app/infrastructure/routes/history.py index 22fb0eb545..2c6fd6eb75 100644 --- a/backend/app/infrastructure/routes/history.py +++ b/backend/app/infrastructure/routes/history.py @@ -90,7 +90,10 @@ class History(OctantResource): @ns.param("cursor", description="History page cursor", _in="query") @ns.param("limit", description="History page size", _in="query") @ns.marshal_with(user_history_model) - @ns.response(200, "User history successfully retrieved") + @ns.response( + 200, + "User history from the Octant and Sablier subgraphs successfully retrieved", + ) def get(self, user_address): page_cursor = request.args.get("cursor", type=str) page_limit = request.args.get("limit", type=int) diff --git a/backend/app/infrastructure/routes/user.py b/backend/app/infrastructure/routes/user.py index 046b63fcf0..0b8eeb0d81 100644 --- a/backend/app/infrastructure/routes/user.py +++ b/backend/app/infrastructure/routes/user.py @@ -5,17 +5,18 @@ import app.legacy.controllers.user as user_controller from app.extensions import api from app.infrastructure import OctantResource +from app.modules.uq import controller as uq_controller +from app.modules.user.antisybil.controller import ( + get_user_antisybil_status, + update_user_antisybil_status, +) from app.modules.user.patron_mode.controller import get_patrons_addresses from app.modules.user.tos.controller import ( post_user_terms_of_service_consent, get_user_terms_of_service_consent_status, ) -from app.modules.user.antisybil.controller import ( - get_user_antisybil_status, - update_user_antisybil_status, -) +from app.modules.user.winnings.controller import get_user_winnings from app.settings import config -from app.modules.uq import controller as uq_controller ns = Namespace("user", description="Octant user settings") api.add_namespace(ns) @@ -97,6 +98,31 @@ }, ) +user_winning_model = api.model( + "UserWinning", + { + "amount": fields.String( + required=True, + description="Amount in WEI", + ), + "dateAvailableForWithdrawal": fields.String( 
required=True, + description="Date when winning is available for withdrawal as unix timestamp", + ), + }, +) + +user_winnings_model = api.model( + "UserWinnings", + { + "winnings": fields.List( + fields.Nested(user_winning_model), + required=True, + description="User winnings", + ), + }, +) + + @ns.route("/<string:user_address>/tos") @ns.doc( @@ -316,3 +342,31 @@ def get(self, epoch: int): for user_address, uq_score in uq_scores ] } + + +@ns.route("/<string:user_address>/raffle/winnings") +@ns.doc( + params={ + "user_address": "User ethereum address in hexadecimal format (case-insensitive, prefixed with 0x)", + } +) +class UserWinnings(OctantResource): + @ns.doc( + description="Returns an array of user's winnings with amounts and availability dates", + ) + @ns.marshal_with(user_winnings_model) + @ns.response(200, "User's winnings retrieved successfully") + def get(self, user_address: str): + app.logger.debug(f"Getting winnings for user {user_address}.") + winnings = get_user_winnings(user_address) + app.logger.debug(f"Retrieved {len(winnings)} winnings for user {user_address}.") + + return { + "winnings": [ + { + "amount": winning.amount, + "dateAvailableForWithdrawal": winning.date_available_for_withdrawal, + } + for winning in winnings + ] + } diff --git a/backend/app/infrastructure/sablier/__init__.py b/backend/app/infrastructure/sablier/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/app/infrastructure/sablier/events.py b/backend/app/infrastructure/sablier/events.py new file mode 100644 index 0000000000..fdbd5de1f3 --- /dev/null +++ b/backend/app/infrastructure/sablier/events.py @@ -0,0 +1,186 @@ +from typing import TypedDict, List, Dict + +from flask import current_app as app +from gql import gql + +from app.extensions import gql_sablier_factory + + +class SablierAction(TypedDict): + category: str + addressA: str + addressB: str + amountA: int + amountB: int + timestamp: int + hash: str + + +class SablierStream(TypedDict): + id: str + actions: List[SablierAction] + intactAmount: str + + +class SablierStreamForTrackingWinner(TypedDict): + id: str + endTime: str + depositAmount: str + intactAmount: str + + +def fetch_streams(query: str, variables: Dict) -> List[SablierStream]: + """ + Fetch all streams from the Sablier subgraph, paginating with `skip` until every page has been retrieved. + """ + all_streams = [] + has_more = True + limit = 1000 + skip = 0 + + while has_more: + variables.update({"limit": limit, "skip": skip}) + + app.logger.debug(f"[Sablier Subgraph] Querying streams with skip: {skip}") + result = gql_sablier_factory.build().execute( + gql(query), variable_values=variables + ) + + streams = result.get("streams", []) + + app.logger.debug(f"[Sablier Subgraph] Received {len(streams)} streams.") + + for stream in streams: + actions = stream.get("actions", []) + final_intact_amount = stream.get("intactAmount", 0) + all_streams.append( + SablierStream(actions=actions, intactAmount=final_intact_amount) + ) + + if len(streams) < limit: + has_more = False + else: + skip += limit + + return all_streams + + +def get_user_events_history(user_address: str) -> List[SablierStream]: + """ + Get all the locks and unlocks for a user. + """ + query = """ + query GetEvents($sender: String!, $recipient: String!, $tokenAddress: String!, $limit: Int!, $skip: Int!) 
{ + streams( + where: { + sender: $sender + recipient: $recipient + asset_: {address: $tokenAddress} + transferable: false + } + first: $limit + skip: $skip + orderBy: timestamp + ) { + id + intactAmount + actions(where: {category_in: [Cancel, Withdraw, Create]}, orderBy: timestamp) { + category + addressA + addressB + amountA + amountB + timestamp + hash + } + } + } + """ + variables = { + "sender": _get_sender(), + "recipient": user_address, + "tokenAddress": _get_token_address(), + } + + streams = fetch_streams(query, variables) + return streams + + +def get_all_streams_history() -> List[SablierStream]: + """ + Get all the locks and unlocks in history. + """ + query = """ + query GetAllEvents($sender: String!, $tokenAddress: String!, $limit: Int!, $skip: Int!) { + streams( + where: { + sender: $sender + asset_: {address: $tokenAddress} + transferable: false + } + first: $limit + skip: $skip + orderBy: timestamp + ) { + id + intactAmount + actions(where: {category_in: [Cancel, Withdraw, Create]}, orderBy: timestamp) { + category + addressA + addressB + amountA + amountB + timestamp + hash + } + } + } + """ + variables = { + "sender": _get_sender(), + "tokenAddress": _get_token_address(), + } + + return fetch_streams(query, variables) + + +def get_streams_with_create_events_to_user( + user_address: str, +) -> List[SablierStreamForTrackingWinner]: + """ + Get all the create events for a user. + """ + query = """ + query GetCreateEvents($sender: String!, $recipient: String!, $tokenAddress: String!) { + streams( + where: { + sender: $sender + recipient: $recipient + asset_: {address: $tokenAddress} + transferable: false + } + orderBy: timestamp + ) { + id + intactAmount + endTime + depositAmount + } + } + """ + variables = { + "sender": "0x76273DCC41356e5f0c49bB68e525175DC7e83417", # _get_sender(), + "recipient": user_address, + "tokenAddress": "0x6b175474e89094c44da98b954eedeac495271d0f", # _get_token_address(), + } + + result = gql_sablier_factory.build().execute(gql(query), variable_values=variables) + return result.get("streams", []) + + +def _get_sender(): + return app.config["SABLIER_SENDER_ADDRESS"] + + +def _get_token_address(): + return app.config["GLM_TOKEN_ADDRESS"] diff --git a/backend/app/modules/common/sablier_events_mapper.py b/backend/app/modules/common/sablier_events_mapper.py new file mode 100644 index 0000000000..60bbedcd1c --- /dev/null +++ b/backend/app/modules/common/sablier_events_mapper.py @@ -0,0 +1,196 @@ +from copy import deepcopy +from dataclasses import dataclass +from typing import List, TypedDict, Literal, Tuple + +from app.engine.user.effective_deposit import ( + SablierEventType, + DepositSource, + EventType, +) +from app.infrastructure.sablier.events import SablierStream, SablierAction + + +class FlattenStrategy: + LOCKS = 0 + UNLOCKS = 1 + ALL = 2 + + +class SablierEvent(TypedDict): + __source: DepositSource + depositBefore: int + amount: int + timestamp: int + user: str + transactionHash: str + type: SablierEventType + + +class SablierEventLock(SablierEvent): + __typename: Literal["Locked"] + + +class SablierEventUnlock(SablierEvent): + __typename: Literal["Unlocked"] + + +@dataclass +class MappedEvents: + locks: List[SablierEvent] + unlocks: List[SablierEvent] + + +def process_to_locks_and_unlocks( + sablier_streams: List[SablierStream], + *, + from_timestamp: int = None, + to_timestamp: int = None, + inclusively: bool = False, +) -> List[MappedEvents]: + """ + Returns TypedDict with locks and unlocks from Sablier stream. 
+ We assume that the sender sends GLMs only to eligible users. + """ + if len(sablier_streams) == 0: + return [MappedEvents(locks=[], unlocks=[])] + + mapped_streams = [] + for sablier_stream in sablier_streams: + event_items = _convert(sablier_stream["actions"]) + lock_items_with_filters = _apply_filters( + event_items, + from_timestamp=from_timestamp, + to_timestamp=to_timestamp, + inclusively=inclusively, + ) + + mapped_streams.append( + MappedEvents( + locks=list( + filter( + lambda lock: lock["__typename"] == EventType.LOCK, + lock_items_with_filters, + ) + ), + unlocks=list( + filter( + lambda lock: lock["__typename"] == EventType.UNLOCK, + lock_items_with_filters, + ) + ), + ) + ) + return mapped_streams + + +def _apply_filters( + event_items: List[SablierEvent], + *, + from_timestamp: int, + to_timestamp: int, + inclusively: bool, +) -> List[SablierEvent]: + copy_event_items = deepcopy(event_items) + + if inclusively is True: + to_timestamp += 1 + + for item in event_items: + if from_timestamp and item["timestamp"] < from_timestamp: + copy_event_items.remove(item) + if to_timestamp and item["timestamp"] > to_timestamp: + copy_event_items.remove(item) + + return copy_event_items + + +def _process_create( + action: SablierAction, starting_deposit: int +) -> Tuple[SablierEvent, int]: + amount = int(action["amountA"]) + deposit_before = starting_deposit + starting_deposit += amount + lock_item = SablierEventLock( + __source=DepositSource.SABLIER, + __typename=EventType.LOCK.value, + amount=amount, + timestamp=int(action["timestamp"]), + transactionHash=action["hash"], + depositBefore=deposit_before, + user=action["addressB"], + type=SablierEventType.CREATE, + ) + return lock_item, starting_deposit + + +def _process_withdraw( + action: SablierAction, starting_deposit: int +) -> Tuple[SablierEvent, int]: + amount = int(action["amountB"]) + deposit_before = starting_deposit + starting_deposit -= amount + lock_item = SablierEventUnlock( + __source=DepositSource.SABLIER, + __typename=EventType.UNLOCK.value, + amount=amount, + timestamp=int(action["timestamp"]), + transactionHash=action["hash"], + depositBefore=deposit_before, + user=action["addressB"], + type=SablierEventType.WITHDRAW, + ) + return lock_item, starting_deposit + + +def _process_cancel( + action: SablierAction, starting_deposit: int +) -> Tuple[SablierEvent, int]: + intact_amount = int(action["amountB"]) + cancelled_amount = int(action["amountA"]) + deposit_before = starting_deposit + starting_deposit = intact_amount + lock_item = SablierEventUnlock( + __source=DepositSource.SABLIER, + __typename=EventType.UNLOCK.value, + amount=cancelled_amount, + timestamp=int(action["timestamp"]), + transactionHash=action["hash"], + depositBefore=deposit_before, + user=action["addressB"], + type=SablierEventType.CANCEL, + ) + return lock_item, starting_deposit + + +def _convert(actions: List[SablierAction]) -> List[SablierEvent]: + lock_items = [] + action_strategy = { + SablierEventType.CREATE.value: _process_create, + SablierEventType.WITHDRAW.value: _process_withdraw, + SablierEventType.CANCEL.value: _process_cancel, + } + starting_deposit = 0 + + for action in actions: + category = action["category"] + if category in action_strategy: + process_func = action_strategy[category] + lock_item, starting_deposit = process_func(action, starting_deposit) + lock_items.append(lock_item) + + return lock_items + + +def flatten_sablier_events( + streams: List[MappedEvents], flatten_strategy: FlattenStrategy +): + output = [] + for mapped_events in 
streams: + if flatten_strategy == FlattenStrategy.LOCKS: + output += mapped_events.locks + elif flatten_strategy == FlattenStrategy.UNLOCKS: + output += mapped_events.unlocks + elif flatten_strategy == FlattenStrategy.ALL: + output += mapped_events.locks + mapped_events.unlocks + + return output diff --git a/backend/app/modules/common/time.py b/backend/app/modules/common/time.py index 3a9735b09b..a7738c7a8f 100644 --- a/backend/app/modules/common/time.py +++ b/backend/app/modules/common/time.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from dataclasses import dataclass from datetime import datetime as DateTime, timezone @@ -30,7 +32,7 @@ def __str__(self): def __repr__(self): return f"Timestamp({str(self.timestamp_us())})" - def __eq__(self, o): + def __eq__(self, o: Timestamp): if isinstance(o, Timestamp): return self._timestamp_us == o._timestamp_us elif isinstance(o, int): @@ -38,7 +40,7 @@ def __eq__(self, o): else: return False - def __le__(self, o): + def __le__(self, o: Timestamp): if isinstance(o, Timestamp): return self._timestamp_us <= o._timestamp_us else: @@ -46,6 +48,14 @@ def __le__(self, o): f"'<=' not supported between instances of type '{type(self)}' and '{type(o)}'" ) + def __lt__(self, o: Timestamp): + if isinstance(o, Timestamp): + return self._timestamp_us < o._timestamp_us + else: + raise TypeError( + f"'<' not supported between instances of type '{type(self)}' and '{type(o)}'" + ) + def from_timestamp_s(timestamp_s: float) -> Timestamp: return Timestamp(int(timestamp_s * 10**6)) diff --git a/backend/app/modules/modules_factory/current.py b/backend/app/modules/modules_factory/current.py index 7ad35cbe47..9e3db6bead 100644 --- a/backend/app/modules/modules_factory/current.py +++ b/backend/app/modules/modules_factory/current.py @@ -6,6 +6,7 @@ from app.modules.history.service.full import FullHistory from app.modules.modules_factory.protocols import ( OctantRewards, + WinningsService, UserEffectiveDeposits, TotalEffectiveDeposits, HistoryService, @@ -54,6 +55,7 @@ from app.modules.projects.details.service.projects_details import ( StaticProjectsDetailsService, ) +from app.modules.user.winnings.service.raffle import RaffleWinningsService class CurrentUserDeposits(UserEffectiveDeposits, TotalEffectiveDeposits, Protocol): @@ -66,6 +68,7 @@ class CurrentServices(Model): user_tos_service: UserTos user_antisybil_service: GitcoinPassportAntisybil octant_rewards_service: OctantRewards + user_winnings_service: WinningsService history_service: HistoryService simulated_pending_snapshot_service: SimulatePendingSnapshots multisig_signatures_service: MultisigSignatures @@ -157,6 +160,7 @@ def create(chain_id: int) -> "CurrentServices": multisig_signatures_service=multisig_signatures, user_tos_service=user_tos, user_antisybil_service=user_antisybil_service, + user_winnings_service=RaffleWinningsService(), projects_metadata_service=StaticProjectsMetadataService(), projects_details_service=StaticProjectsDetailsService(), user_budgets_service=user_budgets, diff --git a/backend/app/modules/modules_factory/finalized.py b/backend/app/modules/modules_factory/finalized.py index 20cb433f2b..2ce44132a6 100644 --- a/backend/app/modules/modules_factory/finalized.py +++ b/backend/app/modules/modules_factory/finalized.py @@ -16,6 +16,12 @@ ProjectsDetailsService, ) from app.modules.octant_rewards.general.service.finalized import FinalizedOctantRewards +from app.modules.projects.details.service.projects_details import ( + StaticProjectsDetailsService, +) +from 
app.modules.projects.metadata.service.projects_metadata import ( + StaticProjectsMetadataService, +) from app.modules.projects.rewards.service.saved import SavedProjectRewards from app.modules.user.allocations.service.saved import SavedUserAllocations from app.modules.user.budgets.service.saved import SavedUserBudgets @@ -23,13 +29,7 @@ from app.modules.user.patron_mode.service.events_based import EventsBasedUserPatronMode from app.modules.user.rewards.service.saved import SavedUserRewards from app.modules.withdrawals.service.finalized import FinalizedWithdrawals -from app.modules.projects.metadata.service.projects_metadata import ( - StaticProjectsMetadataService, -) from app.pydantic import Model -from app.modules.projects.details.service.projects_details import ( - StaticProjectsDetailsService, -) class FinalizedOctantRewardsProtocol(OctantRewards, Leverage, Protocol): diff --git a/backend/app/modules/modules_factory/finalizing.py b/backend/app/modules/modules_factory/finalizing.py index 3c38584c7f..4f9699c330 100644 --- a/backend/app/modules/modules_factory/finalizing.py +++ b/backend/app/modules/modules_factory/finalizing.py @@ -17,6 +17,12 @@ ) from app.modules.octant_rewards.general.service.pending import PendingOctantRewards from app.modules.octant_rewards.matched.pending import PendingOctantMatchedRewards +from app.modules.projects.details.service.projects_details import ( + StaticProjectsDetailsService, +) +from app.modules.projects.metadata.service.projects_metadata import ( + StaticProjectsMetadataService, +) from app.modules.projects.rewards.service.finalizing import FinalizingProjectRewards from app.modules.projects.rewards.service.saved import SavedProjectRewards from app.modules.snapshots.finalized.service.finalizing import FinalizingSnapshots @@ -26,13 +32,7 @@ from app.modules.user.patron_mode.service.events_based import EventsBasedUserPatronMode from app.modules.user.rewards.service.calculated import CalculatedUserRewards from app.modules.withdrawals.service.pending import PendingWithdrawals -from app.modules.projects.metadata.service.projects_metadata import ( - StaticProjectsMetadataService, -) from app.pydantic import Model -from app.modules.projects.details.service.projects_details import ( - StaticProjectsDetailsService, -) class FinalizingOctantRewards(OctantRewards, Leverage, Protocol): diff --git a/backend/app/modules/modules_factory/future.py b/backend/app/modules/modules_factory/future.py index 64ea69165c..81afd077d6 100644 --- a/backend/app/modules/modules_factory/future.py +++ b/backend/app/modules/modules_factory/future.py @@ -6,17 +6,17 @@ from app.modules.octant_rewards.general.service.calculated import ( CalculatedOctantRewards, ) -from app.modules.staking.proceeds.service.estimated import EstimatedStakingProceeds -from app.modules.user.deposits.service.contract_balance import ( - ContractBalanceUserDeposits, +from app.modules.projects.details.service.projects_details import ( + StaticProjectsDetailsService, ) from app.modules.projects.metadata.service.projects_metadata import ( StaticProjectsMetadataService, ) -from app.pydantic import Model -from app.modules.projects.details.service.projects_details import ( - StaticProjectsDetailsService, +from app.modules.staking.proceeds.service.estimated import EstimatedStakingProceeds +from app.modules.user.deposits.service.contract_balance import ( + ContractBalanceUserDeposits, ) +from app.pydantic import Model class FutureServices(Model): diff --git a/backend/app/modules/modules_factory/pending.py 
b/backend/app/modules/modules_factory/pending.py index 6b76c46406..0ab061a9e5 100644 --- a/backend/app/modules/modules_factory/pending.py +++ b/backend/app/modules/modules_factory/pending.py @@ -1,5 +1,11 @@ from typing import Protocol +from app.constants import ( + UQ_THRESHOLD_MAINNET, + UQ_THRESHOLD_NOT_MAINNET, + TIMEOUT_LIST, + TIMEOUT_LIST_NOT_MAINNET, +) from app.modules.dto import SignatureOpType from app.modules.modules_factory.protocols import ( UserPatronMode, @@ -25,6 +31,9 @@ from app.modules.multisig_signatures.service.offchain import OffchainMultisigSignatures from app.modules.octant_rewards.general.service.pending import PendingOctantRewards from app.modules.octant_rewards.matched.pending import PendingOctantMatchedRewards +from app.modules.projects.details.service.projects_details import ( + StaticProjectsDetailsService, +) from app.modules.projects.metadata.service.projects_metadata import ( StaticProjectsMetadataService, ) @@ -49,15 +58,6 @@ from app.modules.withdrawals.service.pending import PendingWithdrawals from app.pydantic import Model from app.shared.blockchain_types import compare_blockchain_types, ChainTypes -from app.constants import ( - UQ_THRESHOLD_MAINNET, - UQ_THRESHOLD_NOT_MAINNET, - TIMEOUT_LIST, - TIMEOUT_LIST_NOT_MAINNET, -) -from app.modules.projects.details.service.projects_details import ( - StaticProjectsDetailsService, -) class PendingOctantRewardsService(OctantRewards, Leverage, Protocol): diff --git a/backend/app/modules/modules_factory/pre_pending.py b/backend/app/modules/modules_factory/pre_pending.py index e5bfd8c63b..2cb6849784 100644 --- a/backend/app/modules/modules_factory/pre_pending.py +++ b/backend/app/modules/modules_factory/pre_pending.py @@ -6,6 +6,7 @@ AllUserEffectiveDeposits, OctantRewards, PendingSnapshots, + WinningsService, UserEffectiveDeposits, SavedProjectRewardsService, ProjectsMetadataService, @@ -28,6 +29,7 @@ from app.modules.projects.details.service.projects_details import ( StaticProjectsDetailsService, ) +from app.modules.user.winnings.service.raffle import RaffleWinningsService class PrePendingUserDeposits(UserEffectiveDeposits, AllUserEffectiveDeposits, Protocol): @@ -41,6 +43,7 @@ class PrePendingServices(Model): project_rewards_service: SavedProjectRewardsService projects_metadata_service: ProjectsMetadataService projects_details_service: ProjectsDetailsService + user_winnings_service: WinningsService @staticmethod def create(chain_id: int) -> "PrePendingServices": @@ -69,4 +72,5 @@ def create(chain_id: int) -> "PrePendingServices": project_rewards_service=SavedProjectRewards(), projects_metadata_service=StaticProjectsMetadataService(), projects_details_service=StaticProjectsDetailsService(), + user_winnings_service=RaffleWinningsService(), ) diff --git a/backend/app/modules/modules_factory/protocols.py b/backend/app/modules/modules_factory/protocols.py index 0bb8f4957d..a19788e0f1 100644 --- a/backend/app/modules/modules_factory/protocols.py +++ b/backend/app/modules/modules_factory/protocols.py @@ -20,6 +20,7 @@ from app.modules.history.dto import UserHistoryDTO from app.modules.multisig_signatures.dto import Signature from app.modules.projects.details.service.projects_details import ProjectsDetailsDTO +from app.modules.user.winnings.service.raffle import UserWinningDTO @runtime_checkable @@ -125,6 +126,14 @@ def get_unused_rewards(self, context: Context) -> Dict[str, int]: ... 
+@runtime_checkable +class WinningsService(Protocol): + def get_user_winnings( + self, context: Context, user_address: str + ) -> List[UserWinningDTO]: + ... + + @runtime_checkable class PendingSnapshots(Protocol): def create_pending_epoch_snapshot(self, context: Context) -> int: diff --git a/backend/app/modules/user/deposits/service/calculated.py b/backend/app/modules/user/deposits/service/calculated.py index 3fac44847a..8e1d188a89 100644 --- a/backend/app/modules/user/deposits/service/calculated.py +++ b/backend/app/modules/user/deposits/service/calculated.py @@ -3,7 +3,13 @@ from app.context.manager import Context from app.engine.user.effective_deposit import UserDeposit, DepositEvent from app.infrastructure.graphql import locks, unlocks +from app.infrastructure.sablier.events import get_user_events_history from app.modules.common.effective_deposits import calculate_effective_deposits +from app.modules.common.sablier_events_mapper import ( + process_to_locks_and_unlocks, + FlattenStrategy, + flatten_sablier_events, +) from app.modules.common.time import Timestamp, from_timestamp_s from app.modules.history.dto import LockItem, OpType from app.pydantic import Model @@ -49,8 +55,28 @@ def get_user_effective_deposit(self, context: Context, user_address: str) -> int def get_locks( self, user_address: str, from_timestamp: Timestamp, limit: int - ) -> list[LockItem]: - return [ + ) -> List[LockItem]: + sablier_streams = get_user_events_history(user_address) + mapped_streams = process_to_locks_and_unlocks( + sablier_streams, + to_timestamp=int(from_timestamp.timestamp_s()), + inclusively=True, + ) + locks_from_sablier = flatten_sablier_events( + mapped_streams, FlattenStrategy.LOCKS + ) + + sablier_locks = [ + LockItem( + type=OpType.LOCK, + amount=int(r["amount"]), + timestamp=from_timestamp_s(r["timestamp"]), + transaction_hash=r["transactionHash"], + ) + for r in locks_from_sablier + ] + + octant_subgraph_locks = [ LockItem( type=OpType.LOCK, amount=int(r["amount"]), @@ -62,10 +88,29 @@ def get_locks( ) ] + return sablier_locks + octant_subgraph_locks + def get_unlocks( self, user_address: str, from_timestamp: Timestamp, limit: int - ) -> list[LockItem]: - return [ + ) -> List[LockItem]: + sablier_streams = get_user_events_history(user_address) + mapped_streams = process_to_locks_and_unlocks( + sablier_streams, to_timestamp=int(from_timestamp.timestamp_s()) + ) + unlocks_from_sablier = flatten_sablier_events( + mapped_streams, FlattenStrategy.UNLOCKS + ) + + sablier_unlocks = [ + LockItem( + type=OpType.UNLOCK, + amount=int(r["amount"]), + timestamp=from_timestamp_s(r["timestamp"]), + transaction_hash=r["transactionHash"], + ) + for r in unlocks_from_sablier + ] + octant_subgraph_unlocks = [ LockItem( type=OpType.UNLOCK, amount=int(r["amount"]), @@ -76,3 +121,4 @@ def get_unlocks( user_address, int(from_timestamp.timestamp_s()), limit ) ] + return sablier_unlocks + octant_subgraph_unlocks diff --git a/backend/app/modules/user/events_generator/core.py b/backend/app/modules/user/events_generator/core.py new file mode 100644 index 0000000000..87b2bc6b88 --- /dev/null +++ b/backend/app/modules/user/events_generator/core.py @@ -0,0 +1,44 @@ +from copy import deepcopy +from typing import List + +from app.engine.user.effective_deposit import DepositEvent, EventType, DepositSource + + +def unify_deposit_balances(events: List[DepositEvent]) -> List[DepositEvent]: + """ + Unify deposit balance for each event in the list of events. Events are expected to be sorted by timestamp. 
+ The first event is always from Octant, but the first Sablier event may have a non-zero `deposit_before`, indicating a balance from the past. + + Returns: + List[DepositEvent]: A list of events with adjusted `deposit_before` and `deposit_after`. + """ + modified_events = deepcopy(events) + + acc_balance_sablier = 0 + acc_balance_octant = events[0].deposit_before # balance from previous epoch + + first_sablier_processed = False + for event in modified_events[1:]: + if event.source == DepositSource.SABLIER and not first_sablier_processed: + acc_balance_sablier = event.deposit_before + first_sablier_processed = True + + combined_balance = acc_balance_sablier + acc_balance_octant + event.deposit_before = combined_balance + + if event.type == EventType.LOCK: + if event.source == DepositSource.SABLIER: + acc_balance_sablier += event.amount + else: + acc_balance_octant += event.amount + + event.deposit_after = event.deposit_before + event.amount + elif event.type == EventType.UNLOCK: + if event.source == DepositSource.SABLIER: + acc_balance_sablier -= event.amount + else: + acc_balance_octant -= event.amount + + event.deposit_after = event.deposit_before - event.amount + + return modified_events diff --git a/backend/app/modules/user/events_generator/service/db_and_graph.py b/backend/app/modules/user/events_generator/service/db_and_graph.py index 1f80123f33..cebc41b6bb 100644 --- a/backend/app/modules/user/events_generator/service/db_and_graph.py +++ b/backend/app/modules/user/events_generator/service/db_and_graph.py @@ -15,6 +15,18 @@ get_unlocks_by_address_and_timestamp_range, get_unlocks_by_timestamp_range, ) +from app.infrastructure.sablier.events import ( + get_all_streams_history, +) +from app.infrastructure.sablier.events import ( + get_user_events_history, +) +from app.modules.common.sablier_events_mapper import ( + process_to_locks_and_unlocks, + flatten_sablier_events, + FlattenStrategy, +) +from app.modules.user.events_generator.core import unify_deposit_balances from app.pydantic import Model @@ -23,7 +35,7 @@ def get_user_events( self, context: Context, user_address: str ) -> List[DepositEvent]: """ - Get user lock and unlock events from the subgraph within the given timestamp range, sort them by timestamp, + Get user lock and unlock events from the subgraph & sablier within the given timestamp range, sort them by timestamp, Returns: A list of event dictionaries sorted by timestamp. 
@@ -44,6 +56,11 @@ def get_user_events( events.extend( get_unlocks_by_address_and_timestamp_range(user_address, start, end) ) + sablier_streams = get_user_events_history(user_address) + mapped_streams = process_to_locks_and_unlocks( + sablier_streams, from_timestamp=start, to_timestamp=end + ) + events += flatten_sablier_events(mapped_streams, FlattenStrategy.ALL) events = list(map(DepositEvent.from_dict, events)) sorted_events = sorted(events, key=attrgetter("timestamp")) @@ -53,7 +70,9 @@ def get_user_events( if len(sorted_events) == 1 and sorted_events[0].deposit_after == 0: return [] - return sorted_events + sorted_events_with_unified_deposits = unify_deposit_balances(sorted_events) + + return sorted_events_with_unified_deposits def get_all_users_events(self, context: Context) -> Dict[str, List[DepositEvent]]: """ @@ -69,8 +88,15 @@ def get_all_users_events(self, context: Context) -> Dict[str, List[DepositEvent] end = context.epoch_details.end_sec epoch_start_events = self._get_epoch_start_deposits(epoch_num, start) - epoch_events = get_locks_by_timestamp_range(start, end) + sablier_streams = get_all_streams_history() + mapped_streams = process_to_locks_and_unlocks( + sablier_streams, from_timestamp=start, to_timestamp=end + ) + epoch_events = [] + epoch_events += flatten_sablier_events(mapped_streams, FlattenStrategy.ALL) + epoch_events += get_locks_by_timestamp_range(start, end) epoch_events += get_unlocks_by_timestamp_range(start, end) + epoch_events = list(map(DepositEvent.from_dict, epoch_events)) sorted_events = sorted(epoch_events, key=attrgetter("user", "timestamp")) @@ -98,6 +124,10 @@ def get_all_users_events(self, context: Context) -> Dict[str, List[DepositEvent] ), ) + user_events[user_address] = unify_deposit_balances( + user_events[user_address] + ) + return user_events def _get_user_epoch_start_deposit( diff --git a/backend/app/modules/user/winnings/controller.py b/backend/app/modules/user/winnings/controller.py new file mode 100644 index 0000000000..63198ba635 --- /dev/null +++ b/backend/app/modules/user/winnings/controller.py @@ -0,0 +1,14 @@ +from typing import List + +from app.context.manager import state_context +from app.modules.registry import get_services +from app.modules.modules_factory.protocols import WinningsService +from app.modules.user.winnings.service.raffle import UserWinningDTO +from app.context.epoch_state import EpochState + + +def get_user_winnings(user_address: str) -> List[UserWinningDTO]: + context = state_context(EpochState.CURRENT) + service: WinningsService = get_services(context.epoch_state).user_winnings_service + + return service.get_user_winnings(context, user_address) diff --git a/backend/app/modules/user/winnings/service/raffle.py b/backend/app/modules/user/winnings/service/raffle.py new file mode 100644 index 0000000000..03d0c1e2f0 --- /dev/null +++ b/backend/app/modules/user/winnings/service/raffle.py @@ -0,0 +1,30 @@ +from app.pydantic import Model +from typing import List +from dataclasses import dataclass + +from app.infrastructure.sablier.events import get_streams_with_create_events_to_user +from app.context.manager import Context + + +@dataclass +class UserWinningDTO: + amount: str + date_available_for_withdrawal: str + + +class RaffleWinningsService(Model): + def get_user_winnings(self, _: Context, user_address: str) -> List[UserWinningDTO]: + streams = get_streams_with_create_events_to_user(user_address) + user_winnings = [] + + for stream in streams: + date_available_for_withdrawal = stream["endTime"] + amount = 
stream["depositAmount"] + user_winnings.append( + UserWinningDTO( + amount=amount, + date_available_for_withdrawal=date_available_for_withdrawal, + ) + ) + + return user_winnings diff --git a/backend/app/settings.py b/backend/app/settings.py index 75c37cb388..a8b165f953 100644 --- a/backend/app/settings.py +++ b/backend/app/settings.py @@ -71,6 +71,13 @@ class Config(object): "MAINNET_PROPOSAL_CIDS", DEFAULT_MAINNET_PROJECT_CIDS ) + # Sablier + SABLIER_MAINNET_SUBGRAPH_URL = os.getenv("SABLIER_MAINNET_SUBGRAPH_URL") + SABLIER_SENDER_ADDRESS = os.getenv("SABLIER_SENDER_ADDRESS", "") + GLM_TOKEN_ADDRESS = os.getenv( + "GLM_TOKEN_ADDRESS", "0x7DD9c5Cba05E151C895FDe1CF355C9A1D5DA6429" + ) + class ProdConfig(Config): """Production configuration.""" diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py index 285bd962ab..63ceed9eea 100644 --- a/backend/tests/conftest.py +++ b/backend/tests/conftest.py @@ -22,8 +22,17 @@ from app import create_app from app.engine.user.effective_deposit import DepositEvent, EventType, UserDeposit from app.exceptions import ExternalApiException -from app.extensions import db, deposits, glm, gql_factory, w3, vault, epochs -from app.infrastructure import Client as GQLClient +from app.extensions import ( + db, + deposits, + glm, + gql_octant_factory, + w3, + vault, + epochs, + gql_sablier_factory, +) +from app.infrastructure import Client as GQLClient, SubgraphEndpoints from app.infrastructure import database from app.infrastructure.contracts.epochs import Epochs from app.infrastructure.contracts.erc20 import ERC20 @@ -71,7 +80,7 @@ LOW_UQ_SCORE, ) from tests.helpers.context import get_context -from tests.helpers.gql_client import MockGQLClient +from tests.helpers.gql_client import MockGQLClient, MockSablierGQLClient from tests.helpers.mocked_epoch_details import EPOCH_EVENTS from tests.helpers.octant_rewards import octant_rewards from tests.helpers.pending_snapshot import create_pending_snapshot @@ -1523,6 +1532,12 @@ def _split_deposit_events(deposit_events): return locks_events, unlocks_events +def mock_sablier_graphql(mocker): + mock_client = MockSablierGQLClient() + mocker.patch.object(gql_sablier_factory, "build") + gql_sablier_factory.build.return_value = mock_client + + def mock_graphql( mocker, deposit_events=None, @@ -1539,8 +1554,8 @@ def mock_graphql( withdrawals=withdrawals_events, merkle_roots=merkle_roots_events, ) - mocker.patch.object(gql_factory, "build") - gql_factory.build.return_value = mock_client + mocker.patch.object(gql_octant_factory, "build") + gql_octant_factory.build.return_value = mock_client @pytest.fixture(scope="function") @@ -1550,7 +1565,10 @@ def mock_failing_gql( monkeypatch, ): # this URL is not called in this test, but it needs to be a proper URL - gql_factory.set_url({"SUBGRAPH_ENDPOINT": "http://domain.example:12345"}) + gql_octant_factory.set_url( + {"SUBGRAPH_ENDPOINT": "http://domain.example:12345"}, + SubgraphEndpoints.OCTANT_SUBGRAPH, + ) mocker.patch.object(GQLClient, "execute_sync") GQLClient.execute_sync.side_effect = TransportQueryError( diff --git a/backend/tests/engine/test_epoch_settings.py b/backend/tests/engine/test_epoch_settings.py index 9cf65d9ee0..1f6ff56aa3 100644 --- a/backend/tests/engine/test_epoch_settings.py +++ b/backend/tests/engine/test_epoch_settings.py @@ -34,9 +34,6 @@ from app.engine.projects.rewards.threshold.preliminary import ( PreliminaryProjectThreshold, ) -from app.engine.user import ( - DefaultWeightedAverageEffectiveDeposit, -) from app.engine.user.budget.preliminary import 
PreliminaryUserBudget from app.engine.user.budget.with_ppf import UserBudgetWithPPF from app.engine.user.effective_deposit.cut_off.cutoff_10glm import CutOff10GLM @@ -52,6 +49,10 @@ from app.engine.octant_rewards import LeftoverWithPPFAndUnusedMR from app.engine.octant_rewards.leftover.with_ppf import LeftoverWithPPF from app.engine.octant_rewards.leftover.default import PreliminaryLeftover +from app.engine.user import DefaultWeightedAverageWithSablierTimebox +from app.engine.user.effective_deposit.weighted_average.default import ( + DefaultWeightedAverageEffectiveDeposit, +) def test_default_epoch_settings(): @@ -63,7 +64,6 @@ def test_default_epoch_settings(): IRE_PERCENT=OctantRewardsDefaultValues.IRE_PERCENT, TR_PERCENT=OctantRewardsDefaultValues.TR_PERCENT, ), - timebased_weights=TimebasedWithoutUnlocksWeights(), operational_cost=OpCostPercent(Decimal("0.25")), ppf=PPFCalculatorFromRewards(), community_fund=CommunityFundPercent(OctantRewardsDefaultValues.COMMUNITY_FUND), @@ -74,6 +74,9 @@ def test_default_epoch_settings(): projects_rewards=CappedQuadraticFundingProjectRewards(), projects_allocations=QuadraticFundingAllocations(), leftover=LeftoverWithPPFAndUnusedMR(), + effective_deposit=DefaultWeightedAverageWithSablierTimebox( + timebased_weights=TimebasedWithoutUnlocksWeights() + ), ) @@ -83,7 +86,6 @@ def test_epoch_1_settings(): check_settings( settings=settings, total_and_vanilla_individual_rewards=AllProceedsWithOperationalCost(), - timebased_weights=DefaultTimebasedWeights(), operational_cost=OpCostPercent(Decimal("0.20")), matched_rewards=PreliminaryMatchedRewards(), ppf=NotSupportedPPFCalculator(), @@ -94,6 +96,9 @@ def test_epoch_1_settings(): ), projects_allocations=PreliminaryProjectAllocations(), leftover=PreliminaryLeftover(), + effective_deposit=DefaultWeightedAverageEffectiveDeposit( + timebased_weights=DefaultTimebasedWeights() + ), ) @@ -103,7 +108,6 @@ def test_epoch_2_settings(): check_settings( settings=settings, total_and_vanilla_individual_rewards=PreliminaryTotalAndAllIndividualRewards(), - timebased_weights=TimebasedWithoutUnlocksWeights(), operational_cost=OpCostPercent(Decimal("0.25")), matched_rewards=PreliminaryMatchedRewards(), ppf=NotSupportedPPFCalculator(), @@ -114,6 +118,9 @@ def test_epoch_2_settings(): ), projects_allocations=PreliminaryProjectAllocations(), leftover=PreliminaryLeftover(), + effective_deposit=DefaultWeightedAverageEffectiveDeposit( + timebased_weights=TimebasedWithoutUnlocksWeights() + ), ) @@ -131,7 +138,6 @@ def test_epoch_3_settings(): matched_rewards=PercentageMatchedRewards( OctantRewardsDefaultValues.MATCHED_REWARDS_PERCENT ), - timebased_weights=TimebasedWithoutUnlocksWeights(), community_fund=CommunityFundPercent(OctantRewardsDefaultValues.COMMUNITY_FUND), ppf=PPFCalculatorFromRewards(), user_budget=UserBudgetWithPPF(), @@ -140,6 +146,9 @@ def test_epoch_3_settings(): ), projects_allocations=PreliminaryProjectAllocations(), leftover=LeftoverWithPPF(), + effective_deposit=DefaultWeightedAverageEffectiveDeposit( + timebased_weights=TimebasedWithoutUnlocksWeights() + ), ) @@ -157,7 +166,6 @@ def test_epoch_4_settings(): matched_rewards=PercentageMatchedRewards( OctantRewardsDefaultValues.MATCHED_REWARDS_PERCENT ), - timebased_weights=TimebasedWithoutUnlocksWeights(), community_fund=CommunityFundPercent(OctantRewardsDefaultValues.COMMUNITY_FUND), ppf=PPFCalculatorFromRewards(), user_budget=UserBudgetWithPPF(), @@ -166,6 +174,65 @@ def test_epoch_4_settings(): ), projects_allocations=QuadraticFundingAllocations(), 
leftover=LeftoverWithPPFAndUnusedMR(), + effective_deposit=DefaultWeightedAverageEffectiveDeposit( + timebased_weights=TimebasedWithoutUnlocksWeights() + ), + ) + + +def test_epoch_5_settings(): + register_epoch_settings() + settings = get_epoch_settings(5) + + check_settings( + settings=settings, + operational_cost=OpCostPercent(Decimal("0.25")), + total_and_vanilla_individual_rewards=PercentTotalAndAllIndividualRewards( + IRE_PERCENT=OctantRewardsDefaultValues.IRE_PERCENT, + TR_PERCENT=OctantRewardsDefaultValues.TR_PERCENT, + ), + matched_rewards=PercentageMatchedRewards( + OctantRewardsDefaultValues.MATCHED_REWARDS_PERCENT + ), + community_fund=CommunityFundPercent(OctantRewardsDefaultValues.COMMUNITY_FUND), + ppf=PPFCalculatorFromRewards(), + user_budget=UserBudgetWithPPF(), + projects_rewards=CappedQuadraticFundingProjectRewards( + projects_allocations=QuadraticFundingAllocations(), + ), + projects_allocations=QuadraticFundingAllocations(), + leftover=LeftoverWithPPFAndUnusedMR(), + effective_deposit=DefaultWeightedAverageEffectiveDeposit( + timebased_weights=TimebasedWithoutUnlocksWeights() + ), + ) + + +def test_epoch_6_settings(): + register_epoch_settings() + settings = get_epoch_settings(6) + + check_settings( + settings=settings, + operational_cost=OpCostPercent(Decimal("0.25")), + total_and_vanilla_individual_rewards=PercentTotalAndAllIndividualRewards( + IRE_PERCENT=OctantRewardsDefaultValues.IRE_PERCENT, + TR_PERCENT=OctantRewardsDefaultValues.TR_PERCENT, + ), + matched_rewards=PercentageMatchedRewards( + OctantRewardsDefaultValues.MATCHED_REWARDS_PERCENT + ), + community_fund=CommunityFundPercent(OctantRewardsDefaultValues.COMMUNITY_FUND), + ppf=PPFCalculatorFromRewards(), + user_budget=UserBudgetWithPPF(), + projects_rewards=CappedQuadraticFundingProjectRewards( + projects_allocations=QuadraticFundingAllocations(), + ), + projects_allocations=QuadraticFundingAllocations(), + leftover=LeftoverWithPPFAndUnusedMR(), + effective_deposit=DefaultWeightedAverageWithSablierTimebox( + timebased_weights=TimebasedWithoutUnlocksWeights() + ), ) @@ -174,14 +241,14 @@ def check_settings( settings, total_and_vanilla_individual_rewards, operational_cost, - timebased_weights, matched_rewards, ppf, community_fund, user_budget, projects_rewards, projects_allocations, - leftover + leftover, + effective_deposit, ): assert settings.octant_rewards.locked_ratio == DefaultLockedRatio() assert ( @@ -196,9 +263,7 @@ def check_settings( assert settings.user.budget == user_budget assert settings.user.effective_deposit.cut_off == CutOff10GLM() - assert settings.user.effective_deposit == DefaultWeightedAverageEffectiveDeposit( - timebased_weights=timebased_weights - ) + assert settings.user.effective_deposit == effective_deposit assert settings.project.rewards == projects_rewards assert settings.project.rewards.projects_allocations == projects_allocations diff --git a/backend/tests/engine/user/effective_deposit/test_weighted_average_with_sablier_timebox.py b/backend/tests/engine/user/effective_deposit/test_weighted_average_with_sablier_timebox.py new file mode 100644 index 0000000000..c1714ee40a --- /dev/null +++ b/backend/tests/engine/user/effective_deposit/test_weighted_average_with_sablier_timebox.py @@ -0,0 +1,359 @@ +from app.constants import ZERO_ADDRESS +from app.engine.user.effective_deposit import ( + UserEffectiveDepositPayload, + UserDeposit, + DepositSource, +) +from app.engine.user.effective_deposit.cut_off.cutoff_100glm import CutOff100GLM +from 
app.engine.user.effective_deposit.weighted_average.default_with_sablier_timebox import ( + DefaultWeightedAverageWithSablierTimebox, +) +from app.engine.user.effective_deposit.weighted_average.weights.timebased.default import ( + DefaultTimebasedWeights, +) +from tests.helpers.constants import ( + USER1_ADDRESS, + USER2_ADDRESS, + TWENTY_FOUR_HOURS_PERIOD, +) +from tests.helpers.mock_events_generator import MockEventGeneratorFactory + +EPOCH_START = 123 +EPOCH_END = 323 +EPOCH_DURATION = EPOCH_END - EPOCH_START +EVENT_GENERATOR_FACTORY = MockEventGeneratorFactory() + + +def create_payload( + deposits, epoch_start=EPOCH_START, epoch_end=EPOCH_END +) -> UserEffectiveDepositPayload: + generator = EVENT_GENERATOR_FACTORY.build(deposits) + events = generator.get_all_users_events() + return UserEffectiveDepositPayload(epoch_start, epoch_end, events) + + +def test_empty_deposits(): + deposits = {} + payload = create_payload(deposits) + uut = DefaultWeightedAverageWithSablierTimebox() + + result = uut.calculate_users_effective_deposits(payload) + + assert result[0] == [] + assert result[1] == 0 + + +def test_user_with_empty_events(): + events = {USER1_ADDRESS: []} + payload = UserEffectiveDepositPayload(EPOCH_START, EPOCH_END, events) + uut = DefaultWeightedAverageWithSablierTimebox() + + result = uut.calculate_users_effective_deposits(payload) + + assert result[0] == [UserDeposit(USER1_ADDRESS, 0, 0)] + assert result[1] == 0 + + +def test_lock_0GLM(): + deposits = {USER1_ADDRESS: [(EPOCH_START, 0, DepositSource.OCTANT)]} + payload = create_payload(deposits) + uut = DefaultWeightedAverageWithSablierTimebox() + + result = uut.calculate_users_effective_deposits(payload) + + assert result[0] == [UserDeposit(USER1_ADDRESS, 0, 0)] + assert result[1] == 0 + + +def test_lock_and_unlock_everything(): + deposits = { + USER1_ADDRESS: [ + (EPOCH_START, 100_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 50, -100_000000000_000000000, DepositSource.OCTANT), + ] + } + payload = create_payload(deposits) + uut = DefaultWeightedAverageWithSablierTimebox() + + result = uut.calculate_users_effective_deposits(payload) + + assert result[0] == [UserDeposit(USER1_ADDRESS, 0, 0)] + assert result[1] == 0 + + +def test_one_user_deposit_at_the_beginning_of_an_epoch(): + deposits = { + USER1_ADDRESS: [(EPOCH_START, 100_000000000_000000000, DepositSource.OCTANT)] + } + payload = create_payload(deposits) + uut = DefaultWeightedAverageWithSablierTimebox() + + result = uut.calculate_users_effective_deposits(payload) + + assert result[0] == [ + UserDeposit(USER1_ADDRESS, 100_000000000_000000000, 100_000000000_000000000) + ] + assert result[1] == 100_000000000_000000000 + + +def test_user_with_zero_address(): + deposits = { + ZERO_ADDRESS: [(EPOCH_START, 100_000000000_000000000, DepositSource.OCTANT)] + } + payload = create_payload(deposits) + uut = DefaultWeightedAverageWithSablierTimebox() + + result = uut.calculate_users_effective_deposits(payload) + + assert result[0] == [ + UserDeposit(ZERO_ADDRESS, 100_000000000_000000000, 100_000000000_000000000) + ] + assert result[1] == 100_000000000_000000000 + + +def test_one_user_deposit_below_locked_amount_cutoff(): + deposits = { + USER1_ADDRESS: [(EPOCH_START, 90_000000000_000000000, DepositSource.OCTANT)] + } + payload = create_payload(deposits) + uut = DefaultWeightedAverageWithSablierTimebox() + + result = uut.calculate_users_effective_deposits(payload) + + assert result[0] == [UserDeposit(USER1_ADDRESS, 0, 90_000000000_000000000)] + assert result[1] == 0 + + +def 
test_one_user_deposit_below_effective_deposit_10glm_cutoff(): + deposits = { + USER1_ADDRESS: [ + (EPOCH_START + 190, 100_000000000_000000000, DepositSource.OCTANT) + ] + } + payload = create_payload(deposits) + uut = DefaultWeightedAverageWithSablierTimebox() + + result = uut.calculate_users_effective_deposits(payload) + + assert result[0] == [UserDeposit(USER1_ADDRESS, 0, 100_000000000_000000000)] + assert result[1] == 0 + + +def test_one_user_deposit_below_effective_deposit_100glm_cutoff(): + deposits = { + USER1_ADDRESS: [(EPOCH_START, 90_000000000_000000000, DepositSource.OCTANT)] + } + payload = create_payload(deposits) + uut = DefaultWeightedAverageWithSablierTimebox(cut_off=CutOff100GLM()) + + result = uut.calculate_users_effective_deposits(payload) + + assert result[0] == [UserDeposit(USER1_ADDRESS, 0, 90_000000000_000000000)] + assert result[1] == 0 + + +def test_one_user_deposit_above_effective_deposit_100glm_cutoff(): + deposits = { + USER1_ADDRESS: [ + (EPOCH_START, 1000_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 190, -950_000000000_000000000, DepositSource.OCTANT), + ] + } + payload = create_payload(deposits) + uut = DefaultWeightedAverageWithSablierTimebox( + timebased_weights=DefaultTimebasedWeights(), cut_off=CutOff100GLM() + ) + + result = uut.calculate_users_effective_deposits(payload) + + assert result[0] == [ + UserDeposit(USER1_ADDRESS, 952_500000000_000000000, 50_000000000_000000000) + ] + assert result[1] == 952_500000000_000000000 + + +def test_user_deposits_cumulative_locks(): + deposits = { + USER1_ADDRESS: [ + (EPOCH_START, 100_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 100, 200_000000000_000000000, DepositSource.OCTANT), + ] + } + payload = create_payload(deposits) + uut = DefaultWeightedAverageWithSablierTimebox() + + result = uut.calculate_users_effective_deposits(payload) + + assert result[0] == [ + UserDeposit(USER1_ADDRESS, 200_000000000_000000000, 300_000000000_000000000) + ] + assert result[1] == 200_000000000_000000000 + + +def test_user_deposits_with_locks_and_unlocks(): + deposits = { + USER1_ADDRESS: [ + (EPOCH_START, 200_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 100, -100_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 150, 200_000000000_000000000, DepositSource.OCTANT), + ] + } + payload = create_payload(deposits) + uut = DefaultWeightedAverageWithSablierTimebox() + + result = uut.calculate_users_effective_deposits(payload) + + assert result[0] == [ + UserDeposit(USER1_ADDRESS, 150_000000000_000000000, 300_000000000_000000000) + ] + assert result[1] == 150_000000000_000000000 + + +def test_user_deposits_with_locks_and_unlocks_and_default_timebased_weights(): + deposits = { + USER1_ADDRESS: [ + (EPOCH_START, 200_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 100, -100_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 150, 200_000000000_000000000, DepositSource.OCTANT), + ] + } + payload = create_payload(deposits) + uut = DefaultWeightedAverageWithSablierTimebox( + timebased_weights=DefaultTimebasedWeights() + ) + + result = uut.calculate_users_effective_deposits(payload) + + assert result[0] == [ + UserDeposit(USER1_ADDRESS, 200_000000000_000000000, 300_000000000_000000000) + ] + assert result[1] == 200_000000000_000000000 + + +def test_multiple_users_deposits_timebased_without_unlocks(): + deposits = { + USER1_ADDRESS: [ + (EPOCH_START, 200_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 100, -100_000000000_000000000, DepositSource.OCTANT), + 
(EPOCH_START + 150, 200_000000000_000000000, DepositSource.OCTANT), + ], + USER2_ADDRESS: [ + (EPOCH_START, 1000_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 100, 100_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 150, 2000_000000000_000000000, DepositSource.OCTANT), + ], + } + payload = create_payload(deposits) + uut = DefaultWeightedAverageWithSablierTimebox() + + result = uut.calculate_users_effective_deposits(payload) + + assert result[0] == [ + UserDeposit(USER1_ADDRESS, 150_000000000_000000000, 300_000000000_000000000), + UserDeposit(USER2_ADDRESS, 1550_000000000_000000000, 3100_000000000_000000000), + ] + assert result[1] == 1700_000000000_000000000 + + +def test_multiple_users_deposits_default_timebased(): + deposits = { + USER1_ADDRESS: [ + (EPOCH_START, 200_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 100, -100_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 150, 200_000000000_000000000, DepositSource.OCTANT), + ], + USER2_ADDRESS: [ + (EPOCH_START, 1000_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 100, 100_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 150, 2000_000000000_000000000, DepositSource.OCTANT), + ], + } + payload = create_payload(deposits) + uut = DefaultWeightedAverageWithSablierTimebox( + timebased_weights=DefaultTimebasedWeights() + ) + + result = uut.calculate_users_effective_deposits(payload) + + assert result[0] == [ + UserDeposit(USER1_ADDRESS, 200_000000000_000000000, 300_000000000_000000000), + UserDeposit(USER2_ADDRESS, 1550_000000000_000000000, 3100_000000000_000000000), + ] + assert result[1] == 1750_000000000_000000000 + + +def test_user_deposits_with_locks_and_unlocks_but_events_within_timebox(): + deposits_with_timebox = { + USER1_ADDRESS: [ + (EPOCH_START + 100, 100_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 100, -100_000000000_000000000, DepositSource.SABLIER), + ( + EPOCH_START + 200, + 100_000000000_000000000, + DepositSource.OCTANT, + ), # Locks back within 24 hours, so no effect + (EPOCH_START + 300, 100_000000000_000000000, DepositSource.OCTANT), + ], + USER2_ADDRESS: [ + (EPOCH_START, 1000_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 100, 100_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 150, 2000_000000000_000000000, DepositSource.OCTANT), + ], + } + + counterpart_deposits = { + USER1_ADDRESS: [ + (EPOCH_START + 100, 100_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 300, 100_000000000_000000000, DepositSource.OCTANT), + ], + USER2_ADDRESS: [ + (EPOCH_START, 1000_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 100, 100_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 150, 2000_000000000_000000000, DepositSource.OCTANT), + ], + } + + payload_with_timebox = create_payload(deposits_with_timebox) + counterpart_payload = create_payload(counterpart_deposits) + + uut = DefaultWeightedAverageWithSablierTimebox( + timebased_weights=DefaultTimebasedWeights() + ) + + result_with_timebox = uut.calculate_users_effective_deposits(payload_with_timebox) + result_counterpart = uut.calculate_users_effective_deposits(counterpart_payload) + + assert result_with_timebox[0] == result_counterpart[0] + + +def test_user_deposits_with_locks_and_unlocks_but_events_not_within_timebox(): + deposits_with_timebox = { + USER1_ADDRESS: [ + (EPOCH_START, 200_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 100, -100_000000000_000000000, DepositSource.SABLIER), + ( + EPOCH_START + 100 + 
TWENTY_FOUR_HOURS_PERIOD, + 100_000000000_000000000, + DepositSource.OCTANT, + ), # Locks back but after 24 hours period so no effect + ], + USER2_ADDRESS: [ + (EPOCH_START, 1000_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 100, 100_000000000_000000000, DepositSource.OCTANT), + (EPOCH_START + 150, 2000_000000000_000000000, DepositSource.OCTANT), + ], + } + + payload_with_timebox = create_payload(deposits_with_timebox) + + uut = DefaultWeightedAverageWithSablierTimebox( + timebased_weights=DefaultTimebasedWeights() + ) + + result = uut.calculate_users_effective_deposits(payload_with_timebox) + + assert result[0] == [ + UserDeposit(USER1_ADDRESS, 0, 200000000000000000000), + UserDeposit(USER2_ADDRESS, 1550000000000000000000, 3100_000000000_000000000), + ] + assert result[1] == 1550000000000000000000 diff --git a/backend/tests/engine/user/effective_deposit/weighted_average/test_default_weighted_average.py b/backend/tests/engine/user/effective_deposit/weighted_average/test_default_weighted_average.py index 55bd6a2e6a..4f5e999b33 100644 --- a/backend/tests/engine/user/effective_deposit/weighted_average/test_default_weighted_average.py +++ b/backend/tests/engine/user/effective_deposit/weighted_average/test_default_weighted_average.py @@ -1,5 +1,4 @@ from app.constants import ZERO_ADDRESS -from app.engine.user import DefaultWeightedAverageEffectiveDeposit from app.engine.user.effective_deposit import ( UserEffectiveDepositPayload, UserDeposit, @@ -8,6 +7,9 @@ from app.engine.user.effective_deposit.weighted_average.weights.timebased.default import ( DefaultTimebasedWeights, ) +from app.engine.user.effective_deposit.weighted_average.default import ( + DefaultWeightedAverageEffectiveDeposit, +) from tests.helpers.constants import USER1_ADDRESS, USER2_ADDRESS from tests.helpers.mock_events_generator import MockEventGeneratorFactory diff --git a/backend/tests/helpers/constants.py b/backend/tests/helpers/constants.py index d38f05ab40..a36aedf85b 100644 --- a/backend/tests/helpers/constants.py +++ b/backend/tests/helpers/constants.py @@ -74,3 +74,8 @@ UQ_THRESHOLD_MAINNET = 15 TIMEOUT_LIST = set() + +ALICE_SABLIER_LOCKING_ADDRESS = "0xC8ef823f4f154415Bc4931071F53c61B4F979152" +BOB_SABLIER_LOCKING_ADDRESS = "0x7fc77B5C761F3FaE3BE3F2FA3F6fA3FA3fA3Fa3f" + +TWENTY_FOUR_HOURS_PERIOD = 24 * 60 * 60 diff --git a/backend/tests/helpers/gql_client.py b/backend/tests/helpers/gql_client.py index fcaad7ef75..0413b04715 100644 --- a/backend/tests/helpers/gql_client.py +++ b/backend/tests/helpers/gql_client.py @@ -1,5 +1,10 @@ from graphql import DocumentNode +from tests.helpers.constants import ( + ALICE_SABLIER_LOCKING_ADDRESS, + BOB_SABLIER_LOCKING_ADDRESS, +) + filters = { "lte": (lambda compared_value: (lambda v: v <= compared_value)), "lt": (lambda compared_value: (lambda v: v < compared_value)), @@ -175,3 +180,75 @@ def _bytes_to_lowercase(self): self.withdrawals = list(map(MockGQLClient._user_to_lower, self.withdrawals)) self.lockeds = list(map(MockGQLClient._user_to_lower, self.lockeds)) self.unlockeds = list(map(MockGQLClient._user_to_lower, self.unlockeds)) + + +class MockSablierGQLClient: + def execute(self, query: DocumentNode, variable_values=None): + recipient = variable_values.get("recipient") + if recipient is None: + payload = self._prepare_payload(ALICE_SABLIER_LOCKING_ADDRESS) + payload["streams"] += self._prepare_payload(BOB_SABLIER_LOCKING_ADDRESS)[ + "streams" + ] + else: + payload = self._prepare_payload(recipient) + + return payload + + def _prepare_payload(self, recipient): + 
result = {"streams": []} + + actions = [ + { + "addressA": "0x76273dcc41356e5f0c49bb68e525175dc7e83417", + "addressB": recipient, + "amountA": "10000000000000000000", + "amountB": None, + "category": "Create", + "hash": "0xe4395aa03aaf8bb3d2d8009106cc2a5049f9afde8a5b19bb70d3e19660eae43b", + "timestamp": "1726833047", + }, + { + "addressA": recipient, + "addressB": recipient, + "amountA": None, + "amountB": "355443302891933020", + "category": "Withdraw", + "hash": "0x685ec53bdcbaca88d87438b33f6c82b3720937126db1d3982cfd62a9bf71b138", + "timestamp": "1729075199", + }, + { + "addressA": "0x76273dcc41356e5f0c49bb68e525175dc7e83417", + "addressB": recipient, + "amountA": "9644339802130898030", + "amountB": "216894977168950", + "category": "Cancel", + "hash": "0x244c9de88860320b89575a0d8f62f9eb5c7ba4597947ac63f94b6ef0db354b83", + "timestamp": "1729076267", + }, + { + "addressA": recipient, + "addressB": recipient, + "amountA": None, + "amountB": "216894977168950", + "category": "Withdraw", + "hash": "0xcdf10032cf3bc74a255510e632d4e7fe876503bc2ec04c8d79dce714492ad11d", + "timestamp": "1729077035", + }, + ] + + if recipient in [ALICE_SABLIER_LOCKING_ADDRESS, BOB_SABLIER_LOCKING_ADDRESS]: + result = { + "streams": [ + { + "actions": actions, + "id": "0x3962f6585946823440d274ad7c719b02b49de51e-1-1147", + "intactAmount": "0", + "transferable": False, + "endTime": "1729077035", + "depositAmount": "10000000000000000000", + } + ] + } + + return result diff --git a/backend/tests/helpers/mock_events_generator.py b/backend/tests/helpers/mock_events_generator.py index 52feb40dd6..cef6026833 100644 --- a/backend/tests/helpers/mock_events_generator.py +++ b/backend/tests/helpers/mock_events_generator.py @@ -1,9 +1,13 @@ from collections import defaultdict from operator import attrgetter -from typing import List, Dict, Tuple +from typing import List, Dict from app.engine.user.effective_deposit import DepositEvent -from tests.helpers.subgraph.events import create_deposit_events +from tests.helpers.subgraph.events import ( + create_deposit_events, + EventDetailsWithSource, + EventDetails, +) class MockEventGenerator: @@ -25,7 +29,7 @@ def get_all_users_events(self) -> Dict[str, List[DepositEvent]]: class MockEventGeneratorFactory: def build( self, - events: Dict[str, List[Tuple[int, int]]], + events: Dict[str, List[EventDetails | EventDetailsWithSource]], ) -> MockEventGenerator: events_by_user = defaultdict(list) for event in create_deposit_events(events): diff --git a/backend/tests/helpers/subgraph/events.py b/backend/tests/helpers/subgraph/events.py index 891541a21c..1af2b6e1de 100644 --- a/backend/tests/helpers/subgraph/events.py +++ b/backend/tests/helpers/subgraph/events.py @@ -2,8 +2,12 @@ from datetime import datetime from itertools import chain, repeat, accumulate +from app.engine.user.effective_deposit import DepositSource from tests.helpers.constants import USER1_ADDRESS +EventDetails = Tuple[int, int] +EventDetailsWithSource = Tuple[int, int, DepositSource] + def generate_epoch_events( start=None, duration=1000, decision_window=500, first_epoch=1, epoches=5, **kwargs @@ -28,8 +32,10 @@ def generate_epoch_events( return events -# pass mapping user_address => [(timestamp, amount)] -def create_deposit_events(events: Dict[str, List[Tuple[int, int]]]): +# pass mapping user_address => [(timestamp, amount, source), ...] 
+def create_deposit_events( + events: Dict[str, List[EventDetails | EventDetailsWithSource]] +): def flatten(list_of_lists): return list(chain(*list_of_lists)) @@ -51,7 +57,7 @@ def create_user_deposit_events(user, events): return list(map(_tuple_to_event_dict, events_and_deposits)) -def _tuple_to_event_dict(tuple: Tuple[str, Tuple[int, int], int]): +def _tuple_to_event_dict(tuple: Tuple[str, EventDetailsWithSource | EventDetails, int]): user, event, deposit_before = tuple return create_deposit_event( user=user, @@ -59,6 +65,7 @@ def _tuple_to_event_dict(tuple: Tuple[str, Tuple[int, int], int]): timestamp=event[0], amount=str(abs(event[1])), deposit_before=str(deposit_before), + source=event[2] if len(event) == 3 else DepositSource.OCTANT, ) diff --git a/backend/tests/infrastracture/graph/test_gql_retry_backoff.py b/backend/tests/infrastracture/graph/test_gql_retry_backoff.py index f949720c83..9024c3e6c4 100644 --- a/backend/tests/infrastracture/graph/test_gql_retry_backoff.py +++ b/backend/tests/infrastracture/graph/test_gql_retry_backoff.py @@ -3,7 +3,7 @@ from gql import gql from gql.transport.exceptions import TransportQueryError -from app.extensions import gql_factory +from app.extensions import gql_octant_factory from app.infrastructure import Client as GQLClient @@ -30,7 +30,7 @@ def test_with_failure(mock_failing_gql): with pytest.raises( TransportQueryError, match="the chain was reorganized while executing the query" ): - gql_factory.build().execute(query) + gql_octant_factory.build().execute(query) assert ( GQLClient.execute_sync.call_count > 2 diff --git a/backend/tests/modules/common/test_sablier_events_mapper.py b/backend/tests/modules/common/test_sablier_events_mapper.py new file mode 100644 index 0000000000..9b408f38b8 --- /dev/null +++ b/backend/tests/modules/common/test_sablier_events_mapper.py @@ -0,0 +1,273 @@ +from typing import Dict + +import pytest + +from app.engine.user.effective_deposit import SablierEventType, DepositSource +from app.infrastructure.sablier.events import SablierStream +from app.modules.common.sablier_events_mapper import ( + process_to_locks_and_unlocks, + MappedEvents, + SablierEvent, + flatten_sablier_events, + FlattenStrategy, +) + + +@pytest.fixture +def sample_streams(): + return [ + MappedEvents( + locks=[ + SablierEvent( + __source=DepositSource.OCTANT, + depositBefore=100, + amount=50, + timestamp=123456789, + user="user1", + transactionHash="tx1", + type=SablierEventType.CREATE, + ), + SablierEvent( + __source=DepositSource.SABLIER, + depositBefore=200, + amount=75, + timestamp=123456790, + user="user2", + transactionHash="tx2", + type=SablierEventType.WITHDRAW, + ), + ], + unlocks=[ + SablierEvent( + __source=DepositSource.OCTANT, + depositBefore=300, + amount=25, + timestamp=123456791, + user="user3", + transactionHash="tx3", + type=SablierEventType.CANCEL, + ) + ], + ), + MappedEvents( + locks=[ + SablierEvent( + __source=DepositSource.SABLIER, + depositBefore=150, + amount=60, + timestamp=123456792, + user="user4", + transactionHash="tx4", + type=SablierEventType.CREATE, + ) + ], + unlocks=[ + SablierEvent( + __source=DepositSource.OCTANT, + depositBefore=250, + amount=30, + timestamp=123456793, + user="user5", + transactionHash="tx5", + type=SablierEventType.CANCEL, + ), + SablierEvent( + __source=DepositSource.SABLIER, + depositBefore=350, + amount=40, + timestamp=123456794, + user="user6", + transactionHash="tx6", + type=SablierEventType.WITHDRAW, + ), + ], + ), + ] + + +def create_action( + category: str, timestamp: int, amountA: 
int = 0, amountB: int = 0 +) -> Dict: + return { + "category": category, + "addressA": "0xSender", + "addressB": "0xReceiver", + "amountA": amountA, + "amountB": amountB, + "timestamp": timestamp, + "hash": f"hash_{timestamp}", + } + + +def test_empty_actions(): + sablier_streams = [SablierStream(actions=[], intactAmount=0)] + result = process_to_locks_and_unlocks(sablier_streams)[0] + assert len(result.locks) == 0 + assert len(result.unlocks) == 0 + + +def test_create_action(): + action = create_action(SablierEventType.CREATE, timestamp=100, amountA=100) + sablier_streams = [SablierStream(actions=[action], intactAmount=0)] + result = process_to_locks_and_unlocks(sablier_streams)[0] + + assert len(result.locks) == 1 + assert len(result.unlocks) == 0 + lock = result.locks[0] + assert lock["amount"] == 100 + assert lock["__typename"] == "Locked" + assert lock["depositBefore"] == 0 + assert lock["__source"] == "Sablier" + + +def test_withdraw_action(): + create_action_item = create_action( + SablierEventType.CREATE, timestamp=100, amountA=200 + ) + withdraw_action_item = create_action( + SablierEventType.WITHDRAW, timestamp=200, amountB=50 + ) + sablier_streams = [ + SablierStream( + actions=[create_action_item, withdraw_action_item], + intactAmount=0, + ) + ] + result = process_to_locks_and_unlocks(sablier_streams)[0] + + assert len(result.locks) == 1 + assert len(result.unlocks) == 1 + + lock = result.locks[0] + assert lock["amount"] == 200 + assert lock["depositBefore"] == 0 + assert lock["__source"] == "Sablier" + + unlock = result.unlocks[0] + assert unlock["amount"] == 50 + assert unlock["__typename"] == "Unlocked" + assert unlock["depositBefore"] == 200 + assert unlock["__source"] == "Sablier" + + +def test_cancel_action(): + create_action_item = create_action( + SablierEventType.CREATE, timestamp=100, amountA=150 + ) + cancel_action_item = create_action( + SablierEventType.CANCEL, timestamp=300, amountA=150, amountB=0 + ) + sablier_streams = [ + SablierStream( + actions=[create_action_item, cancel_action_item], + intactAmount=0, + ) + ] + result = process_to_locks_and_unlocks(sablier_streams)[0] + + assert len(result.locks) == 1 + assert len(result.unlocks) == 1 + + lock = result.locks[0] + assert lock["amount"] == 150 + assert lock["depositBefore"] == 0 + assert lock["__source"] == "Sablier" + + unlock = result.unlocks[0] + assert unlock["amount"] == 150 + assert unlock["__typename"] == "Unlocked" + assert unlock["depositBefore"] == 150 + assert unlock["__source"] == "Sablier" + + +def test_mixed_actions(): + actions = [ + create_action(SablierEventType.CREATE, timestamp=100, amountA=100), + create_action(SablierEventType.WITHDRAW, timestamp=150, amountB=50), + create_action(SablierEventType.CREATE, timestamp=200, amountA=200), + create_action(SablierEventType.CANCEL, timestamp=250, amountA=150, amountB=50), + ] + sablier_streams = [SablierStream(actions=actions, intactAmount=0)] + + result = process_to_locks_and_unlocks(sablier_streams)[0] + + assert len(result.locks) == 2 + assert len(result.unlocks) == 2 + + lock1 = result.locks[0] + assert lock1["amount"] == 100 + assert lock1["__typename"] == "Locked" + assert lock1["depositBefore"] == 0 + assert lock1["__source"] == "Sablier" + + unlock1 = result.unlocks[0] + assert unlock1["amount"] == 50 + assert unlock1["__typename"] == "Unlocked" + assert unlock1["depositBefore"] == 100 + assert unlock1["__source"] == "Sablier" + + lock2 = result.locks[1] + assert lock2["amount"] == 200 + assert lock2["__typename"] == "Locked" + assert 
lock2["depositBefore"] == 50 + assert lock2["__source"] == "Sablier" + + unlock2 = result.unlocks[1] + assert unlock2["amount"] == 150 + assert unlock2["__typename"] == "Unlocked" + assert unlock2["depositBefore"] == 250 + assert unlock2["__source"] == "Sablier" + + +def test_flatten_events_locks(sample_streams): + result = flatten_sablier_events(sample_streams, FlattenStrategy.LOCKS) + assert result == [ + sample_streams[0].locks[0], + sample_streams[0].locks[1], + sample_streams[1].locks[0], + ], "Should return only locks" + + +def test_flatten_events_unlocks(sample_streams): + result = flatten_sablier_events(sample_streams, FlattenStrategy.UNLOCKS) + assert result == [ + sample_streams[0].unlocks[0], + sample_streams[1].unlocks[0], + sample_streams[1].unlocks[1], + ], "Should return only unlocks" + + +def test_flatten_events_all(sample_streams): + result = flatten_sablier_events(sample_streams, FlattenStrategy.ALL) + + expected_result = [ + sample_streams[0].locks[0], + sample_streams[0].locks[1], + sample_streams[0].unlocks[0], + sample_streams[1].locks[0], + sample_streams[1].unlocks[0], + sample_streams[1].unlocks[1], + ] + + assert ( + result == expected_result + ), "Should return locks and unlocks in the order of each MappedEvents object" + + +def test_flatten_events_empty_stream(): + result = flatten_sablier_events([], FlattenStrategy.ALL) + assert result == [], "Should return an empty list for an empty input stream" + + +def test_flatten_events_no_locks(sample_streams): + for event in sample_streams: + event.locks = [] + result = flatten_sablier_events(sample_streams, FlattenStrategy.LOCKS) + assert result == [], "Should return an empty list when there are no locks" + + +def test_flatten_events_no_unlocks(sample_streams): + for event in sample_streams: + event.unlocks = [] + result = flatten_sablier_events(sample_streams, FlattenStrategy.UNLOCKS) + assert result == [], "Should return an empty list when there are no unlocks" diff --git a/backend/tests/modules/user/deposits/test_calculated_user_deposits.py b/backend/tests/modules/user/deposits/test_calculated_user_deposits.py index cc36b3e694..315a9a6159 100644 --- a/backend/tests/modules/user/deposits/test_calculated_user_deposits.py +++ b/backend/tests/modules/user/deposits/test_calculated_user_deposits.py @@ -2,7 +2,7 @@ from app.modules.common.time import from_timestamp_s from app.modules.history.dto import LockItem, OpType from app.modules.user.deposits.service.calculated import CalculatedUserDeposits -from tests.conftest import USER1_ADDRESS, mock_graphql +from tests.conftest import USER1_ADDRESS, mock_graphql, mock_sablier_graphql from tests.helpers.context import get_context @@ -62,6 +62,7 @@ def test_get_locks_by_timestamp(app, mocker, alice, mock_events_generator): }, ], ) + mock_sablier_graphql(mocker) timestamp_before = from_timestamp_s(1710719999) timestamp_after = from_timestamp_s(1710720001) @@ -100,6 +101,7 @@ def test_get_unlocks_by_timestamp(app, mocker, alice, mock_events_generator): }, ], ) + mock_sablier_graphql(mocker) timestamp_before = from_timestamp_s(1710719999) timestamp_after = from_timestamp_s(1710720001) diff --git a/backend/tests/modules/user/events_generator/test_epoch_events_generator.py b/backend/tests/modules/user/events_generator/test_epoch_events_generator.py index 9d2029b186..e38cc5274b 100644 --- a/backend/tests/modules/user/events_generator/test_epoch_events_generator.py +++ b/backend/tests/modules/user/events_generator/test_epoch_events_generator.py @@ -1,12 +1,17 @@ import pytest +from 
app.engine.user.effective_deposit import ( + DepositEvent, + EventType, + SablierEventType, + DepositSource, +) from app.extensions import db -from app.engine.user.effective_deposit import DepositEvent, EventType from app.infrastructure import database from app.modules.user.events_generator.service.db_and_graph import ( DbAndGraphEventsGenerator, ) -from tests.conftest import mock_graphql +from tests.conftest import mock_graphql, mock_sablier_graphql from tests.helpers import create_deposit_events, generate_epoch_events from tests.helpers.constants import ( ALICE_ADDRESS, @@ -15,6 +20,8 @@ USER1_ED, USER2_ED, USER3_ED, + ALICE_SABLIER_LOCKING_ADDRESS, + BOB_SABLIER_LOCKING_ADDRESS, ) from tests.helpers.context import get_context @@ -47,8 +54,26 @@ def events(dave): ) +@pytest.fixture() +def events_with_sablier_users(): + return create_deposit_events( + { + ALICE_ADDRESS: [(1000, 3_300), (1300, USER1_ED - 3300), (2300, 100)], + BOB_ADDRESS: [ + (1050, 400), + (1200, -200), + (1800, USER2_ED - 200), + (2000, 300), + ], + ALICE_SABLIER_LOCKING_ADDRESS: [(2200, 300), (3200, -200)], + BOB_SABLIER_LOCKING_ADDRESS: [(2200, 300), (3200, -200)], + } + ) + + def test_returns_locks_and_unlocks_for_first_epoch(mocker, events): mock_graphql(mocker, events, EPOCHS) + mock_sablier_graphql(mocker) context = get_context() expected = { ALICE_ADDRESS: [ @@ -117,6 +142,7 @@ def test_returns_locks_and_unlocks_for_second_epoch( mocker, dave, events, mock_pending_epoch_snapshot_db ): mock_graphql(mocker, events, EPOCHS) + mock_sablier_graphql(mocker) context = get_context(2, start=2000) expected = { ALICE_ADDRESS: [ @@ -193,6 +219,7 @@ def test_returns_events_with_one_element_if_deposit_is_gt_0(mocker, dave, events database.deposits.add(2, user, 300, 300) db.session.commit() mock_graphql(mocker, events, EPOCHS) + mock_sablier_graphql(mocker) context = get_context(3, start=3000) generator = DbAndGraphEventsGenerator() @@ -221,6 +248,8 @@ def test_returns_empty_list_if_there_is_one_event_with_deposit_eq_0( user = database.user.add_user(dave) database.deposits.add(3, user, 0, 0) mock_graphql(mocker, events, EPOCHS) + mock_sablier_graphql(mocker) + mock_sablier_graphql(mocker) context = get_context(4, start=4000) generator = DbAndGraphEventsGenerator() @@ -230,6 +259,7 @@ def test_returns_empty_list_if_there_is_one_event_with_deposit_eq_0( def test_returned_events_are_sorted_by_timestamp(mocker, events): mock_graphql(mocker, events, EPOCHS) + mock_sablier_graphql(mocker) context = get_context() generator = DbAndGraphEventsGenerator() @@ -242,3 +272,325 @@ def test_returned_events_are_sorted_by_timestamp(mocker, events): for _user, user_events in generator.get_all_users_events(context).items(): for a, b in zip(user_events, user_events[1:]): assert a.timestamp <= b.timestamp + + +@pytest.mark.parametrize( + "epoch_num, start, duration, expected", + [ + ( + 6, + 1000, + 1729095199, + { + ALICE_SABLIER_LOCKING_ADDRESS: [ + DepositEvent( + ALICE_SABLIER_LOCKING_ADDRESS, + EventType.LOCK, + timestamp=1000, + amount=0, + deposit_before=0, + ), + DepositEvent( + ALICE_SABLIER_LOCKING_ADDRESS, + EventType.LOCK, + timestamp=2200, + amount=300, + deposit_before=0, + ), + DepositEvent( + ALICE_SABLIER_LOCKING_ADDRESS, + EventType.UNLOCK, + timestamp=3200, + amount=200, + deposit_before=300, + ), + DepositEvent( + ALICE_SABLIER_LOCKING_ADDRESS, + EventType.LOCK, + timestamp=1726833047, + amount=10000000000000000000, + deposit_before=100, + source=DepositSource.SABLIER, + mapped_event=SablierEventType.CREATE, + ), + DepositEvent( 
+ ALICE_SABLIER_LOCKING_ADDRESS, + EventType.UNLOCK, + timestamp=1729075199, + amount=355443302891933020, + deposit_before=10000000000000000100, + source=DepositSource.SABLIER, + mapped_event=SablierEventType.WITHDRAW, + ), + DepositEvent( + ALICE_SABLIER_LOCKING_ADDRESS, + EventType.UNLOCK, + timestamp=1729076267, + amount=9644339802130898030, + deposit_before=9644556697108067080, + source=DepositSource.SABLIER, + mapped_event=SablierEventType.CANCEL, + ), + DepositEvent( + ALICE_SABLIER_LOCKING_ADDRESS, + EventType.UNLOCK, + timestamp=1729077035, + amount=216894977168950, + deposit_before=216894977169050, + source=DepositSource.SABLIER, + mapped_event=SablierEventType.WITHDRAW, + ), + ], + }, + ), + ], +) +def test_returns_sorted_events_from_sablier_and_octant_for_user( + mocker, events_with_sablier_users, epoch_num, start, duration, expected +): + events = events_with_sablier_users + + mock_graphql(mocker, events, EPOCHS) + mock_sablier_graphql(mocker) + context = get_context(epoch_num=epoch_num, start=start, duration=duration) + + generator = DbAndGraphEventsGenerator() + + assert ( + generator.get_user_events(context, ALICE_SABLIER_LOCKING_ADDRESS) + == expected[ALICE_SABLIER_LOCKING_ADDRESS] + ) + + +def test_returns_locks_and_unlocks_for_other_events(mocker, events_with_sablier_users): + events = events_with_sablier_users + + mock_graphql(mocker, events, EPOCHS) + mock_sablier_graphql(mocker) + context = get_context(epoch_num=6, start=0, duration=1829095199) + + expected = { + "0x70997970C51812dc3A010C7d01b50e0d17dc79C8": [ + DepositEvent( + user="0x70997970C51812dc3A010C7d01b50e0d17dc79C8", + type=EventType.LOCK, + timestamp=0, + amount=0, + deposit_before=0, + source=DepositSource.OCTANT, + mapped_event=None, + ), + DepositEvent( + user="0x70997970C51812dc3A010C7d01b50e0d17dc79C8", + type=EventType.LOCK, + timestamp=1050, + amount=400, + deposit_before=0, + source=DepositSource.OCTANT, + mapped_event=None, + ), + DepositEvent( + user="0x70997970C51812dc3A010C7d01b50e0d17dc79C8", + type=EventType.UNLOCK, + timestamp=1200, + amount=200, + deposit_before=400, + source=DepositSource.OCTANT, + mapped_event=None, + ), + DepositEvent( + user="0x70997970C51812dc3A010C7d01b50e0d17dc79C8", + type=EventType.LOCK, + timestamp=1800, + amount=5499999999999999999800, + deposit_before=200, + source=DepositSource.OCTANT, + mapped_event=None, + ), + DepositEvent( + user="0x70997970C51812dc3A010C7d01b50e0d17dc79C8", + type=EventType.LOCK, + timestamp=2000, + amount=300, + deposit_before=5500000000000000000000, + source=DepositSource.OCTANT, + mapped_event=None, + ), + ], + "0x7fc77B5C761F3FaE3BE3F2FA3F6fA3FA3fA3Fa3f": [ + DepositEvent( + user="0x7fc77B5C761F3FaE3BE3F2FA3F6fA3FA3fA3Fa3f", + type=EventType.LOCK, + timestamp=0, + amount=0, + deposit_before=0, + source=DepositSource.OCTANT, + mapped_event=None, + ), + DepositEvent( + user="0x7fc77B5C761F3FaE3BE3F2FA3F6fA3FA3fA3Fa3f", + type=EventType.LOCK, + timestamp=2200, + amount=300, + deposit_before=0, + source=DepositSource.OCTANT, + mapped_event=None, + ), + DepositEvent( + user="0x7fc77B5C761F3FaE3BE3F2FA3F6fA3FA3fA3Fa3f", + type=EventType.UNLOCK, + timestamp=3200, + amount=200, + deposit_before=300, + source=DepositSource.OCTANT, + mapped_event=None, + ), + DepositEvent( + user="0x7fc77B5C761F3FaE3BE3F2FA3F6fA3FA3fA3Fa3f", + type=EventType.LOCK, + timestamp=1726833047, + amount=10000000000000000000, + deposit_before=100, + source=DepositSource.SABLIER, + mapped_event=SablierEventType.CREATE, + ), + DepositEvent( + 
user="0x7fc77B5C761F3FaE3BE3F2FA3F6fA3FA3fA3Fa3f", + type=EventType.UNLOCK, + timestamp=1729075199, + amount=355443302891933020, + deposit_before=10000000000000000100, + source=DepositSource.SABLIER, + mapped_event=SablierEventType.WITHDRAW, + ), + DepositEvent( + user="0x7fc77B5C761F3FaE3BE3F2FA3F6fA3FA3fA3Fa3f", + type=EventType.UNLOCK, + timestamp=1729076267, + amount=9644339802130898030, + deposit_before=9644556697108067080, + source=DepositSource.SABLIER, + mapped_event=SablierEventType.CANCEL, + ), + DepositEvent( + user="0x7fc77B5C761F3FaE3BE3F2FA3F6fA3FA3fA3Fa3f", + type=EventType.UNLOCK, + timestamp=1729077035, + amount=216894977168950, + deposit_before=216894977169050, + source=DepositSource.SABLIER, + mapped_event=SablierEventType.WITHDRAW, + ), + ], + "0xC8ef823f4f154415Bc4931071F53c61B4F979152": [ + DepositEvent( + user="0xC8ef823f4f154415Bc4931071F53c61B4F979152", + type=EventType.LOCK, + timestamp=0, + amount=0, + deposit_before=0, + source=DepositSource.OCTANT, + mapped_event=None, + ), + DepositEvent( + user="0xC8ef823f4f154415Bc4931071F53c61B4F979152", + type=EventType.LOCK, + timestamp=2200, + amount=300, + deposit_before=0, + source=DepositSource.OCTANT, + mapped_event=None, + ), + DepositEvent( + user="0xC8ef823f4f154415Bc4931071F53c61B4F979152", + type=EventType.UNLOCK, + timestamp=3200, + amount=200, + deposit_before=300, + source=DepositSource.OCTANT, + mapped_event=None, + ), + DepositEvent( + user="0xC8ef823f4f154415Bc4931071F53c61B4F979152", + type=EventType.LOCK, + timestamp=1726833047, + amount=10000000000000000000, + deposit_before=100, + source=DepositSource.SABLIER, + mapped_event=SablierEventType.CREATE, + ), + DepositEvent( + user="0xC8ef823f4f154415Bc4931071F53c61B4F979152", + type=EventType.UNLOCK, + timestamp=1729075199, + amount=355443302891933020, + deposit_before=10000000000000000100, + source=DepositSource.SABLIER, + mapped_event=SablierEventType.WITHDRAW, + ), + DepositEvent( + user="0xC8ef823f4f154415Bc4931071F53c61B4F979152", + type=EventType.UNLOCK, + timestamp=1729076267, + amount=9644339802130898030, + deposit_before=9644556697108067080, + source=DepositSource.SABLIER, + mapped_event=SablierEventType.CANCEL, + ), + DepositEvent( + user="0xC8ef823f4f154415Bc4931071F53c61B4F979152", + type=EventType.UNLOCK, + timestamp=1729077035, + amount=216894977168950, + deposit_before=216894977169050, + source=DepositSource.SABLIER, + mapped_event=SablierEventType.WITHDRAW, + ), + ], + "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266": [ + DepositEvent( + user="0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", + type=EventType.LOCK, + timestamp=0, + amount=0, + deposit_before=0, + source=DepositSource.OCTANT, + mapped_event=None, + ), + DepositEvent( + user="0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", + type=EventType.LOCK, + timestamp=1000, + amount=3300, + deposit_before=0, + source=DepositSource.OCTANT, + mapped_event=None, + ), + DepositEvent( + user="0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", + type=EventType.LOCK, + timestamp=1300, + amount=1500000055376999996700, + deposit_before=3300, + source=DepositSource.OCTANT, + mapped_event=None, + ), + DepositEvent( + user="0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", + type=EventType.LOCK, + timestamp=2300, + amount=100, + deposit_before=1500000055377000000000, + source=DepositSource.OCTANT, + mapped_event=None, + ), + ], + } + + generator = DbAndGraphEventsGenerator() + + for user_address, expected_events in expected.items(): + user_events = generator.get_user_events(context, user_address) + assert user_events == 
expected_events + + assert generator.get_all_users_events(context) == expected diff --git a/backend/tests/modules/user/events_generator/test_unify_deposit_balances.py b/backend/tests/modules/user/events_generator/test_unify_deposit_balances.py new file mode 100644 index 0000000000..10f42dbdbf --- /dev/null +++ b/backend/tests/modules/user/events_generator/test_unify_deposit_balances.py @@ -0,0 +1,255 @@ +from typing import List + +import pytest + +from app.engine.user.effective_deposit import DepositEvent, EventType, DepositSource +from app.modules.user.events_generator.core import unify_deposit_balances + + +@pytest.mark.parametrize( + "events, expected", + [ + # Scenario 0: Just one lock is moved from the previous epoch + ( + [ + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=0, + amount=0, + deposit_before=1000, + ), + ], + [ + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=0, + amount=0, + deposit_before=1000, + ), + ], + ), + # Scenario 1: Simple lock from Octant followed by a lock from Sablier + ( + [ + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=0, + amount=0, + deposit_before=1000, + source=DepositSource.OCTANT, + ), # Starting event + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=1, + amount=500, + deposit_before=1000, + source=DepositSource.OCTANT, + ), + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=2, + amount=1000, + deposit_before=1500, + source=DepositSource.SABLIER, + ), + ], + [ + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=0, + amount=0, + deposit_before=1000, + ), + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=1, + amount=500, + deposit_before=1000, + source=DepositSource.OCTANT, + ), + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=2, + amount=1000, + deposit_before=3000, + source=DepositSource.SABLIER, + ), + ], + ), + # Scenario 2: Lock from Sablier followed by a lock from Octant + ( + [ + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=0, + amount=0, + deposit_before=1000, + ), # Starting event + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=1, + amount=1000, + deposit_before=2000, + source=DepositSource.SABLIER, + ), + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=2, + amount=500, + deposit_before=1000, + # Takes the balance from the previous event from Octant, not Sablier + source=DepositSource.OCTANT, + ), + ], + [ + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=0, + amount=0, + deposit_before=1000, + ), + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=1, + amount=1000, + deposit_before=3000, + source=DepositSource.SABLIER, + ), + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=2, + amount=500, + deposit_before=4000, + source=DepositSource.OCTANT, + ), + ], + ), + # Scenario 3: Mixed locks and unlocks from Sablier and Octant + ( + [ + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=0, + amount=0, + deposit_before=500, + source=DepositSource.OCTANT, + ), # Starting event + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=1, + amount=500, + deposit_before=500, + source=DepositSource.OCTANT, + ), + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=2, + amount=1000, + deposit_before=1000, + source=DepositSource.SABLIER, + ), + DepositEvent( + user="0x123", + type=EventType.UNLOCK, + timestamp=3, + amount=400, + deposit_before=2000, + # Takes the balance from the 
previous event from Sablier, not Octant + source=DepositSource.SABLIER, + ), + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=4, + amount=200, + deposit_before=1500, + # Takes the balance from the previous event from Octant, not Sablier + source=DepositSource.OCTANT, + ), + ], + [ + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=0, + amount=0, + deposit_before=500, + source=DepositSource.OCTANT, + ), + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=1, + amount=500, + deposit_before=500, + source=DepositSource.OCTANT, + ), + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=2, + amount=1000, + deposit_before=2000, + source=DepositSource.SABLIER, + ), + DepositEvent( + user="0x123", + type=EventType.UNLOCK, + timestamp=3, + amount=400, + deposit_before=3000, + source=DepositSource.SABLIER, + ), + DepositEvent( + user="0x123", + type=EventType.LOCK, + timestamp=4, + amount=200, + deposit_before=2600, + source=DepositSource.OCTANT, + ), + ], + ), + ], +) +def test_unify_deposit_balances( + events: List[DepositEvent], expected: List[DepositEvent] +): + """ + Test the unify_deposit_balances function with various event orders and types. + """ + result = unify_deposit_balances(events) + assert len(result) == len( + expected + ), "Number of events in result does not match expected." + for r, e in zip(result, expected): + assert ( + r.deposit_before == e.deposit_before + ), f"deposit_before mismatch: {r.deposit_before} != {e.deposit_before}" + assert ( + r.deposit_after == e.deposit_after + ), f"deposit_after mismatch: {r.deposit_after} != {e.deposit_after}" + assert r.amount == e.amount, f"amount mismatch: {r.amount} != {e.amount}" + assert r.type == e.type, f"type mismatch: {r.type} != {e.type}" + assert r.source == e.source, f"source mismatch: {r.source} != {e.source}" + assert r.user == e.user, f"user mismatch: {r.user} != {e.user}" + assert ( + r.timestamp == e.timestamp + ), f"timestamp mismatch: {r.timestamp} != {e.timestamp}" diff --git a/ci/argocd/contracts/master.env b/ci/argocd/contracts/master.env index 37bbad3a5b..6b2828272a 100644 --- a/ci/argocd/contracts/master.env +++ b/ci/argocd/contracts/master.env @@ -1,8 +1,8 @@ -BLOCK_NUMBER=6922385 +BLOCK_NUMBER=6984279 GLM_CONTRACT_ADDRESS=0x71432DD1ae7DB41706ee6a22148446087BdD0906 -AUTH_CONTRACT_ADDRESS=0x74105046Cdc99C961E749F12A006be91402aC389 -DEPOSITS_CONTRACT_ADDRESS=0x8f73ae171b1399edD1383a18C3cF4c5C8F435721 -EPOCHS_CONTRACT_ADDRESS=0x67e7A37f882653e94C9717e3C63765Aa401E0fC2 -PROPOSALS_CONTRACT_ADDRESS=0xD5a4f7f332688915466403c4e9bfB3cDDbFf539c -WITHDRAWALS_TARGET_CONTRACT_ADDRESS=0xF16b96e4707Cdf564e75eCB40E49Ce8BeB6cca47 -VAULT_CONTRACT_ADDRESS=0x04615C996871112BDd45492eF4e7740368c3e71e +AUTH_CONTRACT_ADDRESS=0x8F2d25b07B3035626dac9B77945E53672Ed6EBf6 +DEPOSITS_CONTRACT_ADDRESS=0x67A89181A6412b2216b7C6BbED8DE8195372144B +EPOCHS_CONTRACT_ADDRESS=0x06798fE71BC4fac13A470caD04f160246006968E +PROPOSALS_CONTRACT_ADDRESS=0x0f2cE1e68B5f6711E53da58C93833916fF56c72e +WITHDRAWALS_TARGET_CONTRACT_ADDRESS=0xCA7b8e49d21b9DE4952700392Dc293d64113400C +VAULT_CONTRACT_ADDRESS=0xA757314A6b939ae1F333f276Dc43133844A2741c diff --git a/ci/argocd/contracts/uat.env b/ci/argocd/contracts/uat.env index bdfe11eccc..d7a8ea2624 100644 --- a/ci/argocd/contracts/uat.env +++ b/ci/argocd/contracts/uat.env @@ -1,8 +1,8 @@ -BLOCK_NUMBER=6851295 +BLOCK_NUMBER=6982586 GLM_CONTRACT_ADDRESS=0x71432DD1ae7DB41706ee6a22148446087BdD0906 -AUTH_CONTRACT_ADDRESS=0xC3312A14BdBF97Aae2320ae3F81D7710326E4766 
-DEPOSITS_CONTRACT_ADDRESS=0x9422bbDa8ca726a1C8b3f16C08b9877cBdbD5100 -EPOCHS_CONTRACT_ADDRESS=0xe60942BD4C90B59EdADc222cc6CA8Fb28Ef697D2 -PROPOSALS_CONTRACT_ADDRESS=0xB09737E89102ECC6352BE7ddA42bCF84dcB9A811 -WITHDRAWALS_TARGET_CONTRACT_ADDRESS=0x352AB9226750365E3d6d0256E494CE35110073cC -VAULT_CONTRACT_ADDRESS=0x89da228fE0bFe8D55ee5af86252BAa2Baa06fF80 +AUTH_CONTRACT_ADDRESS=0xDBf4afCC7c4394C3679b0A9E53d736294D63C886 +DEPOSITS_CONTRACT_ADDRESS=0x2524b5FA6Ae221a540A52744661277796aF36791 +EPOCHS_CONTRACT_ADDRESS=0xC3E155B1b382F6766bA22f949D996EA10E342Cd3 +PROPOSALS_CONTRACT_ADDRESS=0xF4ffF4D6061A44097222B1C2176627F84F7696C9 +WITHDRAWALS_TARGET_CONTRACT_ADDRESS=0xcE997E5e2114dD32B89dA78802f29748EABfC9f5 +VAULT_CONTRACT_ADDRESS=0x83aCd40D6f3fA6f8927A7ABc34a56DD58C2B518A diff --git a/ci/argocd/templates/octant-application.yaml b/ci/argocd/templates/octant-application.yaml index 6e39116b4b..9094d6b0f4 100644 --- a/ci/argocd/templates/octant-application.yaml +++ b/ci/argocd/templates/octant-application.yaml @@ -15,7 +15,7 @@ spec: namespace: $DEPLOYMENT_ID sources: - repoURL: 'https://gitlab.com/api/v4/projects/48137258/packages/helm/devel' - targetRevision: 0.2.66 + targetRevision: 0.2.68 chart: octant helm: parameters: diff --git a/client/src/api/calls/userWinnings.ts b/client/src/api/calls/userWinnings.ts new file mode 100644 index 0000000000..6c1af2e690 --- /dev/null +++ b/client/src/api/calls/userWinnings.ts @@ -0,0 +1,15 @@ +import env from 'env'; +import apiService from 'services/apiService'; + +export type Response = { + winnings: { + amount: string; + dateAvailableForWithdrawal: string; + }[]; +}; + +export async function apiGetUserRaffleWinnings(address: string): Promise<Response> { + return apiService + .get(`${env.serverEndpoint}user/${address}/raffle/winnings`) + .then(({ data }) => data); +} diff --git a/client/src/api/queryKeys/index.ts b/client/src/api/queryKeys/index.ts index 223c15c404..fe480a2ff6 100644 --- a/client/src/api/queryKeys/index.ts +++ b/client/src/api/queryKeys/index.ts @@ -22,6 +22,7 @@ export const ROOTS: Root = { projectsDonors: 'projectsDonors', projectsEpoch: 'projectsEpoch', projectsIpfsResults: 'projectsIpfsResults', + raffleWinnings: 'raffleWinnings', rewardsRate: 'rewardsRate', searchResultsDetails: 'searchResultsDetails', upcomingBudget: 'upcomingBudget', @@ -74,6 +75,7 @@ export const QUERY_KEYS: QueryKeys = { ], projectsMetadataAccumulateds: ['projectsMetadataAccumulateds'], projectsMetadataPerEpoches: ['projectsMetadataPerEpoches'], + raffleWinnings: userAddress => [ROOTS.raffleWinnings, userAddress], rewardsRate: epochNumber => [ROOTS.rewardsRate, epochNumber.toString()], searchResults: ['searchResults'], searchResultsDetails: (address, epoch) => [ROOTS.searchResultsDetails, address, epoch.toString()], diff --git a/client/src/api/queryKeys/types.ts b/client/src/api/queryKeys/types.ts index 1ba6f05f85..1fc076887a 100644 --- a/client/src/api/queryKeys/types.ts +++ b/client/src/api/queryKeys/types.ts @@ -22,6 +22,7 @@ export type Root = { projectsDonors: 'projectsDonors'; projectsEpoch: 'projectsEpoch'; projectsIpfsResults: 'projectsIpfsResults'; + raffleWinnings: 'raffleWinnings'; rewardsRate: 'rewardsRate'; searchResultsDetails: 'searchResultsDetails'; upcomingBudget: 'upcomingBudget'; @@ -74,6 +75,7 @@ export type QueryKeys = { ) => [Root['projectsIpfsResults'], string, string]; projectsMetadataAccumulateds: ['projectsMetadataAccumulateds']; projectsMetadataPerEpoches: ['projectsMetadataPerEpoches']; + raffleWinnings: (userAddress: string) =>
[Root['raffleWinnings'], string]; rewardsRate: (epochNumber: number) => [Root['rewardsRate'], string]; searchResults: ['searchResults']; searchResultsDetails: ( diff --git a/client/src/components/Home/HomeGridCurrentGlmLock/HomeGridCurrentGlmLock.module.scss b/client/src/components/Home/HomeGridCurrentGlmLock/HomeGridCurrentGlmLock.module.scss index 7b6adc1d17..9bf59a9285 100644 --- a/client/src/components/Home/HomeGridCurrentGlmLock/HomeGridCurrentGlmLock.module.scss +++ b/client/src/components/Home/HomeGridCurrentGlmLock/HomeGridCurrentGlmLock.module.scss @@ -1,4 +1,5 @@ .root { + position: relative; padding: 1.4rem 2.4rem 2.4rem; @media #{$tablet-up} { @@ -20,3 +21,7 @@ min-height: 6.4rem; } } + +.didUserWinAnyRaffles { + justify-content: space-between; +} diff --git a/client/src/components/Home/HomeGridCurrentGlmLock/HomeGridCurrentGlmLock.tsx b/client/src/components/Home/HomeGridCurrentGlmLock/HomeGridCurrentGlmLock.tsx index 5d3210569f..79cbbcf973 100644 --- a/client/src/components/Home/HomeGridCurrentGlmLock/HomeGridCurrentGlmLock.tsx +++ b/client/src/components/Home/HomeGridCurrentGlmLock/HomeGridCurrentGlmLock.tsx @@ -12,10 +12,12 @@ import useMediaQuery from 'hooks/helpers/useMediaQuery'; import useCurrentEpoch from 'hooks/queries/useCurrentEpoch'; import useDepositValue from 'hooks/queries/useDepositValue'; import useEstimatedEffectiveDeposit from 'hooks/queries/useEstimatedEffectiveDeposit'; +import useUserRaffleWinnings from 'hooks/queries/useUserRaffleWinnings'; import useTransactionLocalStore from 'store/transactionLocal/store'; import getIsPreLaunch from 'utils/getIsPreLaunch'; import styles from './HomeGridCurrentGlmLock.module.scss'; +import RaffleWinnerBadge from './RaffleWinnerBadge'; import HomeGridCurrentGlmLockProps from './types'; const HomeGridCurrentGlmLock: FC = ({ className }) => { @@ -36,22 +38,31 @@ const HomeGridCurrentGlmLock: FC = ({ className }) const { data: estimatedEffectiveDeposit, isFetching: isFetchingEstimatedEffectiveDeposit } = useEstimatedEffectiveDeposit(); const { data: depositsValue, isFetching: isFetchingDepositValue } = useDepositValue(); + const { data: userRaffleWinnings, isFetching: isFetchingUserRaffleWinnings } = + useUserRaffleWinnings(); const isPreLaunch = getIsPreLaunch(currentEpoch); + const didUserWinAnyRaffles = !!userRaffleWinnings && userRaffleWinnings.sum > 0; return ( <> - + } + >
diff --git a/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlm/LockGlm.tsx b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlm/LockGlm.tsx index 283cbce4ad..235d7aba6b 100644 --- a/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlm/LockGlm.tsx +++ b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlm/LockGlm.tsx @@ -223,7 +223,7 @@ const LockGlm: FC = ({ currentMode, onCurrentModeChange, onCloseMo type={step === 3 ? 'success' : 'info'} /> ) : ( - + )} = ({ isVisible }) => { +const LockGlmBudget: FC = ({ currentMode, isVisible }) => { const { errors } = useFormikContext(); if (!isVisible) { @@ -17,6 +17,7 @@ const LockGlmBudget: FC = ({ isVisible }) => { return ( diff --git a/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudget/types.ts b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudget/types.ts index 34f7b64b31..172429206d 100644 --- a/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudget/types.ts +++ b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudget/types.ts @@ -1,3 +1,6 @@ +import { CurrentMode } from 'components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlm/types'; + export default interface LockGlmBudgetProps { + currentMode: CurrentMode; isVisible: boolean; } diff --git a/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/AvailableFundsGlm/AvailableFundsGlm.tsx b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/AvailableFundsGlm/AvailableFundsGlm.tsx new file mode 100644 index 0000000000..a9390afd0f --- /dev/null +++ b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/AvailableFundsGlm/AvailableFundsGlm.tsx @@ -0,0 +1,19 @@ +import React, { FC } from 'react'; + +import AvailableFundsGlmProps from './types'; + +const AvailableFundsGlm: FC = ({ + classNameSkeleton, + classNameBudgetValue, + isLoading, + value, +}) => + isLoading ? ( +
+ ) : ( +
+ {value} +
+ ); + +export default AvailableFundsGlm; diff --git a/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/AvailableFundsGlm/index.ts b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/AvailableFundsGlm/index.ts new file mode 100644 index 0000000000..ecfe7ce80e --- /dev/null +++ b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/AvailableFundsGlm/index.ts @@ -0,0 +1,2 @@ +// eslint-disable-next-line no-restricted-exports +export { default } from './AvailableFundsGlm'; diff --git a/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/AvailableFundsGlm/types.ts b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/AvailableFundsGlm/types.ts new file mode 100644 index 0000000000..5d1cb0623b --- /dev/null +++ b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/AvailableFundsGlm/types.ts @@ -0,0 +1,6 @@ +export default interface AvailableFundsGlmProps { + classNameBudgetValue?: string; + classNameSkeleton?: string; + isLoading: boolean; + value: string; +} diff --git a/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/LockGlmBudgetBox.tsx b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/LockGlmBudgetBox.tsx index 1713e0581d..9924d1cf86 100644 --- a/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/LockGlmBudgetBox.tsx +++ b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/LockGlmBudgetBox.tsx @@ -1,22 +1,29 @@ import cx from 'classnames'; +import { format } from 'date-fns'; import React, { FC, useMemo } from 'react'; import { useTranslation } from 'react-i18next'; import BoxRounded from 'components/ui/BoxRounded'; import useAvailableFundsGlm from 'hooks/helpers/useAvailableFundsGlm'; import useDepositValue from 'hooks/queries/useDepositValue'; +import useUserRaffleWinnings from 'hooks/queries/useUserRaffleWinnings'; import getFormattedGlmValue from 'utils/getFormattedGlmValue'; +import AvailableFundsGlm from './AvailableFundsGlm'; import styles from './LockGlmBudgetBox.module.scss'; import LockGlmBudgetBoxProps from './types'; const LockGlmBudgetBox: FC = ({ className, + currentMode, isWalletBalanceError, isCurrentlyLockedError, }) => { const { data: depositsValue, isFetching: isFetchingDepositValue } = useDepositValue(); - const { data: availableFundsGlm, isFetched: isFetchedAvailableFundsGlm } = useAvailableFundsGlm(); + const { data: availableFundsGlm, isFetching: isFetchingAvailableFundsGlm } = + useAvailableFundsGlm(); + const { data: userRaffleWinnings, isFetching: isFetchingUserRaffleWinnings } = + useUserRaffleWinnings(); const { t } = useTranslation('translation', { keyPrefix: 'components.home.homeGridCurrentGlmLock.modalLockGlm.lockGlmBudgetBox', @@ -27,10 +34,30 @@ const LockGlmBudgetBox: FC = ({ [depositsValue], ); - const availableFundsGlmString = getFormattedGlmValue({ - value: BigInt(availableFundsGlm ? availableFundsGlm!.value : 0), + const shouldRaffleWinningsBeDisplayed = + currentMode === 'unlock' && userRaffleWinnings && userRaffleWinnings.sum > 0; + const areFundsFetching = isFetchingAvailableFundsGlm || isFetchingUserRaffleWinnings; + + const secondRowValue = getFormattedGlmValue({ + value: shouldRaffleWinningsBeDisplayed + ? userRaffleWinnings?.sum + : BigInt(availableFundsGlm ? 
availableFundsGlm!.value : 0), }).fullString; + const secondRowLabel = useMemo(() => { + if (shouldRaffleWinningsBeDisplayed) { + const date = format( + parseInt(userRaffleWinnings?.winnings[0].dateAvailableForWithdrawal, 10) * 1000, + 'd LLL y', + ); + return userRaffleWinnings?.winnings.length > 1 + ? t('raffleWinnings.multipleWins') + : t('raffleWinnings.oneWin', { date }); + } + return t('walletBalance'); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [shouldRaffleWinningsBeDisplayed, userRaffleWinnings?.winnings.length]); + return ( = ({ isVertical >
-
{t('currentlyLocked')}
+
+ {t(shouldRaffleWinningsBeDisplayed ? 'availableToUnlock' : 'currentlyLocked')} +
{isFetchingDepositValue ? (
) : ( @@ -54,17 +83,13 @@ const LockGlmBudgetBox: FC = ({ )}
-
{t('walletBalance')}
- {!isFetchedAvailableFundsGlm ? ( -
- ) : ( -
- {availableFundsGlmString} -
- )} +
{secondRowLabel}
+
); diff --git a/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/types.ts b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/types.ts index 182be764b7..932e040ece 100644 --- a/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/types.ts +++ b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmBudgetBox/types.ts @@ -1,5 +1,8 @@ +import { CurrentMode } from 'components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlm/types'; + export default interface LockGlmBudgetBoxProps { className?: string; + currentMode: CurrentMode; isCurrentlyLockedError?: boolean; isWalletBalanceError?: boolean; } diff --git a/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmTabs/LockGlmTabs.tsx b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmTabs/LockGlmTabs.tsx index dc6615d128..6a8878334e 100644 --- a/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmTabs/LockGlmTabs.tsx +++ b/client/src/components/Home/HomeGridCurrentGlmLock/ModalLockGlm/LockGlmTabs/LockGlmTabs.tsx @@ -10,6 +10,7 @@ import Button from 'components/ui/Button'; import ButtonProps from 'components/ui/Button/types'; import useAvailableFundsGlm from 'hooks/helpers/useAvailableFundsGlm'; import useDepositValue from 'hooks/queries/useDepositValue'; +import useUserRaffleWinnings from 'hooks/queries/useUserRaffleWinnings'; import { formatUnitsBigInt } from 'utils/formatUnitsBigInt'; import getFormattedGlmValue from 'utils/getFormattedGlmValue'; import { parseUnitsBigInt } from 'utils/parseUnitsBigInt'; @@ -38,6 +39,7 @@ const LockGlmTabs: FC = ({ const { data: availableFundsGlm } = useAvailableFundsGlm(); const { data: depositsValue } = useDepositValue(); + const { data: userRaffleWinnings } = useUserRaffleWinnings(); const isMaxDisabled = isLoading || step > 1; @@ -84,6 +86,8 @@ const LockGlmTabs: FC = ({ const isButtonDisabled = !formik.isValid || parseUnitsBigInt(formik.values.valueToDeposeOrWithdraw || '0') === 0n; + const didUserWinAnyRaffles = !!userRaffleWinnings && userRaffleWinnings.sum > 0; + const shouldRaffleLabelBeVisible = didUserWinAnyRaffles && currentMode === 'unlock'; return ( = ({ formik.errors.valueToDeposeOrWithdraw === 'cantUnlock' && styles.cantUnlock, )} > - {getFormattedGlmValue({ value: depositsValue || BigInt(0) }).value} + { + getFormattedGlmValue({ + value: shouldRaffleLabelBeVisible + ? userRaffleWinnings?.sum + : depositsValue || BigInt(0), + }).value + }
- {t('glmLockTabs.locked')} + {t(shouldRaffleLabelBeVisible ? 'glmLockTabs.timeLocked' : 'glmLockTabs.locked')}
= ({ > { getFormattedGlmValue({ - value: BigInt(availableFundsGlm ? availableFundsGlm?.value : 0), + value: + currentMode === 'lock' + ? BigInt(availableFundsGlm ? availableFundsGlm?.value : 0) + : depositsValue || 0n, }).value }
diff --git a/client/src/components/Home/HomeGridCurrentGlmLock/RaffleWinnerBadge/RaffleWinnerBadge.module.scss b/client/src/components/Home/HomeGridCurrentGlmLock/RaffleWinnerBadge/RaffleWinnerBadge.module.scss new file mode 100644 index 0000000000..089fa62f07 --- /dev/null +++ b/client/src/components/Home/HomeGridCurrentGlmLock/RaffleWinnerBadge/RaffleWinnerBadge.module.scss @@ -0,0 +1,58 @@ +$padding: 1.1rem; + +.root { + position: relative; + display: flex; + align-items: center; + justify-content: center; + height: 3.2rem; + width: 15rem; + background: $color-octant-green5; + border-radius: $border-radius-10; + font-size: $font-size-10; + color: $color-octant-green; + font-weight: $font-weight-bold; + padding: 0 $padding; + transition: opacity $transition-time-1; + + .img { + margin-right: $padding; + } + + &:not(.isVisible) { + opacity: 0; + } + + &.isVisible { + opacity: 1; + } +} + +.tooltipWrapper { + &:hover { + path { + stroke: $color-white !important; + } + } +} + +.tooltip { + white-space: pre-wrap; + width: 29.2rem !important; + + @media #{$phone-only} { + width: 25rem !important; + top: 2rem !important; + left: -14.4rem !important; + } + + @media #{$tablet-up} { + top: 2rem !important; + left: -17.4rem !important; + } + + @media #{$desktop-up} { + top: 3rem !important; + left: -16.4rem !important; + } +} diff --git a/client/src/components/Home/HomeGridCurrentGlmLock/RaffleWinnerBadge/RaffleWinnerBadge.tsx b/client/src/components/Home/HomeGridCurrentGlmLock/RaffleWinnerBadge/RaffleWinnerBadge.tsx new file mode 100644 index 0000000000..3a80d68134 --- /dev/null +++ b/client/src/components/Home/HomeGridCurrentGlmLock/RaffleWinnerBadge/RaffleWinnerBadge.tsx @@ -0,0 +1,86 @@ +import cx from 'classnames'; +import { format } from 'date-fns'; +import React, { FC } from 'react'; +import { useTranslation } from 'react-i18next'; + +import Svg from 'components/ui/Svg/Svg'; +import Tooltip from 'components/ui/Tooltip'; +import useGetValuesToDisplay from 'hooks/helpers/useGetValuesToDisplay'; +import useDepositValue from 'hooks/queries/useDepositValue'; +import useUserRaffleWinnings from 'hooks/queries/useUserRaffleWinnings'; +import { gift } from 'svg/misc'; +import getFormattedValueWithSymbolSuffix from 'utils/getFormattedValueWithSymbolSuffix'; +import { parseUnitsBigInt } from 'utils/parseUnitsBigInt'; + +import styles from './RaffleWinnerBadge.module.scss'; +import RaffleWinnerBadgeProps from './types'; + +const RaffleWinnerBadge: FC = ({ isVisible }) => { + const { t } = useTranslation('translation', { + keyPrefix: 'components.home.homeGridCurrentGlmLock.raffleWinnerBadge', + }); + const getValuesToDisplay = useGetValuesToDisplay(); + + const { data: depositsValue } = useDepositValue(); + const { data: userRaffleWinnings } = useUserRaffleWinnings(); + + const userRaffleWinningsSumFormatted = userRaffleWinnings + ? getValuesToDisplay({ + cryptoCurrency: 'golem', + showFiatPrefix: false, + valueCrypto: userRaffleWinnings.sum, + }) + : undefined; + + const userRaffleWinningsSumFloat = userRaffleWinningsSumFormatted + ? 
parseFloat(userRaffleWinningsSumFormatted.primary.replace(/\s/g, '')) + : 0; + const userRaffleWinningsSumFormattedWithSymbolSuffix = getFormattedValueWithSymbolSuffix({ + format: 'thousands', + precision: 0, + value: userRaffleWinningsSumFloat, + }); + + const tooltipWinningsText = userRaffleWinnings?.winnings.reduce((acc, curr, index) => { + const amountFormatted = getValuesToDisplay({ + cryptoCurrency: 'golem', + showCryptoSuffix: true, + valueCrypto: parseUnitsBigInt(curr.amount, 'wei'), + }); + const newRow = t('tooltipWinningRow', { + date: format(parseInt(curr.dateAvailableForWithdrawal, 10) * 1000, 'd LLL y'), + value: amountFormatted.primary, + }); + + return index > 0 ? `${acc}\n${newRow}` : newRow; + }, ''); + + const depositsValueFormatted = depositsValue + ? getValuesToDisplay({ + cryptoCurrency: 'golem', + showCryptoSuffix: true, + valueCrypto: depositsValue, + }) + : undefined; + + const tooltipText = + depositsValue && depositsValue > 0n && depositsValueFormatted + ? `${tooltipWinningsText}\n${t('tooltipCurrentBalanceRow', { value: depositsValueFormatted.primary })}` + : tooltipWinningsText; + + return ( +
+ + + {t('text', { value: userRaffleWinningsSumFormattedWithSymbolSuffix })} + +
+ ); +}; + +export default RaffleWinnerBadge; diff --git a/client/src/components/Home/HomeGridCurrentGlmLock/RaffleWinnerBadge/index.ts b/client/src/components/Home/HomeGridCurrentGlmLock/RaffleWinnerBadge/index.ts new file mode 100644 index 0000000000..e48ad17e7e --- /dev/null +++ b/client/src/components/Home/HomeGridCurrentGlmLock/RaffleWinnerBadge/index.ts @@ -0,0 +1,2 @@ +// eslint-disable-next-line no-restricted-exports +export { default } from './RaffleWinnerBadge'; diff --git a/client/src/components/Home/HomeGridCurrentGlmLock/RaffleWinnerBadge/types.ts b/client/src/components/Home/HomeGridCurrentGlmLock/RaffleWinnerBadge/types.ts new file mode 100644 index 0000000000..1f47deb068 --- /dev/null +++ b/client/src/components/Home/HomeGridCurrentGlmLock/RaffleWinnerBadge/types.ts @@ -0,0 +1,3 @@ +export default interface RaffleWinnerBadgeProps { + isVisible: boolean; +} diff --git a/client/src/components/shared/Grid/GridTile/GridTile.tsx b/client/src/components/shared/Grid/GridTile/GridTile.tsx index b3f146ad69..509c9b7261 100644 --- a/client/src/components/shared/Grid/GridTile/GridTile.tsx +++ b/client/src/components/shared/Grid/GridTile/GridTile.tsx @@ -9,12 +9,19 @@ const GridTile: FC = ({ titleSuffix, children, className, + classNameTitleWrapper, dataTest = 'GridTile', showTitleDivider, ...rest }) => (
-
+
{title}
diff --git a/client/src/components/shared/Grid/GridTile/types.ts b/client/src/components/shared/Grid/GridTile/types.ts index d8b00e1d53..a530799200 100644 --- a/client/src/components/shared/Grid/GridTile/types.ts +++ b/client/src/components/shared/Grid/GridTile/types.ts @@ -4,6 +4,7 @@ type GridTileProps = Omit< React.PropsWithChildren>, 'title' > & { + classNameTitleWrapper?: string; dataTest?: string; showTitleDivider?: boolean; title: string | ReactNode; diff --git a/client/src/hooks/queries/useUserRaffleWinnings.ts b/client/src/hooks/queries/useUserRaffleWinnings.ts new file mode 100644 index 0000000000..4c9c3787f3 --- /dev/null +++ b/client/src/hooks/queries/useUserRaffleWinnings.ts @@ -0,0 +1,28 @@ +import { useQuery, UseQueryResult } from '@tanstack/react-query'; +import { useAccount } from 'wagmi'; + +import { apiGetUserRaffleWinnings, Response } from 'api/calls/userWinnings'; +import { QUERY_KEYS } from 'api/queryKeys'; +import { parseUnitsBigInt } from 'utils/parseUnitsBigInt'; + +type ReturnType = { + sum: bigint; + winnings: Response['winnings']; +}; + +export default function useUserRaffleWinnings(): UseQueryResult { + const { address } = useAccount(); + + return useQuery({ + enabled: !!address, + queryFn: () => apiGetUserRaffleWinnings(address!), + queryKey: QUERY_KEYS.raffleWinnings(address!), + select: response => ({ + sum: response.winnings.reduce( + (acc, curr) => acc + parseUnitsBigInt(curr.amount, 'wei'), + BigInt(0), + ), + winnings: response.winnings, + }), + }); +} diff --git a/client/src/locales/en/translation.json b/client/src/locales/en/translation.json index 24d3c777e7..eaa9266172 100644 --- a/client/src/locales/en/translation.json +++ b/client/src/locales/en/translation.json @@ -114,9 +114,19 @@ "modalLockGlm": { "unlockGLM": "Unlock GLM", "lockGlmBudgetBox": { + "availableToUnlock": "Available to unlock", "currentlyLocked": "Currently Locked", - "walletBalance": "Wallet balance" + "walletBalance": "Wallet balance", + "raffleWinnings": { + "oneWin": "Locked until {{date}}", + "multipleWins": "Time locked winnings" + } } + }, + "raffleWinnerBadge": { + "text": "{{value}} GLM winnings", + "tooltipWinningRow": "{{value}} prize locked until {{date}}", + "tooltipCurrentBalanceRow": "{{value}} your locked balance" } }, "homeGridPersonalAllocation": { @@ -300,7 +310,8 @@ "useMax": "Use max", "amountToLock": "Amount to lock", "amountToUnlock": "Amount to unlock", - "locked": "Locked" + "locked": "Locked", + "timeLocked": "Time locked" }, "glmLockNotification": { "success": { diff --git a/client/src/svg/misc.ts b/client/src/svg/misc.ts index 53c7226fd2..435b1c76bf 100644 --- a/client/src/svg/misc.ts +++ b/client/src/svg/misc.ts @@ -30,6 +30,12 @@ export const cross: SvgImageConfig = { viewBox: '0 0 11 11', }; +export const gift: SvgImageConfig = { + markup: + '', + viewBox: '0 0 16 16', +}; + export const heart: SvgImageConfig = { markup: '', diff --git a/epoch-verifier/src/data/models.ts b/epoch-verifier/src/data/models.ts index 99abb31539..e9a9d5b648 100644 --- a/epoch-verifier/src/data/models.ts +++ b/epoch-verifier/src/data/models.ts @@ -220,6 +220,7 @@ export class FinalizedSimulationImpl implements Deserializable reward.matched !== BigInt(0)) .reduce((acc, reward) => acc + reward.amount, BigInt(0)) - return assertEq(claimed + rewards, context.epochInfo.totalWithdrawals) + return assertEq(claimed + rewards, context.rewards.totalWithdrawals) } const rewards = context.rewards diff --git a/localenv/apitest.yaml b/localenv/apitest.yaml index a1444a0ca6..21babe477c 
100644 --- a/localenv/apitest.yaml +++ b/localenv/apitest.yaml @@ -33,6 +33,8 @@ services: DELEGATION_SALT: "salt" DELEGATION_SALT_PRIMARY: "salt_primary" + SABLIER_MAINNET_SUBGRAPH_URL: "${SABLIER_MAINNET_SUBGRAPH_URL}" + depends_on: - anvil - graph-node diff --git a/localenv/localenv.yaml b/localenv/localenv.yaml index 080f48b5f2..8a62a85935 100644 --- a/localenv/localenv.yaml +++ b/localenv/localenv.yaml @@ -48,6 +48,8 @@ services: DELEGATION_SALT: "salt" DELEGATION_SALT_PRIMARY: "salt_primary" + SABLIER_MAINNET_SUBGRAPH_URL: "${SABLIER_MAINNET_SUBGRAPH_URL}" + depends_on: - backend-postgres - anvil diff --git a/unlocks-tracker/.gitignore b/unlocks-tracker/.gitignore new file mode 100644 index 0000000000..4245e4459c --- /dev/null +++ b/unlocks-tracker/.gitignore @@ -0,0 +1,79 @@ +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST +*.spec +pip-log.txt +pip-delete-this-directory.txt +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ +*.mo +*.pot +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal +instance/ +.webassets-cache +.scrapy +docs/_build/ +.pybuilder/ +target/ +.ipynb_checkpoints +profile_default/ +ipython_config.py +poetry.lock +.pdm.toml +.pdm-python +.pdm-build/ +__pypackages__/ +*.sage.py +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +.spyderproject +.spyproject +.ropeproject +/site +.mypy_cache/ +.dmypy.json +dmypy.json +.pyre/ +.pytype/ +cython_debug/ +.idea/ +resources/input +resources/output diff --git a/unlocks-tracker/README.md b/unlocks-tracker/README.md new file mode 100644 index 0000000000..defb8ab9fb --- /dev/null +++ b/unlocks-tracker/README.md @@ -0,0 +1,42 @@ +# Steps to do the analysis +1. Clone the repository +2. Place the file `addresses.txt` in the `resources/input` folder. +4. Run the following commands +```bash +poetry install +poetry shell +poetry run python run.py +``` +5. Read the analysis from the `resources/output` folder. + +# Algorithm Overview +This algorithm scans Etherscan and Subgraph to retrieve all transfers made by organizations and users involved in the Octant promotional event. It processes these transfers to identify addresses that should be removed from the allowlist by validating the coverage of transferred funds by appropriate locks and checking whether unlocks have exceeded the locks. + +## Steps: +1. **Retrieve Golem and Partner Transfers:** +The algorithm retrieves all outgoing transfers from the Golem Foundation and partner organizations. Using a predefined list of addresses, it fetches transfers via the Etherscan and Subgraph APIs and processes them for further validation. + +2. **Accumulate and Filter Transfers:** +It accumulates all outgoing transfers from both Golem and partner organizations, filtering out any redundant or unnecessary transfers. +These transfers are saved for reference and further processing. + +3. **Cross-Check with Original Allow List:** +The processed transfers are checked against the original allowlist to identify any discrepancies. Addresses found in the transfers but not on the allowlist (and vice versa) are flagged for review. + +4. 
**Lock Validation and Removal Decisions:** +For each address in the outgoing transfers, the algorithm retrieves the associated locks (funds restricted for a specific period) and validates whether the locked funds sufficiently cover the transferred amount. +If the locks do not cover the total amount transferred, the address is flagged for removal from the allowlist with the reason: "Transfer not covered by locks." (A minimal sketch of this check and the next one follows this list.) + +5. **Unlock Validation:** +If the locks cover the transfers, the algorithm then checks for unlocks (released locked funds). If the unlocks reduce the remaining locked amount below the transferred funds, the address is flagged for removal with the reason: "Unlocks exceed locks." + +6. **Special Case for KARLAGODETH_ADDRESS:** +When processing transfers from the KARLAGODETH_ADDRESS, the algorithm applies an error margin of 100 units when validating whether the locked funds cover the transferred amount. This tolerates slight discrepancies in lock coverage for transfers from this specific address. + +7. **Process Organization-Specific Transfers:** +The algorithm accumulates and validates transfers from partner organizations, ensuring that their locked funds cover the total transferred amount. +For each organization, the accepted amount of locked funds is decreased by the total amount the organization transferred onwards to other addresses. If the locks no longer cover the organization’s transfers after this adjustment, the organization’s address is flagged for removal from the allowlist with the reason: "Organization transfer not covered by locks." The unlock validation from step 5 is applied to organizations as well, using the same adjusted margin. + +8. **Final Output:** +After processing, the algorithm compiles a list of addresses (both individual users and organizations) that should be removed from the allowlist. Detailed reasons for removal are provided for each flagged address.
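The decision logic in steps 4–6 reduces to two integer comparisons over accumulated amounts. The sketch below is a simplified, standalone restatement of the checks that `unlocks_tracker/pipeline/validators.py` in this change implements over the subgraph event dicts; the function names and the sample amounts here are illustrative only, not data from the promotion.

```python
# Standalone sketch of the checks described in steps 4-6 above.
# Mirrors check_if_locks_covers_transfers and check_if_donation_is_decreased
# from unlocks_tracker/pipeline/validators.py; amounts are made-up samples.

def locks_cover_transfers(transferred: int, locks: list[int], error_margin: int = 0) -> bool:
    # Step 4: the sum of an address' locks must cover what it received,
    # allowing for the optional error margin (step 6 / organizations).
    return sum(locks) >= transferred - error_margin


def donation_is_decreased(transferred: int, locks: list[int], unlocks: list[int],
                          error_margin: int = 0) -> bool:
    # Step 5: after subtracting unlocks, the remaining locked amount
    # no longer covers the received transfer.
    return (sum(locks) - sum(unlocks)) < (transferred - error_margin)


if __name__ == "__main__":
    transferred = 1_000
    locks = [600, 500]   # 1_100 locked in total -> the transfer is covered
    unlocks = [300]      # only 800 still locked -> below the transferred amount

    assert locks_cover_transfers(transferred, locks)
    assert donation_is_decreased(transferred, locks, unlocks)
    # -> the address would be flagged with "Unlocks exceed locks"

    # Step 6: a 100-unit margin tolerates a small shortfall in coverage.
    assert locks_cover_transfers(1_000, [950], error_margin=100)
```

Keeping the decisions as plain arithmetic over accumulated sums makes each removal easy to audit against the intermediate JSON files the pipeline writes to `resources/output`.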
diff --git a/unlocks-tracker/pyproject.toml b/unlocks-tracker/pyproject.toml new file mode 100644 index 0000000000..54685950fa --- /dev/null +++ b/unlocks-tracker/pyproject.toml @@ -0,0 +1,19 @@ +[tool.poetry] +name = "unlocks-tracker" +version = "1.0.0" +description = "" +authors = ["Golem Foundation Contributors "] +license = "GPL-3.0" +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.11" +gql = { extras = ["aiohttp"], version = "^3.4.0" } +requests = "^2.28.2" +python-dotenv = "^1.0.0" +requests-toolbelt = "^1.0.0" + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/unlocks-tracker/unlocks_tracker/.env.sample b/unlocks-tracker/unlocks_tracker/.env.sample new file mode 100644 index 0000000000..2f5df426b4 --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/.env.sample @@ -0,0 +1,6 @@ +ETHERSCAN_API_KEY= +ORGANIZATIONS_ADDRESS= +END_OF_PROMOTION_YEAR= +END_OF_PROMOTION_MONTH= +END_OF_PROMOTION_DAY= +DEADLINE_IN_DAYS= diff --git a/unlocks-tracker/unlocks_tracker/__init__.py b/unlocks-tracker/unlocks_tracker/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/unlocks-tracker/unlocks_tracker/config.py b/unlocks-tracker/unlocks_tracker/config.py new file mode 100644 index 0000000000..11d8273150 --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/config.py @@ -0,0 +1,46 @@ +import os +from datetime import datetime + +from dotenv import load_dotenv + +load_dotenv() + + +class Config: + GOLEM_ADDRESSES = [ + "0xebe45812659d8a4fcc0703d478aba0ba56e49d9c", + "0x1e90474d2e83e7b7dd45553156beb316845e66a4", + ] + + ORGANIZATIONS_ADDRESS = os.getenv("ORGANIZATION_ADDRESSES").split(",") + KARLAGODETH_ADDRESS = ORGANIZATIONS_ADDRESS[2] + + EXCLUDED_ADDRESSES = [ + "0x879133fd79b7f48ce1c368b0fca9ea168eaf117c", + "0x00000000009726632680fb29d3f7a9734e3010e2", + ] # Deposit Contract, Rainbow Router + + ETHERSCAN_PARAMS = { + "module": "account", + "action": "tokentx", + "address": None, + "sort": "asc", + "apikey": os.getenv("ETHERSCAN_API_KEY"), + "startblock": None, + } + + ETHERSCAN_URL = "https://api.etherscan.io/api" + # We started the promotion action 78 days ago, the first outgoing transfer; + # TX hash: + # 0xe3e6b5db8651f67e7bfb3391af6d2c834344453eea79cfd6d562fcf0fdb52e55 + GOLEM_FIRST_OUT_TRANSFER_BLOCK = 20334443 + END_OF_PROMOTION_TS = int( + datetime( + int(os.getenv("END_OF_PROMOTION_YEAR")), + int(os.getenv("END_OF_PROMOTION_MONTH")), + int(os.getenv("END_OF_PROMOTION_DAY")), + ).timestamp() + ) + DEADLINE_IN_DAYS = int(os.getenv("DEADLINE_IN_DAYS")) + ORIGINAL_ALLOW_LIST_PATH = "resources/input/addresses.txt" + RESOURCES_OUTPUT_PATH = "resources/output" diff --git a/unlocks-tracker/unlocks_tracker/constants.py b/unlocks-tracker/unlocks_tracker/constants.py new file mode 100644 index 0000000000..9809648070 --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/constants.py @@ -0,0 +1,6 @@ +class ReasonsForRemoving: + TRANSFER_NOT_COVERED_BY_LOCKS = "Locks do not cover received transfers." + ORG_TRANSFERS_NOT_COVERED_BY_LOCKS = ( + "Locks do not cover accumulated transfers for an organization." + ) + UNLOCKS_EXCEED_LOCKS = "A part of locked GLMs has been unlocked." 
diff --git a/unlocks-tracker/unlocks_tracker/files.py b/unlocks-tracker/unlocks_tracker/files.py new file mode 100644 index 0000000000..a8bf3bff2a --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/files.py @@ -0,0 +1,22 @@ +import json +from typing import List + +from config import Config + + +def save_processing_stage(filename: str, content): + with open(filename, "w") as json_file: + json.dump(content, json_file, indent=4) + + +def save_output(addresses_to_remove_from_allowlist: List): + with open( + f"{Config.RESOURCES_OUTPUT_PATH}/addresses_to_remove.txt", "w" + ) as f: + for address in addresses_to_remove_from_allowlist: + f.write(str(address) + "\n") + + +def read_original_list() -> List[str]: + with open(Config.ORIGINAL_ALLOW_LIST_PATH, "r") as file: + return file.readlines() diff --git a/unlocks-tracker/unlocks_tracker/gql_integration/__init__.py b/unlocks-tracker/unlocks_tracker/gql_integration/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/unlocks-tracker/unlocks_tracker/gql_integration/execution.py b/unlocks-tracker/unlocks_tracker/gql_integration/execution.py new file mode 100644 index 0000000000..865f0e70e5 --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/gql_integration/execution.py @@ -0,0 +1,46 @@ +from datetime import timedelta + +from config import Config +from gql import gql +from gql_integration.gql_factory import gql_factory +from gql_integration.gql_queries import GQLQueries +from helpers import cast_to_int + + +def get_unlocks(address: str, from_ts: int): + query = gql(GQLQueries.GET_UNLOCKS) + + to_ts = Config.END_OF_PROMOTION_TS + + variables = { + "fromTimestamp": from_ts, + "toTimestamp": to_ts, + "userAddress": address, + } + + result = gql_factory.build().execute(query, variable_values=variables) + unlockeds = result["unlockeds"] + + unlockeds = cast_to_int(unlockeds, "depositBefore", "amount") + + return unlockeds + + +def get_locks(address: str, from_ts: int, latest_ts: int): + query = gql(GQLQueries.GET_LOCKS) + + to_ts = latest_ts + int( + timedelta(days=Config.DEADLINE_IN_DAYS).total_seconds() + ) + + variables = { + "fromTimestamp": from_ts, + "toTimestamp": to_ts, + "userAddress": address, + } + result = gql_factory.build().execute(query, variable_values=variables) + lockeds = result["lockeds"] + + lockeds = cast_to_int(lockeds, "depositBefore", "amount") + + return lockeds diff --git a/unlocks-tracker/unlocks_tracker/gql_integration/gql_factory.py b/unlocks-tracker/unlocks_tracker/gql_integration/gql_factory.py new file mode 100644 index 0000000000..62e1b0d155 --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/gql_integration/gql_factory.py @@ -0,0 +1,40 @@ +from gql import Client +from gql.transport.requests import RequestsHTTPTransport + + +class GQLWithRetryBackoff(Client): + """ + A retry wrapper for async transports. It overrides execute() + method to handle TransportQueryError and uses @backoff decorator + to make it retryable for given period of time. + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def execute(self, *args, **kwargs): + return super().execute(*args, **kwargs) + + +class GQLConnectionFactory: + def __init__(self): + self._url = None + + def set_url(self, url: str): + self._url = url + + def build(self): + if not self._url: + raise RuntimeError( + "GQL Connection Factory hasn't been properly initialised." 
+ ) + client = GQLWithRetryBackoff() + transport = RequestsHTTPTransport(url=self._url, timeout=2) + client.transport = transport + client.fetch_schema_from_transport = False + + return client + + +gql_factory = GQLConnectionFactory() +gql_factory.set_url("https://graph.mainnet.octant.app/subgraphs/name/octant") diff --git a/unlocks-tracker/unlocks_tracker/gql_integration/gql_queries.py b/unlocks-tracker/unlocks_tracker/gql_integration/gql_queries.py new file mode 100644 index 0000000000..4904d9a2e7 --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/gql_integration/gql_queries.py @@ -0,0 +1,42 @@ +class GQLQueries: + GET_LOCKS = """ + query GetLocks($fromTimestamp: Int!, $toTimestamp: Int!, + $userAddress: String!) { + lockeds( + orderBy: timestamp + where: { + timestamp_gte: $fromTimestamp, + timestamp_lt: $toTimestamp, + user: $userAddress + } + ) { + __typename + depositBefore + amount + timestamp + user + transactionHash + } + } + """ + + GET_UNLOCKS = """ + query GetUnlocks($fromTimestamp: Int!, $toTimestamp: Int!, + $userAddress: String!) { + unlockeds( + orderBy: timestamp + where: { + timestamp_gte: $fromTimestamp, + timestamp_lt: $toTimestamp, + user: $userAddress + } + ) { + __typename + depositBefore + amount + timestamp + user + transactionHash + } + } + """ diff --git a/unlocks-tracker/unlocks_tracker/helpers.py b/unlocks-tracker/unlocks_tracker/helpers.py new file mode 100644 index 0000000000..5a8751874e --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/helpers.py @@ -0,0 +1,16 @@ +from copy import deepcopy + +from schemas import LockedsSchema, UnlockedsSchema + + +def cast_to_int(original: UnlockedsSchema | LockedsSchema, *keys): + copy = deepcopy(original) + for row in copy: + for key in keys: + row[key] = int(row[key]) + + return copy + + +def are_addresses_the_same(*addresses): + return len(set(map(lambda word: word.lower(), addresses))) == 1 diff --git a/unlocks-tracker/unlocks_tracker/infrastructure/__init__.py b/unlocks-tracker/unlocks_tracker/infrastructure/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/unlocks-tracker/unlocks_tracker/infrastructure/etherscan_api.py b/unlocks-tracker/unlocks_tracker/infrastructure/etherscan_api.py new file mode 100644 index 0000000000..b2601c9cdc --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/infrastructure/etherscan_api.py @@ -0,0 +1,26 @@ +import requests +from config import Config +from helpers import are_addresses_the_same +from pipeline.daos import SenderDetails +from schemas import TransfersSchema + + +def retrieve_outgoing_transfers(*senders: SenderDetails) -> TransfersSchema: + outgoing_transfers = [] + for sender in senders: + params = Config.ETHERSCAN_PARAMS.copy() + + params["address"] = sender.address + params["startBlock"] = sender.start_block + + response = requests.get(Config.ETHERSCAN_URL, params=params) + + if response.status_code != 200: + raise Exception(f"Invalid status code: {response.status_code}") + + result = response.json()["result"] + for transaction in result: + if are_addresses_the_same(transaction["from"], sender.address): + outgoing_transfers.append(transaction) + + return outgoing_transfers diff --git a/unlocks-tracker/unlocks_tracker/pipeline/__init__.py b/unlocks-tracker/unlocks_tracker/pipeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/unlocks-tracker/unlocks_tracker/pipeline/accumulators.py b/unlocks-tracker/unlocks_tracker/pipeline/accumulators.py new file mode 100644 index 0000000000..eaee30bcae --- /dev/null +++ 
b/unlocks-tracker/unlocks_tracker/pipeline/accumulators.py @@ -0,0 +1,43 @@ +from schemas import AccumulatedTransfersSchema, TransfersSchema + + +def accumulate_transfers( + transfers: TransfersSchema, +) -> AccumulatedTransfersSchema: + accumulated_transfers = {} + for transfer in transfers: + transfer_ts = int(transfer["timeStamp"]) + + if transfer["to"] not in accumulated_transfers.keys(): + data = { + "value": 0, + "earliestTimestamp": transfer_ts, + "from": [], + "latestTimestamp": transfer_ts, + } + accumulated_transfers.update({transfer["to"]: data}) + + accumulated_transfers[transfer["to"]]["value"] += int( + transfer["value"] + ) + accumulated_transfers[transfer["to"]]["from"].append( + transfer["from"].lower() + ) + + if ( + transfer_ts + < accumulated_transfers[transfer["to"]]["earliestTimestamp"] + ): + accumulated_transfers[transfer["to"]][ + "earliestTimestamp" + ] = transfer_ts + + if ( + transfer_ts + > accumulated_transfers[transfer["to"]]["latestTimestamp"] + ): + accumulated_transfers[transfer["to"]][ + "latestTimestamp" + ] = transfer_ts + + return accumulated_transfers diff --git a/unlocks-tracker/unlocks_tracker/pipeline/daos.py b/unlocks-tracker/unlocks_tracker/pipeline/daos.py new file mode 100644 index 0000000000..fdf311e620 --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/pipeline/daos.py @@ -0,0 +1,18 @@ +from dataclasses import dataclass +from typing import List, Set + + +@dataclass +class Context: + processed_golem_out_transfers: List = None + from_partners_out_transfers: List = None + all_outgoing_transfers_no_companies: List = None + not_present_original_list: Set = None + addresses_to_remove_from_allowlist: List = None + organization_addresses_to_remove: List = None + + +@dataclass +class SenderDetails: + address: str + start_block: int # where we should start searching for transactions diff --git a/unlocks-tracker/unlocks_tracker/pipeline/filters.py b/unlocks-tracker/unlocks_tracker/pipeline/filters.py new file mode 100644 index 0000000000..3939f4d606 --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/pipeline/filters.py @@ -0,0 +1,69 @@ +from datetime import timedelta + +from config import Config +from schemas import TransferDetailsSchema, TransfersSchema + + +def filter_outgoing_transfers(transfers: TransfersSchema) -> TransfersSchema: + """ + Filters out transfers that are not GLM and are not sent + to excluded addresses. 
+ """ + + processed_transfers = [] + + for transfer in transfers: + if ( + transfer["tokenSymbol"] == "GLM" + and transfer["to"].lower() not in Config.EXCLUDED_ADDRESSES + ): + processed_transfer = transfer.copy() + processed_transfer["value"] = transfer["value"] + processed_transfers.append(processed_transfer) + + return processed_transfers + + +def filter_transfers_to_organizations_only(transfers): + return list( + filter( + lambda transfer: transfer["to"].lower() + in Config.ORGANIZATIONS_ADDRESS, + transfers, + ) + ) + + +def filter_transfers_from_organization( + address: str, + transfer_details: TransferDetailsSchema, + transfers: TransfersSchema, +): + deadline_ts = transfer_details["latestTimestamp"] + int( + timedelta(days=Config.DEADLINE_IN_DAYS).total_seconds() + ) + all_transfers_from_org = list( + filter( + lambda transfer: transfer["from"] == address + and int(transfer["timeStamp"]) + > transfer_details["earliestTimestamp"] + and int(int(transfer["timeStamp"]) <= deadline_ts), + transfers, + ) + ) + + return all_transfers_from_org + + +def remove_redundancy(transfers): + """ + Removes transfers that are sent to organizations or Golem addresses. + """ + return list( + filter( + lambda transfer: transfer["to"].lower() + not in Config.ORGANIZATIONS_ADDRESS + and transfer["to"].lower() not in Config.GOLEM_ADDRESSES, + transfers, + ) + ) diff --git a/unlocks-tracker/unlocks_tracker/pipeline/pipeline.py b/unlocks-tracker/unlocks_tracker/pipeline/pipeline.py new file mode 100644 index 0000000000..ce376b4d1a --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/pipeline/pipeline.py @@ -0,0 +1,11 @@ +from pipeline.daos import Context + + +class Pipeline: + def __init__(self, stages): + self.stages = stages + + def run(self, context: Context): + for stage in self.stages: + context = stage.execute(context) + return context diff --git a/unlocks-tracker/unlocks_tracker/pipeline/stages/__init__.py b/unlocks-tracker/unlocks_tracker/pipeline/stages/__init__.py new file mode 100644 index 0000000000..fe56c3a96a --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/pipeline/stages/__init__.py @@ -0,0 +1,16 @@ +from pipeline.stages.accumulate_and_check_locks import ( + AccumulateAndCheckLocksStage, +) +from pipeline.stages.accumulate_and_check_organizations import ( + AccumulateAndCheckOrganizationsStage, +) +from pipeline.stages.check_original_list import CheckOriginalListStage +from pipeline.stages.combine_transfers import CombineTransfersStage +from pipeline.stages.retrieve_golem_transfers import ( + RetrieveGolemTransfersStage, +) +from pipeline.stages.retrieve_partner_transfers import ( + RetrievePartnerTransfersStage, +) +from pipeline.stages.save_output import SaveAnalysisOutput +# flake8: noqa diff --git a/unlocks-tracker/unlocks_tracker/pipeline/stages/accumulate_and_check_locks.py b/unlocks-tracker/unlocks_tracker/pipeline/stages/accumulate_and_check_locks.py new file mode 100644 index 0000000000..5de0436889 --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/pipeline/stages/accumulate_and_check_locks.py @@ -0,0 +1,74 @@ +from config import Config +from constants import ReasonsForRemoving +from gql_integration.execution import get_locks, get_unlocks +from pipeline.accumulators import accumulate_transfers +from pipeline.daos import Context +from pipeline.validators import ( + check_if_donation_is_decreased, + check_if_locks_covers_transfers, +) + + +class AccumulateAndCheckLocksStage: + def execute(self, context: Context): + all_outgoing_transfers_no_companies = ( + 
context.all_outgoing_transfers_no_companies + ) + accumulated_transfers = accumulate_transfers( + all_outgoing_transfers_no_companies + ) + addresses_to_remove_from_allowlist = [] + + for to_address, transfer_details in accumulated_transfers.items(): + locks = get_locks( + to_address, + transfer_details["earliestTimestamp"], + transfer_details["latestTimestamp"], + ) + + if not locks or not check_if_locks_covers_transfers( + transfer_details, + locks, + error_margin=( + 100 + if Config.KARLAGODETH_ADDRESS in transfer_details["from"] + else 0 + ), + ): + addresses_to_remove_from_allowlist.append( + { + "address": to_address, + "reason": + ReasonsForRemoving.TRANSFER_NOT_COVERED_BY_LOCKS, + } + ) + continue + + unlocks = get_unlocks( + to_address, transfer_details["earliestTimestamp"] + ) + if unlocks and check_if_donation_is_decreased( + locks, unlocks, transfer_details + ): + addresses_to_remove_from_allowlist.append( + { + "address": to_address, + "reason": ReasonsForRemoving.UNLOCKS_EXCEED_LOCKS, + } + ) + + print( + "[DEBUG] Non-organization addresses " + "to remove from the allow list.", + "\n".join( + map( + lambda entry: entry["address"] + entry["reason"], + addresses_to_remove_from_allowlist, + ) + ), + ) + + context.addresses_to_remove_from_allowlist = ( + addresses_to_remove_from_allowlist + ) + return context diff --git a/unlocks-tracker/unlocks_tracker/pipeline/stages/accumulate_and_check_organizations.py b/unlocks-tracker/unlocks_tracker/pipeline/stages/accumulate_and_check_organizations.py new file mode 100644 index 0000000000..157263f044 --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/pipeline/stages/accumulate_and_check_organizations.py @@ -0,0 +1,85 @@ +from constants import ReasonsForRemoving +from gql_integration.execution import get_locks, get_unlocks +from pipeline.accumulators import accumulate_transfers +from pipeline.daos import Context +from pipeline.filters import ( + filter_transfers_from_organization, + filter_transfers_to_organizations_only, +) +from pipeline.validators import ( + check_if_donation_is_decreased, + check_if_locks_covers_transfers, +) + + +class AccumulateAndCheckOrganizationsStage: + def execute(self, context: Context): + all_processed_outgoing_transfers = ( + context.from_partners_out_transfers + + context.processed_golem_out_transfers + ) + transfers_only_to_organizations = ( + filter_transfers_to_organizations_only( + all_processed_outgoing_transfers + ) + ) + accumulated_transfers_to_organizations_only = accumulate_transfers( + transfers_only_to_organizations + ) + + organization_addresses_to_remove = [] + + for ( + org_address, + transfer_details, + ) in accumulated_transfers_to_organizations_only.items(): + all_transfers_by_org = filter_transfers_from_organization( + org_address, transfer_details, all_processed_outgoing_transfers + ) + + accumulated_transfer_by_org_only = accumulate_transfers( + all_transfers_by_org + ) + total_transfers = sum( + map( + lambda transfer: transfer["value"], + accumulated_transfer_by_org_only.values(), + ) + ) + + locks = get_locks( + org_address, + transfer_details["earliestTimestamp"], + transfer_details["latestTimestamp"], + ) + + if not check_if_locks_covers_transfers( + transfer_details, locks, error_margin=total_transfers + ): + organization_addresses_to_remove.append( + { + "organization_address": org_address, + "reason": + ReasonsForRemoving. 
+ ORG_TRANSFERS_NOT_COVERED_BY_LOCKS, + } + ) + continue + + unlocks = get_unlocks( + org_address, transfer_details["earliestTimestamp"] + ) + if unlocks and check_if_donation_is_decreased( + locks, unlocks, transfer_details, error_margin=total_transfers + ): + organization_addresses_to_remove.append( + { + "organization_address": org_address, + "reason": ReasonsForRemoving.UNLOCKS_EXCEED_LOCKS, + } + ) + + context.organization_addresses_to_remove = ( + organization_addresses_to_remove + ) + return context diff --git a/unlocks-tracker/unlocks_tracker/pipeline/stages/check_original_list.py b/unlocks-tracker/unlocks_tracker/pipeline/stages/check_original_list.py new file mode 100644 index 0000000000..96eb98c37c --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/pipeline/stages/check_original_list.py @@ -0,0 +1,32 @@ +from files import read_original_list +from pipeline.daos import Context + + +class CheckOriginalListStage: + def execute(self, context: Context): + all_outgoing_transfers = ( + context.processed_golem_out_transfers + + context.from_partners_out_transfers + ) + not_present_original_list = set() + + original = read_original_list() + + for address in original: + if address.strip() not in map( + lambda word: word["to"].lower(), all_outgoing_transfers + ): + print( + "[ERROR] Address that is present on " + "the original list but we didn't find it in our list", + address.strip(), + ) + + raise Exception( + f"Address that is present on the " + f"original list but we didn't " + f"find it in our list {address.strip()}" + ) + + context.not_present_original_list = not_present_original_list + return context diff --git a/unlocks-tracker/unlocks_tracker/pipeline/stages/combine_transfers.py b/unlocks-tracker/unlocks_tracker/pipeline/stages/combine_transfers.py new file mode 100644 index 0000000000..ee725c85ab --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/pipeline/stages/combine_transfers.py @@ -0,0 +1,21 @@ +from pipeline.daos import Context +from pipeline.filters import remove_redundancy + + +class CombineTransfersStage: + def execute(self, context: Context): + from_golem_out_transfers = context.processed_golem_out_transfers + from_partners_out_transfers = context.from_partners_out_transfers + + all_outgoing_transfers = ( + from_partners_out_transfers + from_golem_out_transfers + ) + all_outgoing_transfers_no_companies = remove_redundancy( + all_outgoing_transfers + ) + + context.all_outgoing_transfers_no_companies = ( + all_outgoing_transfers_no_companies + ) + + return context diff --git a/unlocks-tracker/unlocks_tracker/pipeline/stages/retrieve_golem_transfers.py b/unlocks-tracker/unlocks_tracker/pipeline/stages/retrieve_golem_transfers.py new file mode 100644 index 0000000000..58d9d09506 --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/pipeline/stages/retrieve_golem_transfers.py @@ -0,0 +1,26 @@ +from config import Config +from files import save_processing_stage +from infrastructure.etherscan_api import retrieve_outgoing_transfers +from pipeline.daos import Context, SenderDetails +from pipeline.filters import filter_outgoing_transfers + + +class RetrieveGolemTransfersStage: + def execute(self, context: Context): + senders_details = [ + SenderDetails(address, Config.GOLEM_FIRST_OUT_TRANSFER_BLOCK) + for address in Config.GOLEM_ADDRESSES + ] + out_transfers = retrieve_outgoing_transfers(*senders_details) + processed_golem_out_transfers = filter_outgoing_transfers( + out_transfers + ) + + context.processed_golem_out_transfers = processed_golem_out_transfers + + 
save_processing_stage( + "resources/output/from_golem_outgoing_transfers.json", + processed_golem_out_transfers, + ) + + return context diff --git a/unlocks-tracker/unlocks_tracker/pipeline/stages/retrieve_partner_transfers.py b/unlocks-tracker/unlocks_tracker/pipeline/stages/retrieve_partner_transfers.py new file mode 100644 index 0000000000..6201a7fb75 --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/pipeline/stages/retrieve_partner_transfers.py @@ -0,0 +1,67 @@ +from config import Config +from files import save_processing_stage +from infrastructure.etherscan_api import retrieve_outgoing_transfers +from pipeline.daos import Context, SenderDetails +from pipeline.filters import filter_outgoing_transfers +from schemas import TransfersSchema + + +class RetrievePartnerTransfersStage: + def _retrieve_organizations_txs_start_block( + self, transfers: TransfersSchema + ): + organizations_txs_blocks = {} + organization_addresses = set( + map(lambda word: word.lower(), Config.ORGANIZATIONS_ADDRESS) + ) + + for transfer in transfers: + recipient = transfer["to"].lower() + + if recipient not in organization_addresses: + continue + + block_number = int(transfer["blockNumber"]) + if ( + recipient not in organizations_txs_blocks + or block_number < organizations_txs_blocks[recipient] + ): + organizations_txs_blocks[recipient] = block_number + + return organizations_txs_blocks + + def execute(self, context: Context): + from_golem_out_transfers = context.processed_golem_out_transfers + organizations_txs_blocks = ( + self._retrieve_organizations_txs_start_block( + from_golem_out_transfers + ) + ) + + if len(organizations_txs_blocks.keys()) != len( + Config.ORGANIZATIONS_ADDRESS + ): + raise Exception("Not all organizations have been found") + + from_partners_out_transfers = [] + for address, start_block in organizations_txs_blocks.items(): + sender_details = SenderDetails(address, start_block) + organization_out_transfers = retrieve_outgoing_transfers( + sender_details + ) + processed_organization_out_transfers = filter_outgoing_transfers( + organization_out_transfers + ) + from_partners_out_transfers.extend( + processed_organization_out_transfers + ) + + context.from_partners_out_transfers = from_partners_out_transfers + + save_processing_stage( + f"{Config.RESOURCES_OUTPUT_PATH}/" + f"from_partners_outgoing_transfers.json", + from_partners_out_transfers, + ) + + return context diff --git a/unlocks-tracker/unlocks_tracker/pipeline/stages/save_output.py b/unlocks-tracker/unlocks_tracker/pipeline/stages/save_output.py new file mode 100644 index 0000000000..ead286bd98 --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/pipeline/stages/save_output.py @@ -0,0 +1,19 @@ +from files import save_output +from pipeline.daos import Context + + +class SaveAnalysisOutput: + def execute(self, context: Context): + print( + "Addresses to remove:", context.addresses_to_remove_from_allowlist + ) + print( + "Organizations to remove:", + context.organization_addresses_to_remove, + ) + + all_addresses_to_remove = ( + context.addresses_to_remove_from_allowlist + + context.organization_addresses_to_remove + ) + save_output(all_addresses_to_remove) diff --git a/unlocks-tracker/unlocks_tracker/pipeline/validators.py b/unlocks-tracker/unlocks_tracker/pipeline/validators.py new file mode 100644 index 0000000000..9e6f86f630 --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/pipeline/validators.py @@ -0,0 +1,35 @@ +from schemas import ( + AccumulatedTransfersSchema, + LockedsSchema, + UnlockedsSchema, + TransferDetailsSchema, +) 
+ + +def check_if_locks_covers_transfers( + acc_transfers: AccumulatedTransfersSchema, + locks: LockedsSchema, + error_margin: int = 0, +) -> bool: + if locks is not None: + acc_locks = sum(map(lambda lock: lock["amount"], locks)) + else: + acc_locks = 0 + + acc_transfers_amount = acc_transfers["value"] + + return acc_locks >= acc_transfers_amount - error_margin + + +def check_if_donation_is_decreased( + locks: LockedsSchema, + unlocks: UnlockedsSchema, + transfer_details: TransferDetailsSchema, + error_margin: int = 0, +): + acc_locks = sum(map(lambda lock: lock["amount"], locks)) + acc_unlocks = sum(map(lambda unlock: unlock["amount"], unlocks)) + + return (acc_locks - acc_unlocks) < ( + transfer_details["value"] - error_margin + ) diff --git a/unlocks-tracker/unlocks_tracker/run.py b/unlocks-tracker/unlocks_tracker/run.py new file mode 100644 index 0000000000..0af3d3c43b --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/run.py @@ -0,0 +1,32 @@ +from pipeline.daos import Context +from pipeline.pipeline import Pipeline +from pipeline.stages import ( + AccumulateAndCheckLocksStage, + AccumulateAndCheckOrganizationsStage, + CheckOriginalListStage, + CombineTransfersStage, + RetrieveGolemTransfersStage, + RetrievePartnerTransfersStage, + SaveAnalysisOutput, +) + + +def main(): + pipeline = Pipeline( + [ + RetrieveGolemTransfersStage(), + RetrievePartnerTransfersStage(), + CombineTransfersStage(), + CheckOriginalListStage(), + AccumulateAndCheckLocksStage(), + AccumulateAndCheckOrganizationsStage(), + SaveAnalysisOutput(), + ] + ) + context = Context() + + pipeline.run(context) + + +if __name__ == "__main__": + main() diff --git a/unlocks-tracker/unlocks_tracker/schemas.py b/unlocks-tracker/unlocks_tracker/schemas.py new file mode 100644 index 0000000000..2e71c41c45 --- /dev/null +++ b/unlocks-tracker/unlocks_tracker/schemas.py @@ -0,0 +1,7 @@ +from typing import Dict, List + +UnlockedsSchema = List[Dict[str, str | int]] +LockedsSchema = List[Dict[str, str | int]] +TransfersSchema = List[Dict[str, str]] +AccumulatedTransfersSchema = Dict[str, Dict[str, str | int]] +TransferDetailsSchema = Dict[str, int]