From 52348e2a0a251b5d557fe47fe8c80e1f07cf6d7b Mon Sep 17 00:00:00 2001 From: v0-e Date: Tue, 15 Oct 2024 13:20:19 +0100 Subject: [PATCH] Add Rust client Co-authored-by: Denis Kolodin Co-authored-by: Maksim Ryndin --- .../rs-build-fmt-clippy-audit-test.yml | 56 + README.md | 6 + v4-client-rs/.gitignore | 2 + v4-client-rs/Cargo.toml | 20 + v4-client-rs/LICENSE | 802 ++++++++++++ v4-client-rs/README.md | 64 + v4-client-rs/client/Cargo.toml | 59 + .../client/examples/account_endpoint.rs | 205 +++ .../client/examples/batch_cancel_orders.rs | 97 ++ .../client/examples/bot_basic_adder.rs | 271 ++++ .../client/examples/bot_trend_follower.rs | 388 ++++++ v4-client-rs/client/examples/cancel_order.rs | 71 ++ .../client/examples/close_all_positions.rs | 90 ++ .../client/examples/close_position.rs | 121 ++ v4-client-rs/client/examples/deposit.rs | 39 + .../client/examples/faucet_endpoint.rs | 54 + v4-client-rs/client/examples/live_price.rs | 83 ++ .../client/examples/market_endpoint.rs | 100 ++ .../client/examples/noble_transfer.rs | 116 ++ .../client/examples/place_order_long_term.rs | 65 + .../client/examples/place_order_short_term.rs | 63 + v4-client-rs/client/examples/send_token.rs | 40 + v4-client-rs/client/examples/sequencer.rs | 162 +++ .../client/examples/support/constants.rs | 1 + v4-client-rs/client/examples/support/mod.rs | 6 + .../client/examples/support/order_book.rs | 149 +++ .../client/examples/support/telemetry.rs | 37 + v4-client-rs/client/examples/transfer.rs | 37 + .../client/examples/utility_endpoint.rs | 42 + v4-client-rs/client/examples/validator_get.rs | 120 ++ .../client/examples/validator_post.rs | 82 ++ v4-client-rs/client/examples/wallet.rs | 102 ++ v4-client-rs/client/examples/websockets.rs | 89 ++ v4-client-rs/client/examples/withdraw.rs | 37 + .../client/examples/withdraw_other.rs | 75 ++ v4-client-rs/client/src/config.rs | 33 + v4-client-rs/client/src/faucet.rs | 84 ++ v4-client-rs/client/src/indexer/config.rs | 13 + v4-client-rs/client/src/indexer/mod.rs | 51 + .../src/indexer/rest/client/accounts.rs | 520 ++++++++ .../client/src/indexer/rest/client/markets.rs | 174 +++ .../client/src/indexer/rest/client/mod.rs | 58 + .../client/src/indexer/rest/client/utility.rs | 71 ++ .../client/src/indexer/rest/config.rs | 10 + v4-client-rs/client/src/indexer/rest/mod.rs | 13 + .../client/src/indexer/rest/options.rs | 157 +++ v4-client-rs/client/src/indexer/rest/types.rs | 303 +++++ .../client/src/indexer/sock/config.rs | 27 + .../client/src/indexer/sock/connector.rs | 316 +++++ v4-client-rs/client/src/indexer/sock/feed.rs | 101 ++ .../client/src/indexer/sock/messages.rs | 1106 +++++++++++++++++ v4-client-rs/client/src/indexer/sock/mod.rs | 176 +++ v4-client-rs/client/src/indexer/tokens.rs | 116 ++ v4-client-rs/client/src/indexer/types.rs | 1057 ++++++++++++++++ v4-client-rs/client/src/lib.rs | 49 + v4-client-rs/client/src/noble/config.rs | 31 + v4-client-rs/client/src/noble/mod.rs | 242 ++++ v4-client-rs/client/src/noble/tokens.rs | 43 + v4-client-rs/client/src/node/builder.rs | 68 + v4-client-rs/client/src/node/client/error.rs | 57 + .../client/src/node/client/methods.rs | 423 +++++++ v4-client-rs/client/src/node/client/mod.rs | 648 ++++++++++ v4-client-rs/client/src/node/config.rs | 37 + v4-client-rs/client/src/node/fee.rs | 43 + v4-client-rs/client/src/node/mod.rs | 17 + v4-client-rs/client/src/node/order.rs | 616 +++++++++ v4-client-rs/client/src/node/sequencer.rs | 130 ++ v4-client-rs/client/src/node/types.rs | 27 + v4-client-rs/client/src/node/utils.rs | 96 ++ 
v4-client-rs/client/src/node/wallet.rs | 180 +++ v4-client-rs/client/src/telemetry.rs | 72 ++ v4-client-rs/client/tests/env.rs | 118 ++ v4-client-rs/client/tests/mainnet.toml | 9 + v4-client-rs/client/tests/test_faucet.rs | 40 + .../client/tests/test_indexer_rest.rs | 304 +++++ .../client/tests/test_indexer_sock.rs | 635 ++++++++++ v4-client-rs/client/tests/test_noble.rs | 72 ++ v4-client-rs/client/tests/test_node.rs | 355 ++++++ .../client/tests/test_node_methods.rs | 228 ++++ .../client/tests/test_node_sequencer.rs | 66 + v4-client-rs/client/tests/testnet.toml | 17 + v4-client-rs/deny.toml | 60 + 82 files changed, 12820 insertions(+) create mode 100644 .github/workflows/rs-build-fmt-clippy-audit-test.yml create mode 100644 v4-client-rs/.gitignore create mode 100644 v4-client-rs/Cargo.toml create mode 100644 v4-client-rs/LICENSE create mode 100644 v4-client-rs/README.md create mode 100644 v4-client-rs/client/Cargo.toml create mode 100644 v4-client-rs/client/examples/account_endpoint.rs create mode 100644 v4-client-rs/client/examples/batch_cancel_orders.rs create mode 100644 v4-client-rs/client/examples/bot_basic_adder.rs create mode 100644 v4-client-rs/client/examples/bot_trend_follower.rs create mode 100644 v4-client-rs/client/examples/cancel_order.rs create mode 100644 v4-client-rs/client/examples/close_all_positions.rs create mode 100644 v4-client-rs/client/examples/close_position.rs create mode 100644 v4-client-rs/client/examples/deposit.rs create mode 100644 v4-client-rs/client/examples/faucet_endpoint.rs create mode 100644 v4-client-rs/client/examples/live_price.rs create mode 100644 v4-client-rs/client/examples/market_endpoint.rs create mode 100644 v4-client-rs/client/examples/noble_transfer.rs create mode 100644 v4-client-rs/client/examples/place_order_long_term.rs create mode 100644 v4-client-rs/client/examples/place_order_short_term.rs create mode 100644 v4-client-rs/client/examples/send_token.rs create mode 100644 v4-client-rs/client/examples/sequencer.rs create mode 100644 v4-client-rs/client/examples/support/constants.rs create mode 100644 v4-client-rs/client/examples/support/mod.rs create mode 100644 v4-client-rs/client/examples/support/order_book.rs create mode 100644 v4-client-rs/client/examples/support/telemetry.rs create mode 100644 v4-client-rs/client/examples/transfer.rs create mode 100644 v4-client-rs/client/examples/utility_endpoint.rs create mode 100644 v4-client-rs/client/examples/validator_get.rs create mode 100644 v4-client-rs/client/examples/validator_post.rs create mode 100644 v4-client-rs/client/examples/wallet.rs create mode 100644 v4-client-rs/client/examples/websockets.rs create mode 100644 v4-client-rs/client/examples/withdraw.rs create mode 100644 v4-client-rs/client/examples/withdraw_other.rs create mode 100644 v4-client-rs/client/src/config.rs create mode 100644 v4-client-rs/client/src/faucet.rs create mode 100644 v4-client-rs/client/src/indexer/config.rs create mode 100644 v4-client-rs/client/src/indexer/mod.rs create mode 100644 v4-client-rs/client/src/indexer/rest/client/accounts.rs create mode 100644 v4-client-rs/client/src/indexer/rest/client/markets.rs create mode 100644 v4-client-rs/client/src/indexer/rest/client/mod.rs create mode 100644 v4-client-rs/client/src/indexer/rest/client/utility.rs create mode 100644 v4-client-rs/client/src/indexer/rest/config.rs create mode 100644 v4-client-rs/client/src/indexer/rest/mod.rs create mode 100644 v4-client-rs/client/src/indexer/rest/options.rs create mode 100644 
v4-client-rs/client/src/indexer/rest/types.rs create mode 100644 v4-client-rs/client/src/indexer/sock/config.rs create mode 100644 v4-client-rs/client/src/indexer/sock/connector.rs create mode 100644 v4-client-rs/client/src/indexer/sock/feed.rs create mode 100644 v4-client-rs/client/src/indexer/sock/messages.rs create mode 100644 v4-client-rs/client/src/indexer/sock/mod.rs create mode 100644 v4-client-rs/client/src/indexer/tokens.rs create mode 100644 v4-client-rs/client/src/indexer/types.rs create mode 100644 v4-client-rs/client/src/lib.rs create mode 100644 v4-client-rs/client/src/noble/config.rs create mode 100644 v4-client-rs/client/src/noble/mod.rs create mode 100644 v4-client-rs/client/src/noble/tokens.rs create mode 100644 v4-client-rs/client/src/node/builder.rs create mode 100644 v4-client-rs/client/src/node/client/error.rs create mode 100644 v4-client-rs/client/src/node/client/methods.rs create mode 100644 v4-client-rs/client/src/node/client/mod.rs create mode 100644 v4-client-rs/client/src/node/config.rs create mode 100644 v4-client-rs/client/src/node/fee.rs create mode 100644 v4-client-rs/client/src/node/mod.rs create mode 100644 v4-client-rs/client/src/node/order.rs create mode 100644 v4-client-rs/client/src/node/sequencer.rs create mode 100644 v4-client-rs/client/src/node/types.rs create mode 100644 v4-client-rs/client/src/node/utils.rs create mode 100644 v4-client-rs/client/src/node/wallet.rs create mode 100644 v4-client-rs/client/src/telemetry.rs create mode 100644 v4-client-rs/client/tests/env.rs create mode 100644 v4-client-rs/client/tests/mainnet.toml create mode 100644 v4-client-rs/client/tests/test_faucet.rs create mode 100644 v4-client-rs/client/tests/test_indexer_rest.rs create mode 100644 v4-client-rs/client/tests/test_indexer_sock.rs create mode 100644 v4-client-rs/client/tests/test_noble.rs create mode 100644 v4-client-rs/client/tests/test_node.rs create mode 100644 v4-client-rs/client/tests/test_node_methods.rs create mode 100644 v4-client-rs/client/tests/test_node_sequencer.rs create mode 100644 v4-client-rs/client/tests/testnet.toml create mode 100644 v4-client-rs/deny.toml diff --git a/.github/workflows/rs-build-fmt-clippy-audit-test.yml b/.github/workflows/rs-build-fmt-clippy-audit-test.yml new file mode 100644 index 00000000..167830b7 --- /dev/null +++ b/.github/workflows/rs-build-fmt-clippy-audit-test.yml @@ -0,0 +1,56 @@ +name: "[v4-client-rs] Build, Fmt, Clippy, Audit, & Test" + +on: + pull_request: + paths: + - 'v4-client-rs/**' + push: + paths: + - 'v4-client-rs/**' + branches: + - main + - "release/*" + +jobs: + test: + + runs-on: ubuntu-latest + + steps: + - name: Checkout source code + uses: actions/checkout@v4 + with: + fetch-depth: 1 + + - name: Install Rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + components: rustfmt, clippy + + - name: Install protoc # for dev dependencies only + run: sudo apt install -y protobuf-compiler + + - name: Build + run: cargo build + + - name: Check formatting + run: cargo fmt -- --check + + - name: Linter + shell: bash + run: | + cargo clippy -- -D warnings + + - name: Install audit + shell: bash + run: | + cargo install cargo-deny + + - name: Security audit, licenses + shell: bash + run: | + cargo deny check licenses advisories sources + + - name: Test + run: cargo test diff --git a/README.md b/README.md index 48ab0631..a2680c02 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,12 @@ Python client for dYdX Chain. Developed and maintained by the Nethermind team. 
- [Saul M.](https://github.com/samtin0x) - [Piotr P.](https://github.com/piwonskp) +## v4-client-rs +Rust client for dYdX Chain. Developed and maintained by the Nethermind team. +- [Emanuel V.](https://github.com/v0-e) +- [Denis K.](https://github.com/therustmonk) +- [Maksim R.](https://github.com/maksimryndin) + ## v4-client-cpp (Third Party Client) To pull the latest C++ client, run `git submodule update --init --recursive` diff --git a/v4-client-rs/.gitignore b/v4-client-rs/.gitignore new file mode 100644 index 00000000..a9d37c56 --- /dev/null +++ b/v4-client-rs/.gitignore @@ -0,0 +1,2 @@ +target +Cargo.lock diff --git a/v4-client-rs/Cargo.toml b/v4-client-rs/Cargo.toml new file mode 100644 index 00000000..b4235010 --- /dev/null +++ b/v4-client-rs/Cargo.toml @@ -0,0 +1,20 @@ +[workspace] +resolver = "2" +members = [ + "client", +] + +[workspace.package] +version = "0.1.0" +edition = "2021" +license = "AGPL-3.0" + +[workspace.dependencies] +anyhow = "1" +async-trait = "0.1" +bigdecimal = { version = "0.4", features = ["serde"] } +derive_more = { version = "1", features = ["full"] } +log = "0.4" +thiserror = "1" +tokio = { version = "1.39", features = ["full"] } +v4-proto-rs = { git = "https://github.com/therustmonk/v4-chain", rev = "a6265bbf4cd9812382a89d32c9304c08551f7bae" } diff --git a/v4-client-rs/LICENSE b/v4-client-rs/LICENSE new file mode 100644 index 00000000..22304f63 --- /dev/null +++ b/v4-client-rs/LICENSE @@ -0,0 +1,802 @@ +Copyright (C) 2023 dYdX Trading Inc. + +Subject to your compliance with applicable law and the v4 Terms of Use, available at dydx.exchange/legal, you are granted the right to use the Program or Licensed Work (defined below) under the terms of the GNU Affero General Public License as set forth below; provided, however, that if you violate any such applicable law in your use of the Program or Licensed Work, all of your rights and licenses to use (including any rights to reproduce, distribute, install or modify) the Program or Licensed Work will automatically and immediately terminate. + + +The “Program” or “Licensed Work” shall mean any of the following: dydxprotocol/cosmos-sdk, dydxprotocol/cometbft, dydxprotocol/v4-chain, dydxprotocol/v4-clients, dydxprotocol/v4-web, dydxprotocol/v4-abacus, dydxprotocol/v4-localization, dydxprotocol/v4-documentation, and any dYdX or dYdX Trading Inc. repository reflecting a copy of, or link to, this license. + + +The GNU Affero General Public License +Version 3, 19 November 2007 + + +Copyright (C) 2007 Free Software Foundation, Inc. +Everyone is permitted to copy and distribute verbatim copies +of this license document, but changing it is not allowed. + + + Preamble + + + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + + The precise terms and conditions for copying, distribution and +modification follow. + + + + TERMS AND CONDITIONS + + + 0. Definitions. + + + "This License" refers to version 3 of the GNU Affero General Public License. + + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + + A "covered work" means either the unmodified Program or a work based +on the Program. + + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. 
+ + + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + + + 1. Source Code. + + + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + + + The Corresponding Source for a work in source code form is that +same work. + + + 2. Basic Permissions. + + + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. 
You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; Section 10 +makes it unnecessary. + + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + + + 4. Conveying Verbatim Copies. + + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with Section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + + 5. Conveying Modified Source Versions. + + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of Section 4, provided that you also meet all of these conditions: + + + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under Section + 7. This requirement modifies the requirement in Section 4 to + "keep intact all notices". + + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable Section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + + + 6. Conveying Non-Source Forms. + + + + You may convey a covered work in object code form under the terms +of Sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with Subsection 6b. + + + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under Subsection 6d. + + + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. 
In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + + + 7. Additional Terms. + + + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + + + a) Disclaiming warranty or limiting liability differently from the + terms of Sections 15 and 16 of this License; or + + + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of Section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + + + 8. Termination. + + + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of Section 11). + + + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. 
+ + + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under Section 10. + + + + 9. Acceptance Not Required for Having Copies. + + + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + + + 10. Automatic Licensing of Downstream Recipients. + + + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + + + 11. Patents. + + + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. 
+ + + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + + + 12. No Surrender of Others' Freedom. + + + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. 
For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + + + 13. Remote Network Interaction; Use with the GNU General Public License. + + + + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + + + 14. Revised Versions of this License. + + + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + + + 15. Disclaimer of Warranty. + + + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + + + 16. Limitation of Liability. 
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + 17. Interpretation of Sections 15 and 16. + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + +For more information about this software, see https://dydx.exchange. + Copyright (C) 2023 dYdX Trading Inc. diff --git a/v4-client-rs/README.md b/v4-client-rs/README.md new file mode 100644 index 00000000..84bcc694 --- /dev/null +++ b/v4-client-rs/README.md @@ -0,0 +1,64 @@ +# Rust client for dYdX v4 + +The crate implements interaction with the dYdX API. + +The following features are implemented: +- `NodeClient`, `IndexerClient` + WebSockets, `FaucetClient`, `NobleClient` +- Fully asynchronous implementation +- Telemetry +- Convenient builder for constructing requests +- Automatic WS connection support + +## Install + +To add the crate to your project, use the command: + +```sh +cargo add dydx-v4-rust +``` + +## Development + +The workspace consists of a single crate: +* `client` - provides connection management with dYdX, common types, and utils + +### Prerequisites + +* [Rust](https://www.rust-lang.org/tools/install) +* [cargo deny](https://github.com/EmbarkStudios/cargo-deny) +* [protoc](https://grpc.io/docs/protoc-installation/) for dev dependencies (`metrics-exporter-tcp`) + + +### Examples + +To run an example, use the `cargo` command as follows: + +```sh +cargo run --example bot_basic_adder +``` + +You can find the full set of examples in the [examples](client/examples) folder. + +### Code quality assurance + +Before publishing, make sure to run (and fix all warnings and errors): + +```sh +cargo fmt +cargo clippy +cargo deny check licenses advisories sources +``` + +### Documentation + +To generate the documentation, use the command: + +```sh +cargo doc -p dydx-v4-rust +``` + +## Acknowledgements + +Built by Nethermind: [@v0-e](https://github.com/v0-e), [@therustmonk](https://github.com/therustmonk), [@maksimryndin](https://github.com/maksimryndin) + +For more details about the grant, see [link](https://www.dydxgrants.com/grants/rust-trading-client). diff --git a/v4-client-rs/client/Cargo.toml b/v4-client-rs/client/Cargo.toml new file mode 100644 index 00000000..e6ffa235 --- /dev/null +++ b/v4-client-rs/client/Cargo.toml @@ -0,0 +1,59 @@ +[package] +name = "dydx-v4-rust" +version.workspace = true +edition.workspace = true +license.workspace = true + +description = "dYdX v4 asynchronous client." 
+homepage = "https://github.com/dydxprotocol/v4-clients/v4-client-rs" +repository = "https://github.com/dydxprotocol/v4-clients/v4-clients-rs" +readme = "README.md" + +# https://crates.io/categories +categories = ["api-bindings", "asynchronous", "finance"] +keywords = ["trading", "dex"] + +[features] +default = ["faucet", "noble", "telemetry"] +faucet = [] +noble = [ + "dep:ibc-proto" +] +telemetry = [ + "dep:metrics", +] + +[dependencies] +anyhow.workspace = true +async-trait.workspace = true +bigdecimal.workspace = true +bip32 = { version = "0.5", default-features = false, features = ["bip39", "alloc", "secp256k1"] } +cosmrs = "0.16" +chrono = { version = "0.4", features = ["serde"] } +derive_more.workspace = true +futures-util = "0.3" +governor = "0.6" +ibc-proto = { version = "0.46", optional = true } +log.workspace = true +rand = "0.8" +reqwest = { version = "0.12", features = ["json"] } +serde = { version = "1", features = ["derive"] } +serde_json = "1" +serde_with = "3.9" +strum = { version = "0.26", features = ["derive"] } +thiserror.workspace = true +tonic = { version = "0.11", features = ["tls", "tls-roots", "transport", "channel"] } +tokio.workspace = true +tokio-tungstenite = { version = "0.23", features = ["native-tls"] } +toml = "0.8" +tower = "0.4" +v4-proto-rs.workspace = true + +# Telemetry +metrics = { version = "0.23", optional = true } + +[dev-dependencies] +metrics-exporter-tcp = "0.10.0" +serial_test = "3.1.1" +tracing = "0.1" +tracing-subscriber = "0.3" diff --git a/v4-client-rs/client/examples/account_endpoint.rs b/v4-client-rs/client/examples/account_endpoint.rs new file mode 100644 index 00000000..1c4d427d --- /dev/null +++ b/v4-client-rs/client/examples/account_endpoint.rs @@ -0,0 +1,205 @@ +mod support; + +use anyhow::{Error, Result}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::indexer::{ + GetAggregationsOpts, GetFillsOpts, GetHistoricalPnlOpts, GetTradingRewardsOpts, + GetTransfersOpts, IndexerClient, ListOrdersOpts, ListPositionsOpts, MarketType, OrderSide, + PerpetualPositionStatus, Ticker, TradingRewardAggregationPeriod, +}; +use dydx_v4_rust::node::Wallet; +use support::constants::TEST_MNEMONIC; + +pub struct Rester { + indexer: IndexerClient, + wallet: Wallet, +} + +impl Rester { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let indexer = IndexerClient::new(config.indexer); + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + Ok(Self { indexer, wallet }) + } +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + let rester = Rester::connect().await?; + let account = rester.wallet.account_offline(0)?; + let indexer = rester.indexer; + + // Test values + let address = account.address(); + let subaccount = account.subaccount(0)?; + let parent_subaccount = subaccount.parent(); + + let subaccounts = indexer.accounts().get_subaccounts(address).await?; + tracing::info!("Subaccounts response: {:?}", subaccounts); + + let subaccount_resp = indexer.accounts().get_subaccount(&subaccount).await?; + tracing::info!("Subaccount response: {:?}", subaccount_resp); + + let asset_positions = indexer.accounts().get_asset_positions(&subaccount).await?; + tracing::info!("Asset positions response: {:?}", asset_positions); + + let pos_opts = ListPositionsOpts { + status: PerpetualPositionStatus::Closed.into(), + limit: Some(3), + ..Default::default() + }; + let positions = indexer + .accounts() + .list_positions(&subaccount, 
Some(pos_opts)) + .await?; + tracing::info!("Perpetual positions response: {:?}", positions); + + let trf_opts = GetTransfersOpts { + limit: Some(3), + ..Default::default() + }; + let transfers = indexer + .accounts() + .get_transfers(&subaccount, Some(trf_opts)) + .await?; + tracing::info!("Transfers response: {:?}", transfers); + + let ord_opts = ListOrdersOpts { + ticker: Some(Ticker::from("ETH-USD")), + limit: Some(3), + side: OrderSide::Buy.into(), + ..Default::default() + }; + let orders = indexer + .accounts() + .list_orders(&subaccount, Some(ord_opts)) + .await?; + tracing::info!("Orders response: {:?}", orders); + + let fill_opts = GetFillsOpts { + limit: Some(3), + market: Some(Ticker::from("ETH-USD")), + market_type: Some(MarketType::Perpetual), + ..Default::default() + }; + let fills = indexer + .accounts() + .get_fills(&subaccount, Some(fill_opts)) + .await?; + tracing::info!("Fills response: {:?}", fills); + + let pnl_opts = GetHistoricalPnlOpts { + limit: Some(3), + ..Default::default() + }; + let pnls = indexer + .accounts() + .get_historical_pnl(&subaccount, Some(pnl_opts)) + .await?; + tracing::info!("Historical PnLs response: {:?}", pnls); + + let rwds_opts = GetTradingRewardsOpts { + limit: Some(3), + ..Default::default() + }; + let rewards = indexer + .accounts() + .get_rewards(account.address(), Some(rwds_opts)) + .await?; + tracing::info!("Trading rewards response: {:?}", rewards); + + let aggr_opts = GetAggregationsOpts { + limit: Some(3), + ..Default::default() + }; + let aggregated = indexer + .accounts() + .get_rewards_aggregated( + address, + TradingRewardAggregationPeriod::Daily, + Some(aggr_opts), + ) + .await?; + tracing::info!("Trading rewards aggregated response: {:?}", aggregated); + + // Parent subaccount + let subaccount_resp = indexer + .accounts() + .get_parent_subaccount(&parent_subaccount) + .await?; + tracing::info!( + "Subaccount response (parent subaccount): {:?}", + subaccount_resp + ); + + let asset_positions = indexer + .accounts() + .get_parent_asset_positions(&parent_subaccount) + .await?; + tracing::info!( + "Asset positions response (parent subaccount): {:?}", + asset_positions + ); + + let pos_opts = ListPositionsOpts { + status: PerpetualPositionStatus::Closed.into(), + limit: Some(3), + ..Default::default() + }; + let positions = indexer + .accounts() + .list_parent_positions(&parent_subaccount, Some(pos_opts)) + .await?; + tracing::info!( + "Perpetual positions response (parent subaccount): {:?}", + positions + ); + + let trf_opts = GetTransfersOpts { + limit: Some(3), + ..Default::default() + }; + let transfers = indexer + .accounts() + .get_parent_transfers(&parent_subaccount, Some(trf_opts)) + .await?; + tracing::info!("Transfers response (parent subaccount): {:?}", transfers); + + let ord_opts = ListOrdersOpts { + ticker: Some(Ticker::from("ETH-USD")), + limit: Some(3), + side: OrderSide::Buy.into(), + ..Default::default() + }; + let orders = indexer + .accounts() + .list_parent_orders(&parent_subaccount, Some(ord_opts)) + .await?; + tracing::info!("Orders response (parent subaccount): {:?}", orders); + + let fill_opts = GetFillsOpts { + limit: Some(3), + market: Some(Ticker::from("ETH-USD")), + market_type: Some(MarketType::Perpetual), + ..Default::default() + }; + let fills = indexer + .accounts() + .get_parent_fills(&parent_subaccount, Some(fill_opts)) + .await?; + tracing::info!("Fills response (parent subaccount): {:?}", fills); + + let pnl_opts = GetHistoricalPnlOpts { + limit: Some(3), + ..Default::default() + }; + let pnls = 
indexer + .accounts() + .get_parent_historical_pnl(&parent_subaccount, Some(pnl_opts)) + .await?; + tracing::info!("Historical PnLs response (parent subaccount): {:?}", pnls); + + Ok(()) +} diff --git a/v4-client-rs/client/examples/batch_cancel_orders.rs b/v4-client-rs/client/examples/batch_cancel_orders.rs new file mode 100644 index 00000000..4655f1d1 --- /dev/null +++ b/v4-client-rs/client/examples/batch_cancel_orders.rs @@ -0,0 +1,97 @@ +mod support; +use anyhow::{Error, Result}; +use bigdecimal::BigDecimal; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::indexer::{ClientId, IndexerClient}; +use dydx_v4_rust::node::{ + NodeClient, OrderBuilder, OrderSide, Wallet, SHORT_TERM_ORDER_MAXIMUM_LIFETIME, +}; +use rand::thread_rng; +use std::str::FromStr; +use support::constants::TEST_MNEMONIC; +use tokio::time::{sleep, Duration}; +use v4_proto_rs::dydxprotocol::clob::{order::TimeInForce, OrderBatch}; + +const N_ORDERS: usize = 6; + +const ETH_USD_TICKER: &str = "ETH-USD"; + +pub struct OrderPlacer { + client: NodeClient, + indexer: IndexerClient, + wallet: Wallet, +} + +impl OrderPlacer { + pub async fn connect() -> Result<Self> { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let client = NodeClient::connect(config.node).await?; + let indexer = IndexerClient::new(config.indexer); + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + Ok(Self { + client, + indexer, + wallet, + }) + } +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + #[cfg(feature = "telemetry")] + support::telemetry::metrics_dashboard().await?; + let mut placer = OrderPlacer::connect().await?; + let mut account = placer.wallet.account(0, &mut placer.client).await?; + + let subaccount = account.subaccount(0)?; + + let market = placer + .indexer + .markets() + .get_perpetual_market(&ETH_USD_TICKER.into()) + .await?; + + let builder = OrderBuilder::new(market.clone(), subaccount.clone()) + .market(OrderSide::Buy, BigDecimal::from_str("0.001")?) + .price(100) + .reduce_only(false) + .time_in_force(TimeInForce::Unspecified); + + let mut client_ids = Vec::new(); + // Push some orders + for _id in 0..N_ORDERS { + // Short term orders have a maximum validity of 20 blocks + let height = placer.client.get_latest_block_height().await?; + let order_builder = builder.clone().until(height.ahead(10)); + + let (order_id, order) = + order_builder.build(ClientId::random_with_rng(&mut thread_rng()))?; + let client_id = order_id.client_id; + client_ids.push(client_id); + let tx_hash = placer.client.place_order(&mut account, order).await?; + tracing::info!("Broadcast order ({client_id}) transaction hash: {tx_hash:?}"); + sleep(Duration::from_secs(2)).await; + } + + // Batch cancel + let batch = OrderBatch { + clob_pair_id: market.clob_pair_id.0, + client_ids, + }; + let til_height = placer + .client + .get_latest_block_height() + .await? 
+ .ahead(SHORT_TERM_ORDER_MAXIMUM_LIFETIME); + let tx_hash = placer + .client + .batch_cancel_orders(&mut account, subaccount, vec![batch], til_height) + .await?; + tracing::info!( + "Broadcast cancel orders batch transaction hash: {:?}", + tx_hash + ); + + Ok(()) +} diff --git a/v4-client-rs/client/examples/bot_basic_adder.rs b/v4-client-rs/client/examples/bot_basic_adder.rs new file mode 100644 index 00000000..4a09f95f --- /dev/null +++ b/v4-client-rs/client/examples/bot_basic_adder.rs @@ -0,0 +1,271 @@ +mod support; + +use anyhow::{anyhow as err, Error, Result}; +use bigdecimal::{BigDecimal, One, Signed}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::indexer::{ + AnyId, Feed, IndexerClient, ListPerpetualMarketsOpts, PerpetualMarket, Price, Quantity, + SubaccountsMessage, Ticker, TradesMessage, +}; +use dydx_v4_rust::node::{Account, NodeClient, OrderBuilder, OrderId, OrderSide, Wallet}; +use std::str::FromStr; +use support::constants::TEST_MNEMONIC; +use support::order_book::LiveOrderBook; +use tokio::select; + +pub struct Parameters { + ticker: Ticker, + depth: BigDecimal, + allowed_deviation: BigDecimal, + max_position: Quantity, +} + +pub struct Variables { + position: Quantity, + state: State, +} + +enum State { + Resting { price: Price, oid: OrderId }, + InFlightOrder, + Cancelled, +} + +pub struct BasicAdder { + client: NodeClient, + #[allow(dead_code)] // TODO remove after completion + indexer: IndexerClient, + #[allow(dead_code)] // TODO remove after completion + wallet: Wallet, + account: Account, + #[allow(dead_code)] // TODO remove after completion + market: PerpetualMarket, + generator: OrderBuilder, + trades_feed: Feed, + subaccounts_feed: Feed, + order_book: LiveOrderBook, + parameters: Parameters, + variables: Variables, +} + +impl BasicAdder { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let mut client = NodeClient::connect(config.node).await?; + let mut indexer = IndexerClient::new(config.indexer); + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + let account = wallet.account(0, &mut client).await?; + let subaccount = account.subaccount(0)?; + + let ticker = Ticker::from("ETH-USD"); + let market = indexer + .markets() + .list_perpetual_markets(Some(ListPerpetualMarketsOpts { + ticker: Some(ticker.clone()), + limit: None, + })) + .await? + .remove(&ticker) + .ok_or_else(|| err!("{ticker} not found in markets query response"))?; + let generator = OrderBuilder::new(market.clone(), subaccount.clone()); + + let trades_feed = indexer.feed().trades(&ticker, false).await?; + let orders_feed = indexer.feed().orders(&ticker, false).await?; + let subaccounts_feed = indexer.feed().subaccounts(subaccount, false).await?; + let order_book = LiveOrderBook::new(orders_feed); + let depth: BigDecimal = BigDecimal::from_str("0.001")?; + let allowed_deviation: BigDecimal = BigDecimal::from_str("0.2")?; + let max_position: Quantity = "1.0".parse()?; + let parameters = Parameters { + ticker, + depth, + allowed_deviation, + max_position, + }; + let variables = Variables { + position: 0.into(), + state: State::Cancelled, + }; + Ok(Self { + client, + indexer, + wallet, + account, + market, + generator, + trades_feed, + subaccounts_feed, + order_book, + parameters, + variables, + }) + } + + async fn entrypoint(mut self) { + loop { + if let Err(err) = self.step().await { + tracing::error!("Bot update failed: {err}"); + } + } + } + + async fn step(&mut self) -> Result<()> { + select! 
{ + msg = self.trades_feed.recv() => { + if let Some(msg) = msg { + self.handle_trades_message(msg).await?; + } + } + msg = self.subaccounts_feed.recv() => { + if let Some(msg) = msg { + self.handle_subaccounts_message(msg).await?; + } + } + _ = self.order_book.changed() => { + self.handle_order_book().await?; + } + } + Ok(()) + } + + async fn handle_trades_message(&mut self, msg: TradesMessage) -> Result<()> { + match msg { + TradesMessage::Initial(_upd) => {} + TradesMessage::Update(_upd) => {} + } + Ok(()) + } + + async fn handle_subaccounts_message(&mut self, msg: SubaccountsMessage) -> Result<()> { + match msg { + SubaccountsMessage::Initial(upd) => { + let positions = upd.contents.subaccount.open_perpetual_positions; + if let Some(position) = positions.get(&self.parameters.ticker) { + self.variables.position = position.size.clone(); + tracing::info!("Position: {}", self.variables.position); + } + } + SubaccountsMessage::Update(upd) => { + if let Some(ref positions) = upd + .contents + .first() + .ok_or_else(|| err!("Subaccount message does not have data!"))? + .perpetual_positions + { + let size = positions + .iter() + .find(|p| (p.market == self.parameters.ticker)) + .map(|p| p.size.clone()); + if let Some(size) = size { + self.variables.position = size; + tracing::info!("Position: {}", self.variables.position); + } + } + } + } + Ok(()) + } + + async fn handle_order_book(&mut self) -> Result<()> { + let spread = self + .order_book + .borrow() + .spread() + .map(|spread| (spread.bid.price.clone(), spread.ask.price.clone())); + + if let Some((bid, ask)) = spread { + let side = if self.variables.position.is_negative() { + OrderSide::Buy + } else { + OrderSide::Sell + }; + + let one = ::one(); + let (book_price, ideal_price) = match side { + OrderSide::Buy => (&bid, bid.clone() * (one + &self.parameters.depth)), + OrderSide::Sell => (&ask, ask.clone() * (one - &self.parameters.depth)), + other => panic!("Unhandled side {other:?}!"), + }; + let ideal_distance = &book_price.0 * &self.parameters.depth; + + match &self.variables.state { + State::Resting { price, oid } => { + let distance = (ideal_price.clone() - price.clone()).abs(); + if distance > &self.parameters.allowed_deviation * ideal_distance { + tracing::info!( + "Cancelling order due to deviation: ID:{} side:{:?} ideal_price:{} price:{}", + oid.client_id, side, ideal_price, price + ); + self.cancel_order(oid.clone()).await?; + self.variables.state = State::Cancelled; + } + } + State::InFlightOrder => { + tracing::info!("Not placing an order because in flight"); + } + State::Cancelled => { + let size = &self.parameters.max_position.0 - self.variables.position.abs(); + if &size * &ideal_price.0 < BigDecimal::from_str("3.0")? 
{ + tracing::info!("Not placing an order because at position limit: size:{size} ideal_price:{ideal_price}"); + return Ok(()); + } + self.variables.state = State::InFlightOrder; + if let Ok(oid) = self + .place_limit_order(side, ideal_price.clone(), size) + .await + { + self.variables.state = State::Resting { + price: ideal_price, + oid, + }; + } else { + self.variables.state = State::Cancelled; + } + } + } + } + Ok(()) + } + + async fn place_limit_order( + &mut self, + side: OrderSide, + price: Price, + size: BigDecimal, + ) -> Result { + let current_block = self.client.get_latest_block_height().await?; + let (id, order) = self + .generator + .clone() + .limit(side, price, size) + .until(current_block.ahead(10)) + .build(AnyId)?; + let hash = self.client.place_order(&mut self.account, order).await?; + tracing::info!("Placing {side:?} order: {hash} (ID: {})", id.client_id); + + Ok(id) + } + + async fn cancel_order(&mut self, id: OrderId) -> Result<()> { + let current_block = self.client.get_latest_block_height().await?; + let until = current_block.ahead(10); + let c_id = id.client_id; + let hash = self + .client + .cancel_order(&mut self.account, id, until) + .await?; + tracing::info!("Cancelling order: {hash} (ID: {c_id})"); + + Ok(()) + } +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + #[cfg(feature = "telemetry")] + support::telemetry::metrics_dashboard().await?; + let adder = BasicAdder::connect().await?; + adder.entrypoint().await; + Ok(()) +} diff --git a/v4-client-rs/client/examples/bot_trend_follower.rs b/v4-client-rs/client/examples/bot_trend_follower.rs new file mode 100644 index 00000000..9a1772bc --- /dev/null +++ b/v4-client-rs/client/examples/bot_trend_follower.rs @@ -0,0 +1,388 @@ +mod support; + +use anyhow::{anyhow as err, Error, Result}; +use chrono::{TimeDelta, Utc}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::indexer::{ + CandleResolution, ClientId, Feed, GetCandlesOpts, IndexerClient, ListPerpetualMarketsOpts, + PerpetualMarket, Price, Quantity, Subaccount, SubaccountsMessage, Ticker, TradesMessage, +}; +use dydx_v4_rust::node::{ + Account, NodeClient, OrderBuilder, OrderId, OrderSide, Wallet, + SHORT_TERM_ORDER_MAXIMUM_LIFETIME, +}; +use std::fmt; +use support::constants::TEST_MNEMONIC; +use support::order_book::LiveOrderBook; +use tokio::{ + select, + sync::mpsc, + time::{sleep, Duration}, +}; + +pub struct Parameters { + ticker: Ticker, + position_size: Quantity, + shorter_span: TimeDelta, + longer_span: TimeDelta, +} + +pub struct Variables { + position: Quantity, + shorter_channel: Channel, + longer_channel: Channel, + state: State, +} + +enum State { + Waiting, + InTrend(OrderSide), +} + +pub struct Channel { + high: Price, + low: Price, +} + +impl fmt::Display for Channel { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "[{}, {}]", self.low, self.high)?; + Ok(()) + } +} + +pub struct TrendFollower { + client: NodeClient, + indexer: IndexerClient, + #[allow(dead_code)] // TODO remove after completion + wallet: Wallet, + account: Account, + subaccount: Subaccount, + market: PerpetualMarket, + generator: OrderBuilder, + trades_feed: Feed, + subaccounts_feed: Feed, + order_book: LiveOrderBook, + channel_rx: mpsc::UnboundedReceiver, + parameters: Parameters, + variables: Variables, +} + +impl TrendFollower { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let mut client = 
NodeClient::connect(config.node).await?; + let mut indexer = IndexerClient::new(config.indexer.clone()); + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + let mut account = wallet.account(0, &mut client).await?; + let subaccount = account.subaccount(0)?; + + let ticker = Ticker::from("ETH-USD"); + let market = indexer + .markets() + .list_perpetual_markets(Some(ListPerpetualMarketsOpts { + ticker: Some(ticker.clone()), + limit: None, + })) + .await? + .remove(&ticker) + .ok_or_else(|| err!("{ticker} not found in markets query response"))?; + let generator = OrderBuilder::new(market.clone(), subaccount.clone()); + + // Close position + client + .close_position( + &mut account, + subaccount.clone(), + market.clone(), + None, + ClientId::random(), + ) + .await?; + + let trades_feed = indexer.feed().trades(&ticker, false).await?; + let orders_feed = indexer.feed().orders(&ticker, false).await?; + let subaccounts_feed = indexer + .feed() + .subaccounts(subaccount.clone(), false) + .await?; + let order_book = LiveOrderBook::new(orders_feed); + let position_size: Quantity = "0.001".parse()?; + let shorter_span = TimeDelta::minutes(10); + let longer_span = TimeDelta::minutes(30); + + let shorter_channel = calculate_channel(&indexer, &ticker, shorter_span).await?; + let longer_channel = calculate_channel(&indexer, &ticker, longer_span).await?; + + tracing::info!("Watching channel: {longer_channel}"); + + let (tx, channel_rx) = mpsc::unbounded_channel(); + tokio::spawn(Self::channel_fetcher( + tx, + IndexerClient::new(config.indexer), + ticker.clone(), + shorter_span, + )); + + let parameters = Parameters { + ticker, + position_size, + shorter_span, + longer_span, + }; + let variables = Variables { + position: 0.into(), + shorter_channel, + longer_channel, + state: State::Waiting, + }; + Ok(Self { + client, + indexer, + wallet, + account, + subaccount, + market, + generator, + trades_feed, + subaccounts_feed, + order_book, + channel_rx, + parameters, + variables, + }) + } + + async fn entrypoint(mut self) { + loop { + if let Err(err) = self.step().await { + tracing::error!("Bot update failed: {err}"); + } + } + } + + async fn step(&mut self) -> Result<()> { + select! { + msg = self.trades_feed.recv() => { + if let Some(msg) = msg { + self.handle_trades_message(msg).await?; + } + } + msg = self.subaccounts_feed.recv() => { + if let Some(msg) = msg { + self.handle_subaccounts_message(msg).await?; + } + } + channel = self.channel_rx.recv() => { + if let Some(channel) = channel { + self.variables.shorter_channel = channel; + } + } + _ = self.order_book.changed() => { + self.handle_order_book().await?; + } + } + Ok(()) + } + + async fn handle_trades_message(&mut self, msg: TradesMessage) -> Result<()> { + match msg { + TradesMessage::Initial(_upd) => {} + TradesMessage::Update(_upd) => {} + } + Ok(()) + } + + async fn handle_subaccounts_message(&mut self, msg: SubaccountsMessage) -> Result<()> { + match msg { + SubaccountsMessage::Initial(upd) => { + let positions = upd.contents.subaccount.open_perpetual_positions; + if let Some(position) = positions.get(&self.parameters.ticker) { + self.variables.position = position.size.clone(); + tracing::info!("Position: {}", self.variables.position); + } + } + SubaccountsMessage::Update(upd) => { + if let Some(ref positions) = upd + .contents + .first() + .ok_or_else(|| err!("Subaccount message does not have data!"))? 
+ .perpetual_positions + { + let size = positions + .iter() + .find(|p| (p.market == self.parameters.ticker)) + .map(|p| p.size.clone()); + if let Some(size) = size { + self.variables.position = size; + tracing::info!("Position: {}", self.variables.position); + } + } + } + } + Ok(()) + } + + async fn handle_order_book(&mut self) -> Result<()> { + let spread = self + .order_book + .borrow() + .spread() + .map(|spread| (spread.bid.price.clone(), spread.ask.price.clone())); + + if let Some((bid, ask)) = spread { + let price = Price((bid.0 + ask.0) / 2); + match self.variables.state { + State::Waiting => { + if price > self.variables.longer_channel.high { + tracing::info!("Channel broken at {price}. Placing buy order."); + self.place_limit_order(OrderSide::Buy, price).await?; + self.variables.state = State::InTrend(OrderSide::Buy); + self.variables.shorter_channel = + self.get_channel(self.parameters.shorter_span).await?; + tracing::info!("In-trend channel: {}", self.variables.shorter_channel); + } else if price < self.variables.longer_channel.low { + tracing::info!("Channel broken at {price}. Placing sell order."); + self.place_limit_order(OrderSide::Sell, price).await?; + self.variables.state = State::InTrend(OrderSide::Sell); + self.variables.shorter_channel = + self.get_channel(self.parameters.shorter_span).await?; + tracing::info!("In-trend channel: {}", self.variables.shorter_channel); + } + } + State::InTrend(side) => { + let break_price = match side { + OrderSide::Buy => { + if price < self.variables.shorter_channel.low { + Some(price) + } else { + None + } + } + OrderSide::Sell => { + if price > self.variables.shorter_channel.high { + Some(price) + } else { + None + } + } + _ => None, + }; + if let Some(price) = break_price { + tracing::info!( + "Leaving trend at {price}, channel: {}. 
Closing position.", + self.variables.shorter_channel + ); + self.close_position().await?; + self.variables.state = State::Waiting; + self.variables.longer_channel = + self.get_channel(self.parameters.longer_span).await?; + tracing::info!("Watching channel {}.", self.variables.longer_channel); + } + } + } + } + Ok(()) + } + + async fn place_limit_order(&mut self, side: OrderSide, price: Price) -> Result { + let current_block = self.client.get_latest_block_height().await?; + let (id, order) = self + .generator + .clone() + .limit(side, price, self.parameters.position_size.clone()) + .until(current_block.ahead(SHORT_TERM_ORDER_MAXIMUM_LIFETIME)) + .build(ClientId::random())?; + let hash = self.client.place_order(&mut self.account, order).await?; + tracing::info!("Placing {side:?} order: {hash} (ID: {})", id.client_id); + + Ok(id) + } + + async fn _cancel_order(&mut self, id: OrderId) -> Result<()> { + let current_block = self.client.get_latest_block_height().await?; + let until = current_block.ahead(10); + let c_id = id.client_id; + let hash = self + .client + .cancel_order(&mut self.account, id, until) + .await?; + tracing::info!("Cancelling order: {hash} (ID: {c_id})"); + + Ok(()) + } + + async fn close_position(&mut self) -> Result<()> { + self.client + .close_position( + &mut self.account, + self.subaccount.clone(), + self.market.clone(), + None, + ClientId::random(), + ) + .await + .map(|_| ()) + .map_err(|e| err!("Failed closing position: {e}")) + } + + async fn get_channel(&self, span: TimeDelta) -> Result { + calculate_channel(&self.indexer, &self.parameters.ticker, span).await + } + + async fn channel_fetcher( + tx: mpsc::UnboundedSender, + indexer: IndexerClient, + ticker: Ticker, + span: TimeDelta, + ) -> Result { + loop { + sleep(Duration::from_secs(30)).await; + let result = calculate_channel(&indexer, &ticker, span).await?; + tx.send(result)?; + } + } +} + +async fn calculate_channel( + indexer: &IndexerClient, + ticker: &Ticker, + span: TimeDelta, +) -> Result { + let now = Utc::now(); + let opts = GetCandlesOpts { + from_iso: Some(now - span), + to_iso: Some(now), + limit: None, + }; + let candles = indexer + .markets() + .get_candles(ticker, CandleResolution::M1, Some(opts)) + .await?; + if candles.is_empty() { + return Err(err!("Candles response is empty")); + } + let high = candles + .iter() + .max_by_key(|c| c.high.clone()) + .unwrap() + .high + .clone(); + let low = candles + .iter() + .min_by_key(|c| c.low.clone()) + .unwrap() + .low + .clone(); + Ok(Channel { low, high }) +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + #[cfg(feature = "telemetry")] + support::telemetry::metrics_dashboard().await?; + let follower = TrendFollower::connect().await?; + follower.entrypoint().await; + Ok(()) +} diff --git a/v4-client-rs/client/examples/cancel_order.rs b/v4-client-rs/client/examples/cancel_order.rs new file mode 100644 index 00000000..318775bf --- /dev/null +++ b/v4-client-rs/client/examples/cancel_order.rs @@ -0,0 +1,71 @@ +mod support; +use anyhow::{Error, Result}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::indexer::{AnyId, IndexerClient, Ticker}; +use dydx_v4_rust::node::{NodeClient, OrderBuilder, OrderSide, Wallet}; +use support::constants::TEST_MNEMONIC; +use tokio::time::{sleep, Duration}; +use v4_proto_rs::dydxprotocol::clob::order::TimeInForce; + +const ETH_USD_TICKER: &str = "ETH-USD"; + +pub struct OrderPlacer { + client: NodeClient, + indexer: IndexerClient, + wallet: Wallet, +} + 
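// Editor's note: illustrative sketch only, not part of the original example. It shows that an
// `OrderBuilder` can be cloned and reused for several orders sharing one good-til height, the
// same pattern the batch_cancel_orders example relies on; only items already imported in this
// file are used, and the helper itself is hypothetical.
#[allow(dead_code)]
async fn place_two_limit_buys(placer: &mut OrderPlacer) -> Result<()> {
    let mut account = placer.wallet.account(0, &mut placer.client).await?;
    let subaccount = account.subaccount(0)?;
    let ticker = Ticker(ETH_USD_TICKER.into());
    let market = placer.indexer.markets().get_perpetual_market(&ticker).await?;

    // Short-term orders are only valid for a bounded number of blocks, so both orders share a
    // good-til height derived from the latest block.
    let good_until = placer.client.get_latest_block_height().await?.ahead(10);
    let builder = OrderBuilder::new(market, subaccount);

    let (_first_id, first_order) = builder
        .clone()
        .limit(OrderSide::Buy, 100, 3)
        .reduce_only(false)
        .time_in_force(TimeInForce::Unspecified)
        .until(good_until.clone())
        .build(AnyId)?;
    let (_second_id, second_order) = builder
        .limit(OrderSide::Buy, 101, 3)
        .reduce_only(false)
        .time_in_force(TimeInForce::Unspecified)
        .until(good_until)
        .build(AnyId)?;

    let first_hash = placer.client.place_order(&mut account, first_order).await?;
    let second_hash = placer.client.place_order(&mut account, second_order).await?;
    tracing::info!("Placed two limit orders: {first_hash:?}, {second_hash:?}");
    Ok(())
}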
+impl OrderPlacer { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let client = NodeClient::connect(config.node).await?; + let indexer = IndexerClient::new(config.indexer); + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + Ok(Self { + client, + indexer, + wallet, + }) + } +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + #[cfg(feature = "telemetry")] + support::telemetry::metrics_dashboard().await?; + let mut placer = OrderPlacer::connect().await?; + let mut account = placer.wallet.account(0, &mut placer.client).await?; + let subaccount = account.subaccount(0)?; + + let ticker = Ticker(ETH_USD_TICKER.into()); + let market = placer + .indexer + .markets() + .get_perpetual_market(&ticker) + .await?; + + let current_block_height = placer.client.get_latest_block_height().await?; + let good_until = current_block_height.ahead(10); + + let (order_id, order) = OrderBuilder::new(market, subaccount) + .limit(OrderSide::Buy, 100, 3) + .reduce_only(false) + .time_in_force(TimeInForce::Unspecified) + .until(good_until.clone()) + .build(AnyId)?; + + let place_tx_hash = placer.client.place_order(&mut account, order).await?; + tracing::info!("Place order transaction hash: {:?}", place_tx_hash); + + sleep(Duration::from_secs(5)).await; + + // Cancel order + let cancel_tx_hash = placer + .client + .cancel_order(&mut account, order_id, good_until) + .await?; + tracing::info!("Cancel order transaction hash: {:?}", cancel_tx_hash); + + Ok(()) +} diff --git a/v4-client-rs/client/examples/close_all_positions.rs b/v4-client-rs/client/examples/close_all_positions.rs new file mode 100644 index 00000000..a537a47e --- /dev/null +++ b/v4-client-rs/client/examples/close_all_positions.rs @@ -0,0 +1,90 @@ +mod support; +use anyhow::{Error, Result}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::indexer::{ + ClientId, IndexerClient, ListPositionsOpts, + PerpetualPositionResponseObject as PerpetualPosition, PerpetualPositionStatus, Subaccount, +}; +use dydx_v4_rust::node::{NodeClient, Wallet}; +use support::constants::TEST_MNEMONIC; + +pub struct OrderPlacer { + client: NodeClient, + indexer: IndexerClient, + wallet: Wallet, +} + +impl OrderPlacer { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let client = NodeClient::connect(config.node).await?; + let indexer = IndexerClient::new(config.indexer); + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + Ok(Self { + client, + indexer, + wallet, + }) + } +} + +async fn get_open_positions( + indexer: &IndexerClient, + subaccount: &Subaccount, +) -> Result> { + indexer + .accounts() + .list_positions( + subaccount, + Some(ListPositionsOpts { + status: Some(PerpetualPositionStatus::Open), + ..Default::default() + }), + ) + .await +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + #[cfg(feature = "telemetry")] + support::telemetry::metrics_dashboard().await?; + let mut placer = OrderPlacer::connect().await?; + let mut account = placer.wallet.account(0, &mut placer.client).await?; + + let subaccount = account.subaccount(0)?; + + let open_positions = get_open_positions(&placer.indexer, &subaccount).await?; + tracing::info!("Number of open positions: {}", open_positions.len()); + + for position in open_positions { + let market = placer + .indexer + .markets() + 
.get_perpetual_market(&position.market) + .await?; + let ticker = market.ticker.clone(); + + // Fully close the position, if open, matching best current market prices + let tx_hash = placer + .client + .close_position( + &mut account, + subaccount.clone(), + market, + None, + ClientId::random(), + ) + .await?; + tracing::info!("{ticker} position close transaction hash: {:?}", tx_hash); + } + + tracing::info!( + "Number of open positions: {}", + get_open_positions(&placer.indexer, &subaccount) + .await? + .len() + ); + + Ok(()) +} diff --git a/v4-client-rs/client/examples/close_position.rs b/v4-client-rs/client/examples/close_position.rs new file mode 100644 index 00000000..dc4058f6 --- /dev/null +++ b/v4-client-rs/client/examples/close_position.rs @@ -0,0 +1,121 @@ +mod support; +use anyhow::{Error, Result}; +use bigdecimal::BigDecimal; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::indexer::{ + ClientId, IndexerClient, ListPositionsOpts, + PerpetualPositionResponseObject as PerpetualPosition, PerpetualPositionStatus, Subaccount, + Ticker, +}; +use dydx_v4_rust::node::{NodeClient, Wallet}; +use std::str::FromStr; +use support::constants::TEST_MNEMONIC; +use tokio::time::{sleep, Duration}; + +const ETH_USD_TICKER: &str = "ETH-USD"; + +pub struct OrderPlacer { + client: NodeClient, + indexer: IndexerClient, + wallet: Wallet, +} + +impl OrderPlacer { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let client = NodeClient::connect(config.node).await?; + let indexer = IndexerClient::new(config.indexer); + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + Ok(Self { + client, + indexer, + wallet, + }) + } +} + +async fn get_open_position( + indexer: &IndexerClient, + subaccount: &Subaccount, + ticker: &Ticker, +) -> Option { + indexer + .accounts() + .list_positions( + subaccount, + Some(ListPositionsOpts { + status: Some(PerpetualPositionStatus::Open), + ..Default::default() + }), + ) + .await + .ok() + .and_then(|positions| positions.into_iter().find(|pos| pos.market == *ticker)) +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + #[cfg(feature = "telemetry")] + support::telemetry::metrics_dashboard().await?; + let mut placer = OrderPlacer::connect().await?; + let mut account = placer.wallet.account(0, &mut placer.client).await?; + + let subaccount = account.subaccount(0)?; + let ticker = Ticker(ETH_USD_TICKER.into()); + + let market = placer + .indexer + .markets() + .get_perpetual_market(Ð_USD_TICKER.into()) + .await?; + + println!( + "Current open position: {:?}", + get_open_position(&placer.indexer, &subaccount, &ticker).await + ); + + // Reduce position by an amount, if open, matching best current market prices + let reduce_by = BigDecimal::from_str("0.0001")?; + let tx_hash = placer + .client + .close_position( + &mut account, + subaccount.clone(), + market.clone(), + Some(reduce_by), + ClientId::random(), + ) + .await?; + tracing::info!( + "Partial position close broadcast transaction hash: {:?}", + tx_hash + ); + + sleep(Duration::from_secs(3)).await; + + // Fully close the position, if open, matching best current market prices + let tx_hash = placer + .client + .close_position( + &mut account, + subaccount.clone(), + market, + None, + ClientId::random(), + ) + .await?; + tracing::info!( + "Fully position close broadcast transaction hash: {:?}", + tx_hash + ); + + sleep(Duration::from_secs(3)).await; + + println!( + "Current open position: 
{:?}", + get_open_position(&placer.indexer, &subaccount, &ticker).await + ); + + Ok(()) +} diff --git a/v4-client-rs/client/examples/deposit.rs b/v4-client-rs/client/examples/deposit.rs new file mode 100644 index 00000000..86481d56 --- /dev/null +++ b/v4-client-rs/client/examples/deposit.rs @@ -0,0 +1,39 @@ +mod support; +use anyhow::{Error, Result}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::node::{NodeClient, Wallet}; +use support::constants::TEST_MNEMONIC; + +pub struct Transferor { + client: NodeClient, + wallet: Wallet, +} + +impl Transferor { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let client = NodeClient::connect(config.node).await?; + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + Ok(Self { client, wallet }) + } +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + #[cfg(feature = "telemetry")] + support::telemetry::metrics_dashboard().await?; + let mut transferor = Transferor::connect().await?; + let mut account = transferor.wallet.account(0, &mut transferor.client).await?; + + let sender = account.address().clone(); + let recipient = account.subaccount(0)?; + + let tx_hash = transferor + .client + .deposit(&mut account, sender, recipient, 1) + .await?; + tracing::info!("Deposit transaction hash: {:?}", tx_hash); + + Ok(()) +} diff --git a/v4-client-rs/client/examples/faucet_endpoint.rs b/v4-client-rs/client/examples/faucet_endpoint.rs new file mode 100644 index 00000000..4aa18b5a --- /dev/null +++ b/v4-client-rs/client/examples/faucet_endpoint.rs @@ -0,0 +1,54 @@ +mod support; +use anyhow::Result; + +#[cfg(feature = "faucet")] +mod faucet_endpoint_example { + use super::support::constants::TEST_MNEMONIC; + use anyhow::{anyhow as err, Error, Result}; + use dydx_v4_rust::config::ClientConfig; + use dydx_v4_rust::faucet::FaucetClient; + use dydx_v4_rust::indexer::Usdc; + use dydx_v4_rust::node::Wallet; + pub struct FaucetRequester { + faucet: FaucetClient, + wallet: Wallet, + } + + impl FaucetRequester { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let faucet = FaucetClient::new( + config + .faucet + .ok_or_else(|| err!("Config file must contain a [faucet] config!"))?, + ); + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + Ok(Self { faucet, wallet }) + } + } + + pub async fn run() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + let requester = FaucetRequester::connect().await?; + let subaccount = requester.wallet.account_offline(0)?.subaccount(0)?; + + requester + .faucet + .fill(&subaccount, &Usdc::from(1000)) + .await?; + + Ok(()) + } +} + +#[cfg(feature = "faucet")] +#[tokio::main] +async fn main() -> Result<()> { + faucet_endpoint_example::run().await?; + Ok(()) +} + +#[cfg(not(feature = "faucet"))] +fn main() { + eprintln!("Feature 'faucet' must be enabled to run this example!") +} diff --git a/v4-client-rs/client/examples/live_price.rs b/v4-client-rs/client/examples/live_price.rs new file mode 100644 index 00000000..9611e637 --- /dev/null +++ b/v4-client-rs/client/examples/live_price.rs @@ -0,0 +1,83 @@ +mod support; +use anyhow::{Error, Result}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::indexer::{Feed, IndexerClient, MarketsMessage, Ticker}; +use dydx_v4_rust::node::{OrderBuilder, Wallet}; +use support::constants::TEST_MNEMONIC; + +pub struct Feeder { + ticker: Ticker, + markets_feed: Feed, + 
ordergen: OrderBuilder, +} + +impl Feeder { + pub async fn connect() -> Result { + tracing_subscriber::fmt() + .with_max_level(tracing::Level::DEBUG) + .try_init() + .map_err(Error::msg)?; + #[cfg(feature = "telemetry")] + support::telemetry::metrics_dashboard().await?; + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let mut indexer = IndexerClient::new(config.indexer); + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + let ticker = "ETH-USD".into(); + + let account = wallet.account_offline(0)?; + let subaccount = account.subaccount(0)?; + let market = indexer.markets().get_perpetual_market(&ticker).await?; + let ordergen = OrderBuilder::new(market, subaccount); + let markets_feed = indexer.feed().markets(false).await?; + + Ok(Self { + ticker, + markets_feed, + ordergen, + }) + } + + async fn entrypoint(mut self) { + loop { + self.step().await; + } + } + + async fn step(&mut self) { + if let Some(msg) = self.markets_feed.recv().await { + self.handle_markets_msg(msg).await; + } + } + + async fn handle_markets_msg(&mut self, msg: MarketsMessage) { + let price_opt = match msg { + MarketsMessage::Initial(mut init) => init + .contents + .markets + .remove(&self.ticker) + .and_then(|market| market.oracle_price), + MarketsMessage::Update(mut upd) => upd + .contents + .first_mut() + .and_then(|contents| { + contents + .oracle_prices + .as_mut() + .and_then(|prices| prices.remove(&self.ticker)) + }) + .map(|opm| opm.oracle_price), + }; + if let Some(price) = price_opt { + tracing::info!("Oracle price updated: {price:?}"); + // Since `OrderBuilder` uses the oracle price for slippage protection in Market orders, + // it is recommended to be updated if the same instance is re-used for different orders. + self.ordergen.update_market_price(price); + } + } +} +#[tokio::main] +async fn main() -> Result<()> { + let feeder = Feeder::connect().await?; + feeder.entrypoint().await; + Ok(()) +} diff --git a/v4-client-rs/client/examples/market_endpoint.rs b/v4-client-rs/client/examples/market_endpoint.rs new file mode 100644 index 00000000..907db033 --- /dev/null +++ b/v4-client-rs/client/examples/market_endpoint.rs @@ -0,0 +1,100 @@ +use anyhow::{Error, Result}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::indexer::{ + CandleResolution, GetCandlesOpts, GetHistoricalFundingOpts, GetTradesOpts, IndexerClient, + ListPerpetualMarketsOpts, SparklineTimePeriod, Ticker, +}; + +const ETH_USD_TICKER: &str = "ETH-USD"; + +pub struct Rester { + indexer: IndexerClient, +} + +impl Rester { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let indexer = IndexerClient::new(config.indexer); + Ok(Self { indexer }) + } +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + let rester = Rester::connect().await?; + let indexer = rester.indexer; + + // Test value + let ticker = Ticker::from(ETH_USD_TICKER); + + let markets_options = ListPerpetualMarketsOpts { + limit: Some(5), + ..Default::default() + }; + let markets = indexer + .markets() + .list_perpetual_markets(Some(markets_options)) + .await?; + tracing::info!("Markets response: {:?}", markets); + + let markets_options = ListPerpetualMarketsOpts { + ticker: Some(ticker.clone()), + ..Default::default() + }; + let market = indexer + .markets() + .list_perpetual_markets(Some(markets_options)) + .await?; + tracing::info!("Market ({ETH_USD_TICKER}) response: {:?}", market); + + let sparklines = indexer + 
.markets() + .get_sparklines(SparklineTimePeriod::SevenDays) + .await?; + tracing::info!( + "Sparklines ({ETH_USD_TICKER}) response: {:?}", + sparklines.get(&ticker) + ); + + let trades_opts = GetTradesOpts { + limit: Some(5), + ..Default::default() + }; + let trades = indexer + .markets() + .get_trades(&ticker, Some(trades_opts)) + .await?; + tracing::info!("Trades ({ETH_USD_TICKER}) response: {:?}", trades); + + let orderbook = indexer + .markets() + .get_perpetual_market_orderbook(&ticker) + .await?; + tracing::info!("Orderbook ({ETH_USD_TICKER}) response: {:?}", orderbook); + + let candles_opts = GetCandlesOpts { + limit: Some(3), + ..Default::default() + }; + let candles = indexer + .markets() + .get_candles(&ticker, CandleResolution::M1, Some(candles_opts)) + .await?; + tracing::info!("Candles ({ETH_USD_TICKER}) response: {:?}", candles); + + let fund_opts = GetHistoricalFundingOpts { + limit: Some(3), + ..Default::default() + }; + let funding = indexer + .markets() + .get_historical_funding(&ticker, Some(fund_opts)) + .await?; + tracing::info!( + "Historical funding ({ETH_USD_TICKER}) response: {:?}", + funding + ); + + Ok(()) +} diff --git a/v4-client-rs/client/examples/noble_transfer.rs b/v4-client-rs/client/examples/noble_transfer.rs new file mode 100644 index 00000000..ad9edc12 --- /dev/null +++ b/v4-client-rs/client/examples/noble_transfer.rs @@ -0,0 +1,116 @@ +use anyhow::Result; + +#[cfg(feature = "noble")] +mod noble_transfer_example { + use super::*; + use anyhow::{anyhow as err, Error}; + use dydx_v4_rust::config::ClientConfig; + use dydx_v4_rust::indexer::Token; + use dydx_v4_rust::noble::{NobleClient, NobleUsdc}; + use dydx_v4_rust::node::{NodeClient, Wallet}; + use tokio::time::{sleep, Duration}; + + const TEST_MNEMONIC: &str = "mirror actor skill push coach wait confirm orchard lunch mobile athlete gossip awake miracle matter bus reopen team ladder lazy list timber render wait"; + const DYDX_SOURCE_CHANNEL: &str = "channel-0"; + const NOBLE_SOURCE_CHANNEL: &str = "channel-33"; + + pub struct Bridger { + wallet: Wallet, + noble: NobleClient, + node: NodeClient, + } + + impl Bridger { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let noble = NobleClient::connect( + config + .noble + .ok_or_else(|| err!("Config file must contain a [noble] config!"))?, + ) + .await?; + let node = NodeClient::connect(config.node).await?; + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + Ok(Self { + noble, + wallet, + node, + }) + } + } + + #[tokio::main] + pub async fn run() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + let mut bridger = Bridger::connect().await?; + + let mut account_dydx = bridger.wallet.account_offline(0)?; + let mut account_noble = bridger.wallet.noble().account_offline(0)?; + + let address_dydx = account_dydx.address().clone(); + let address_noble = account_noble.address().clone(); + + tracing::info!( + "Before transfer balance: {:?}", + bridger + .noble + .get_account_balances(address_noble.clone()) + .await? + ); + let tx_hash = bridger + .node + .send_token_ibc( + &mut account_dydx, + address_dydx.clone(), + address_noble.clone(), + Token::Usdc(1.into()), + DYDX_SOURCE_CHANNEL.into(), + ) + .await?; + tracing::info!("dYdX -> Noble Tx hash: {tx_hash}"); + + sleep(Duration::from_secs(30)).await; + + tracing::info!( + "After transfer balance: {:?}", + bridger + .noble + .get_account_balances(address_noble.clone()) + .await? 
+ ); + + let tx_hash = bridger + .noble + .send_token_ibc( + &mut account_noble, + address_noble.clone(), + address_dydx, + NobleUsdc::from(1), + NOBLE_SOURCE_CHANNEL.into(), + ) + .await?; + tracing::info!("Noble -> dYdX Tx hash: {tx_hash}"); + + sleep(Duration::from_secs(30)).await; + + tracing::info!( + "Undo transfer balance: {:?}", + bridger + .noble + .get_account_balances(address_noble.clone()) + .await? + ); + + Ok(()) + } +} + +#[cfg(feature = "noble")] +fn main() -> Result<()> { + noble_transfer_example::run() +} + +#[cfg(not(feature = "noble"))] +fn main() { + eprintln!("Feature 'noble' must be enabled to run this example!") +} diff --git a/v4-client-rs/client/examples/place_order_long_term.rs b/v4-client-rs/client/examples/place_order_long_term.rs new file mode 100644 index 00000000..1c18b3b8 --- /dev/null +++ b/v4-client-rs/client/examples/place_order_long_term.rs @@ -0,0 +1,65 @@ +mod support; +use anyhow::{Error, Result}; +use bigdecimal::BigDecimal; +use chrono::{TimeDelta, Utc}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::indexer::{ClientId, IndexerClient, Ticker}; +use dydx_v4_rust::node::{NodeClient, OrderBuilder, OrderSide, Wallet}; +use support::constants::TEST_MNEMONIC; +use v4_proto_rs::dydxprotocol::clob::order::TimeInForce; + +const ETH_USD_TICKER: &str = "ETH-USD"; + +pub struct OrderPlacer { + client: NodeClient, + indexer: IndexerClient, + wallet: Wallet, +} + +impl OrderPlacer { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let client = NodeClient::connect(config.node).await?; + let indexer = IndexerClient::new(config.indexer); + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + Ok(Self { + client, + indexer, + wallet, + }) + } +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + #[cfg(feature = "telemetry")] + support::telemetry::metrics_dashboard().await?; + let mut placer = OrderPlacer::connect().await?; + let mut account = placer.wallet.account(0, &mut placer.client).await?; + + // Test values + let subaccount = account.subaccount(0)?; + let client_id = ClientId::random(); + let ticker = Ticker(ETH_USD_TICKER.into()); + let market = placer + .indexer + .markets() + .get_perpetual_market(&ticker) + .await?; + + let now = Utc::now(); + let time_in_force_seconds = now + TimeDelta::seconds(60); + + let (_id, order) = OrderBuilder::new(market, subaccount) + .limit(OrderSide::Buy, 123, BigDecimal::new(2.into(), 2)) + .time_in_force(TimeInForce::Unspecified) + .until(time_in_force_seconds) + .long_term() + .build(client_id)?; + + let tx_hash = placer.client.place_order(&mut account, order).await?; + tracing::info!("Broadcast transaction hash: {:?}", tx_hash); + + Ok(()) +} diff --git a/v4-client-rs/client/examples/place_order_short_term.rs b/v4-client-rs/client/examples/place_order_short_term.rs new file mode 100644 index 00000000..f0e81f97 --- /dev/null +++ b/v4-client-rs/client/examples/place_order_short_term.rs @@ -0,0 +1,63 @@ +mod support; +use anyhow::{Error, Result}; +use bigdecimal::BigDecimal; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::indexer::IndexerClient; +use dydx_v4_rust::node::{NodeClient, OrderBuilder, OrderSide, Wallet}; +use std::str::FromStr; +use support::constants::TEST_MNEMONIC; +use v4_proto_rs::dydxprotocol::clob::order::TimeInForce; + +const ETH_USD_TICKER: &str = "ETH-USD"; + +pub struct OrderPlacer { + client: NodeClient, + indexer: IndexerClient, + wallet: Wallet, +} + 
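// Editor's note: illustrative sketch only, not part of the original example. It wraps the same
// market-order flow used in `main` below in a hypothetical helper, to highlight that a market
// order built with `OrderBuilder` still carries a `.price(...)` value (the slippage-protection
// price, as the live_price example notes) and a short good-til window measured in blocks.
#[allow(dead_code)]
async fn place_market_buy(placer: &mut OrderPlacer, size: BigDecimal) -> Result<()> {
    let mut account = placer.wallet.account(0, &mut placer.client).await?;
    let subaccount = account.subaccount(0)?;
    let market = placer
        .indexer
        .markets()
        .get_perpetual_market(&ETH_USD_TICKER.into())
        .await?;

    // Short-term orders expire after a small number of blocks, so the good-til height is taken
    // from the latest block plus a margin.
    let good_until = placer.client.get_latest_block_height().await?.ahead(10);

    let (_id, order) = OrderBuilder::new(market, subaccount)
        .market(OrderSide::Buy, size)
        .reduce_only(false)
        .price(100) // slippage-protection price, mirroring `main` below
        .time_in_force(TimeInForce::Unspecified)
        .until(good_until)
        .build(654321)?; // any unique client id works; other examples use ClientId::random()
    let tx_hash = placer.client.place_order(&mut account, order).await?;
    tracing::info!("Market order broadcast transaction hash: {:?}", tx_hash);
    Ok(())
}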
+impl OrderPlacer {
+    pub async fn connect() -> Result<Self> {
+        let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+        let client = NodeClient::connect(config.node).await?;
+        let indexer = IndexerClient::new(config.indexer);
+        let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+        Ok(Self {
+            client,
+            indexer,
+            wallet,
+        })
+    }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+    tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+    #[cfg(feature = "telemetry")]
+    support::telemetry::metrics_dashboard().await?;
+    let mut placer = OrderPlacer::connect().await?;
+    let mut account = placer.wallet.account(0, &mut placer.client).await?;
+    let subaccount = account.subaccount(0)?;
+
+    let market = placer
+        .indexer
+        .markets()
+        .get_perpetual_market(&ETH_USD_TICKER.into())
+        .await?;
+
+    let current_block_height = placer.client.get_latest_block_height().await?;
+
+    let size = BigDecimal::from_str("0.02")?;
+    let (_id, order) = OrderBuilder::new(market, subaccount)
+        .market(OrderSide::Buy, size)
+        .reduce_only(false)
+        .price(100) // market-order slippage protection price
+        .time_in_force(TimeInForce::Unspecified)
+        .until(current_block_height.ahead(10))
+        .build(123456)?;
+
+    let tx_hash = placer.client.place_order(&mut account, order).await?;
+    tracing::info!("Broadcast transaction hash: {:?}", tx_hash);
+
+    Ok(())
+}
diff --git a/v4-client-rs/client/examples/send_token.rs b/v4-client-rs/client/examples/send_token.rs
new file mode 100644
index 00000000..d6b0cc90
--- /dev/null
+++ b/v4-client-rs/client/examples/send_token.rs
@@ -0,0 +1,40 @@
+mod support;
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::Token;
+use dydx_v4_rust::node::{NodeClient, Wallet};
+use support::constants::TEST_MNEMONIC;
+
+pub struct Transferor {
+    client: NodeClient,
+    wallet: Wallet,
+}
+
+impl Transferor {
+    pub async fn connect() -> Result<Self> {
+        let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+        let client = NodeClient::connect(config.node).await?;
+        let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+        Ok(Self { client, wallet })
+    }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+    tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+    let mut transferor = Transferor::connect().await?;
+
+    let mut account0 = transferor.wallet.account(0, &mut transferor.client).await?;
+    let sender = account0.address().clone();
+
+    let account1 = transferor.wallet.account(1, &mut transferor.client).await?;
+    let recipient = account1.address().clone();
+
+    let tx_hash = transferor
+        .client
+        .send_token(&mut account0, sender, recipient, Token::DydxTnt(1.into()))
+        .await?;
+    tracing::info!("Send token transaction hash: {:?}", tx_hash);
+
+    Ok(())
+}
diff --git a/v4-client-rs/client/examples/sequencer.rs b/v4-client-rs/client/examples/sequencer.rs
new file mode 100644
index 00000000..54c9a105
--- /dev/null
+++ b/v4-client-rs/client/examples/sequencer.rs
@@ -0,0 +1,162 @@
+mod support;
+use anyhow::{Error, Result};
+use async_trait::async_trait;
+use bigdecimal::BigDecimal;
+use chrono::{TimeDelta, Utc};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::{
+    Address, ClientId, IndexerClient, PerpetualMarket, Subaccount, Ticker,
+};
+use dydx_v4_rust::node::{sequencer::*, Account, NodeClient, OrderBuilder, OrderSide, Wallet};
+use std::sync::Arc;
+use support::constants::TEST_MNEMONIC;
+use tokio::sync::Mutex;
+use tokio::time::{sleep, Duration};
+use v4_proto_rs::dydxprotocol::clob::order::TimeInForce;
+
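// Editor's note: illustrative sketch only, not part of the original example. The
// `CustomSequencer` defined further down guards its counter with a `Mutex` and remarks that an
// atomic would also work; this is what that lock-free variant could look like, reusing the
// `Sequencer` and `Nonce` items brought in through `dydx_v4_rust::node::sequencer` above.
use std::sync::atomic::{AtomicU64, Ordering};

#[derive(Clone)]
#[allow(dead_code)]
pub struct AtomicSequencer {
    counter: Arc<AtomicU64>,
}

#[allow(dead_code)]
impl AtomicSequencer {
    pub fn new(start_at: u64) -> Self {
        Self {
            counter: Arc::new(AtomicU64::new(start_at)),
        }
    }
}

#[async_trait]
impl Sequencer for AtomicSequencer {
    async fn next_nonce(&mut self, _: &Address) -> Result<Nonce> {
        // `fetch_add` returns the previous value, so the first nonce handed out is `start_at`.
        Ok(Nonce::Sequence(self.counter.fetch_add(1, Ordering::SeqCst)))
    }
}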
+const ETH_USD_TICKER: &str = "ETH-USD"; + +pub struct OrderPlacer { + client: NodeClient, + market: PerpetualMarket, + account: Account, + subaccount: Subaccount, +} + +impl OrderPlacer { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let mut client = NodeClient::connect(config.node).await?; + let indexer = IndexerClient::new(config.indexer); + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + let ticker = Ticker(ETH_USD_TICKER.into()); + let market = indexer.markets().get_perpetual_market(&ticker).await?; + let account = wallet.account(0, &mut client).await?; + let subaccount = account.subaccount(0)?; + Ok(Self { + client, + market, + account, + subaccount, + }) + } + + pub async fn place_order(&mut self) -> Result<()> { + let (_, order) = OrderBuilder::new(self.market.clone(), self.subaccount.clone()) + .limit(OrderSide::Buy, 123, BigDecimal::new(2.into(), 2)) + .time_in_force(TimeInForce::Unspecified) + .until(Utc::now() + TimeDelta::seconds(60)) + .long_term() + .build(ClientId::random())?; + + self.client + .place_order(&mut self.account, order) + .await + .map(drop) + .map_err(Error::msg) + } + + pub async fn fetch_sequence_number(&mut self) -> Result { + let (_, sequence_number) = self.client.query_address(self.account.address()).await?; + Ok(sequence_number) + } +} + +#[derive(Clone)] +pub struct CustomSequencer { + counter: Arc>, + // Or use an Atomic in this case +} + +impl CustomSequencer { + pub fn new(start_at: u64) -> Self { + Self { + counter: Arc::new(Mutex::new(start_at)), + } + } +} + +#[async_trait] +impl Sequencer for CustomSequencer { + async fn next_nonce(&mut self, _: &Address) -> Result { + let mut counter = self.counter.lock().await; + *counter += 1; + Ok(Nonce::Sequence(*counter - 1)) + } +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + #[cfg(feature = "telemetry")] + support::telemetry::metrics_dashboard().await?; + let mut placer = OrderPlacer::connect().await?; + + // In Cosmos-based blockchains, like dYdX, an account sequence number is used as a nonce to + // prevent replay attacks. This affects only relevant requests: non-short term orders and transfer methods. + // This crate provides three different mechanisms to set the account number: + // + // - QueryingSequencer: a request is made to the network to fetch the correct sequence number + // to be used in the next transaction. This request is made for every relevant request + // previously to the transaction broadcast. + // - IncrementalSequencer: for each relevant request, a simple counter is increased. The + // starting value counter must be set manually, using for example the value returned by + // NodeClient::query_address(). + // - TimestamperSequencer: for each relevant request, the current timestamp (milliseconds) is + // used. + // + // The Sequencer trait can be used to provide custom sequencers to the NodeClient. + + // By default, NodeClient uses the QueryingSequencer. + placer.place_order().await?; + sleep(Duration::from_secs(4)).await; + tracing::info!( + "(After QueryingSequencer) Sequence number: {}", + placer.fetch_sequence_number().await? + ); + + // To use the incremental sequencer, create one with the to-be used addresses and initial + // counters. 
+ let incremental_sequencer = IncrementalSequencer::new(&[( + placer.account.address().clone(), + placer.fetch_sequence_number().await?, + )]); + placer.client.with_sequencer(incremental_sequencer); + + placer.place_order().await?; + sleep(Duration::from_secs(4)).await; + tracing::info!( + "(After IncrementalSequencer) Sequence number: {}", + placer.fetch_sequence_number().await? + ); + + // And the timestamper sequencer, + let timestamper_sequencer = TimestamperSequencer; + placer.client.with_sequencer(timestamper_sequencer); + + placer.place_order().await?; + sleep(Duration::from_secs(4)).await; + tracing::info!( + "(After TimestamperSequencer) Sequence number: {}", + placer.fetch_sequence_number().await? + ); + + // To tackle other specific scenarios, a Sequencer can also be provided. + // Here we try to tackle a concurrent scenario where different trading bots running in the same + // process are utilizing the same account to issue long-term orders. + // Note: here, orders may reach the network out-of-order, resulting in a sequencing error. + let custom_sequencer = CustomSequencer::new(placer.fetch_sequence_number().await?); + let mut placer1 = OrderPlacer::connect().await?; + let mut placer2 = OrderPlacer::connect().await?; + placer1.client.with_sequencer(custom_sequencer.clone()); + placer2.client.with_sequencer(custom_sequencer.clone()); + + tokio::try_join!(placer1.place_order(), placer2.place_order())?; + sleep(Duration::from_secs(4)).await; + tracing::info!( + "(After CustomSequencer, two orders) Sequence number: {}", + placer.fetch_sequence_number().await? + ); + + Ok(()) +} diff --git a/v4-client-rs/client/examples/support/constants.rs b/v4-client-rs/client/examples/support/constants.rs new file mode 100644 index 00000000..b9ba39cf --- /dev/null +++ b/v4-client-rs/client/examples/support/constants.rs @@ -0,0 +1 @@ +pub const TEST_MNEMONIC: &str = "mirror actor skill push coach wait confirm orchard lunch mobile athlete gossip awake miracle matter bus reopen team ladder lazy list timber render wait"; diff --git a/v4-client-rs/client/examples/support/mod.rs b/v4-client-rs/client/examples/support/mod.rs new file mode 100644 index 00000000..a59bb337 --- /dev/null +++ b/v4-client-rs/client/examples/support/mod.rs @@ -0,0 +1,6 @@ +pub mod constants; +#[allow(dead_code)] +pub mod order_book; +#[allow(dead_code)] +#[cfg(feature = "telemetry")] +pub mod telemetry; diff --git a/v4-client-rs/client/examples/support/order_book.rs b/v4-client-rs/client/examples/support/order_book.rs new file mode 100644 index 00000000..9cbea9ce --- /dev/null +++ b/v4-client-rs/client/examples/support/order_book.rs @@ -0,0 +1,149 @@ +use bigdecimal::Zero; +use derive_more::{Deref, DerefMut}; +use dydx_v4_rust::indexer::{ + Feed, OrderBookResponseObject, OrderbookResponsePriceLevel, OrdersMessage, Price, Quantity, +}; +use std::collections::BTreeMap; +use std::fmt; +use tokio::sync::watch; +use tokio::task::JoinHandle; + +#[derive(Deref, DerefMut)] +pub struct LiveOrderBook { + handle: JoinHandle<()>, + #[deref] + #[deref_mut] + rx: watch::Receiver, +} + +impl LiveOrderBook { + pub fn new(feed: Feed) -> Self { + let (tx, rx) = watch::channel(OrderBook::default()); + let task = LiveOrderBookTask { feed, tx }; + let handle = tokio::spawn(task.entrypoint()); + Self { handle, rx } + } +} + +impl Drop for LiveOrderBook { + fn drop(&mut self) { + self.handle.abort(); + } +} + +struct LiveOrderBookTask { + feed: Feed, + tx: watch::Sender, +} + +impl LiveOrderBookTask { + async fn entrypoint(mut self) { + while let 
Some(msg) = self.feed.recv().await { + match msg { + OrdersMessage::Initial(upd) => { + self.tx.send_modify(move |order_book| { + order_book.update_bids(upd.contents.bids); + order_book.update_asks(upd.contents.asks); + }); + } + OrdersMessage::Update(upd) => { + self.tx.send_modify(move |order_book| { + if let Some(bids) = upd.contents.bids { + order_book.update_bids(bids); + } + if let Some(asks) = upd.contents.asks { + order_book.update_asks(asks); + } + }); + } + } + } + } +} + +pub struct Quote<'a> { + pub price: &'a Price, + pub quantity: &'a Quantity, +} + +impl<'a> From<(&'a Price, &'a Quantity)> for Quote<'a> { + fn from((price, quantity): (&'a Price, &'a Quantity)) -> Self { + Self { price, quantity } + } +} + +pub struct Spread<'a> { + pub bid: Quote<'a>, + pub ask: Quote<'a>, +} + +#[derive(Default, Debug)] +pub struct OrderBook { + /// Prices you can sell + pub bids: BTreeMap, + /// Prices you can buy (how much the seller asks) + pub asks: BTreeMap, +} + +impl OrderBook { + pub fn bids(&self) -> impl Iterator { + self.bids.iter().map(Quote::from).rev() + } + + pub fn asks(&self) -> impl Iterator { + self.asks.iter().map(Quote::from) + } + + pub fn spread(&self) -> Option { + let bid = self.bids().next()?; + let ask = self.asks().next()?; + Some(Spread { bid, ask }) + } + + fn update(map: &mut BTreeMap, levels: Vec) { + for level in levels { + if level.size.is_zero() { + map.remove(&level.price); + } else { + map.insert(level.price, level.size); + } + } + } + + pub fn update_bids(&mut self, bids: Vec) { + Self::update(&mut self.bids, bids); + } + + pub fn update_asks(&mut self, asks: Vec) { + Self::update(&mut self.asks, asks); + } + + pub fn table(&self) -> OrderBookTable { + OrderBookTable { inner: self } + } +} + +impl From for OrderBook { + fn from(response: OrderBookResponseObject) -> Self { + let mut order_book = OrderBook::default(); + order_book.update_bids(response.bids); + order_book.update_asks(response.asks); + order_book + } +} + +pub struct OrderBookTable<'a> { + inner: &'a OrderBook, +} + +impl<'a> fmt::Display for OrderBookTable<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + for (price, size) in &self.inner.bids { + writeln!(f, "BID: {} - {}", price, size)?; + } + for (price, size) in &self.inner.asks { + writeln!(f, "ASK: {} - {}", price, size)?; + } + Ok(()) + } +} diff --git a/v4-client-rs/client/examples/support/telemetry.rs b/v4-client-rs/client/examples/support/telemetry.rs new file mode 100644 index 00000000..5c6c2513 --- /dev/null +++ b/v4-client-rs/client/examples/support/telemetry.rs @@ -0,0 +1,37 @@ +use anyhow::{anyhow as err, Result}; +use metrics_exporter_tcp::Error; +use std::io::ErrorKind; + +/// Setup telemetry +pub async fn metrics_dashboard() -> Result<()> { + let default_port = 5000; + let alt_port = 5049; + // Try server on default port (5000) else try listen on 5049 + let port = match setup_server(default_port) { + Ok(()) => default_port, + Err(e) => match e { + Error::Io(e) => { + if matches!(e.kind(), ErrorKind::AddrInUse) { + setup_server(alt_port) + .map_err(|e| err!("Unable to setup telemetry server on port {default_port} or {alt_port}: {e}"))?; + alt_port + } else { + return Err(e.into()); + } + } + _ => return Err(e.into()), + }, + }; + + tracing::info!("== THIS EXAMPLE USES `https://github.com/metrics-rs/metrics/tree/main/metrics-observer` AS A METRICS EXPORTER (on TCP port {port}) =="); + + Ok(()) +} + +fn setup_server(port: u16) -> Result<(), Error> { + let addr: std::net::SocketAddr = 
format!("0.0.0.0:{port}") + .parse() + .expect("Failed parsing SocketAddr"); + let builder = metrics_exporter_tcp::TcpBuilder::new().listen_address(addr); + builder.install() +} diff --git a/v4-client-rs/client/examples/transfer.rs b/v4-client-rs/client/examples/transfer.rs new file mode 100644 index 00000000..5585ca17 --- /dev/null +++ b/v4-client-rs/client/examples/transfer.rs @@ -0,0 +1,37 @@ +mod support; +use anyhow::{Error, Result}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::node::{NodeClient, Wallet}; +use support::constants::TEST_MNEMONIC; + +pub struct Transferor { + client: NodeClient, + wallet: Wallet, +} + +impl Transferor { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let client = NodeClient::connect(config.node).await?; + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + Ok(Self { client, wallet }) + } +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + let mut transferor = Transferor::connect().await?; + let mut account = transferor.wallet.account(0, &mut transferor.client).await?; + + let sender = account.subaccount(0)?; + let recipient = account.subaccount(1)?; + + let tx_hash = transferor + .client + .transfer(&mut account, sender, recipient, 1) + .await?; + tracing::info!("Transfer transaction hash: {:?}", tx_hash); + + Ok(()) +} diff --git a/v4-client-rs/client/examples/utility_endpoint.rs b/v4-client-rs/client/examples/utility_endpoint.rs new file mode 100644 index 00000000..1db64b36 --- /dev/null +++ b/v4-client-rs/client/examples/utility_endpoint.rs @@ -0,0 +1,42 @@ +mod support; +use anyhow::{Error, Result}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::indexer::IndexerClient; +use dydx_v4_rust::node::Wallet; +use support::constants::TEST_MNEMONIC; + +pub struct Rester { + indexer: IndexerClient, + wallet: Wallet, +} + +impl Rester { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let indexer = IndexerClient::new(config.indexer); + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + Ok(Self { indexer, wallet }) + } +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + let rester = Rester::connect().await?; + let account = rester.wallet.account_offline(0)?; + let indexer = rester.indexer; + + // Test values + let address = account.address(); + + let time = indexer.utility().get_time().await?; + tracing::info!("Time: {time:?}"); + + let height = indexer.utility().get_height().await?; + tracing::info!("Height: {height:?}"); + + let screen = indexer.utility().get_screen(address).await?; + tracing::info!("Screen for address {address}: {screen:?}"); + + Ok(()) +} diff --git a/v4-client-rs/client/examples/validator_get.rs b/v4-client-rs/client/examples/validator_get.rs new file mode 100644 index 00000000..8f2b169e --- /dev/null +++ b/v4-client-rs/client/examples/validator_get.rs @@ -0,0 +1,120 @@ +mod support; +use anyhow::{Error, Result}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::node::{NodeClient, Wallet}; +use support::constants::TEST_MNEMONIC; + +const ETH_USD_PAIR_ID: u32 = 1; + +pub struct Getter { + client: NodeClient, + wallet: Wallet, +} + +impl Getter { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let client = NodeClient::connect(config.node).await?; + let wallet = 
Wallet::from_mnemonic(TEST_MNEMONIC)?; + Ok(Self { client, wallet }) + } +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + let mut getter = Getter::connect().await?; + // Test values + let account = getter.wallet.account_offline(0)?; + let address = account.address().clone(); + let subaccount = account.subaccount(0)?; + + let account = getter.client.get_account(&address).await?; + tracing::info!("Get account: {account:?}"); + + let balances = getter.client.get_account_balances(&address).await?; + tracing::info!("Get account balances: {balances:?}"); + + let balance = getter + .client + .get_account_balance(&address, &"adv4tnt".parse()?) + .await?; + tracing::info!("Get account balance: {balance:?}"); + + let node_info = getter.client.get_node_info().await?; + let version = node_info + .application_version + .map(|v| format!("{} v{} @{}", v.name, v.version, &v.git_commit[0..7])); + tracing::info!( + "Get node info (node version): {}", + version.unwrap_or("unknown".into()) + ); + + let block = getter.client.get_latest_block().await?; + tracing::info!("Get latest block: {block:?}"); + + let height = getter.client.get_latest_block_height().await?; + tracing::info!("Get latest block height: {height:?}"); + + let stats = getter.client.get_user_stats(&address).await?; + tracing::info!("Get user stats: {stats:?}"); + + let validators = getter.client.get_all_validators(None).await?; + tracing::info!("Get all validators: {validators:?}"); + + let subaccount = getter.client.get_subaccount(&subaccount).await?; + tracing::info!("Get subaccount: {subaccount:?}"); + + let subaccounts = getter.client.get_subaccounts().await?; + tracing::info!("Get subaccounts: {subaccounts:?}"); + + let clob_pair = getter.client.get_clob_pair(ETH_USD_PAIR_ID).await?; + tracing::info!("Get clob pair: {clob_pair:?}"); + + let clob_pairs = getter.client.get_clob_pairs(None).await?; + tracing::info!("Get clob pairs: {clob_pairs:?}"); + + let price = getter.client.get_price(ETH_USD_PAIR_ID).await?; + tracing::info!("Get price: {price:?}"); + + let prices = getter.client.get_prices(None).await?; + tracing::info!("Get prices: {prices:?}"); + + let perpetual = getter.client.get_perpetual(ETH_USD_PAIR_ID).await?; + tracing::info!("Get perpetual: {perpetual:?}"); + + let perpetuals = getter.client.get_perpetuals(None).await?; + tracing::info!("Get perpetuals: {perpetuals:?}"); + + let equity_tier_limit = getter.client.get_equity_tier_limit_config().await?; + tracing::info!("Get equity tier limit config: {equity_tier_limit:?}"); + + let delegations = getter + .client + .get_delegator_delegations(address.clone(), None) + .await?; + tracing::info!("Get delegator delegations: {delegations:?}"); + + let unbonding_delegations = getter + .client + .get_delegator_unbonding_delegations(address.clone(), None) + .await?; + tracing::info!("Get delegator unbonding delegations: {unbonding_delegations:?}"); + + let bridge_messages = getter + .client + .get_delayed_complete_bridge_messages(address.clone()) + .await?; + tracing::info!("Get delayed complete bridge messages: {bridge_messages:?}"); + + let fee_tiers = getter.client.get_fee_tiers().await?; + tracing::info!("Get fee tiers: {fee_tiers:?}"); + + let user_fee_tier = getter.client.get_user_fee_tier(address.clone()).await?; + tracing::info!("Get user fee tier: {user_fee_tier:?}"); + + let reward_params = getter.client.get_rewards_params().await?; + tracing::info!("Get reward params: {reward_params:?}"); + + Ok(()) +} diff 
--git a/v4-client-rs/client/examples/validator_post.rs b/v4-client-rs/client/examples/validator_post.rs new file mode 100644 index 00000000..88cc095a --- /dev/null +++ b/v4-client-rs/client/examples/validator_post.rs @@ -0,0 +1,82 @@ +mod support; +use anyhow::{Error, Result}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::node::{NodeClient, Wallet}; +use rand::{thread_rng, Rng}; +use support::constants::TEST_MNEMONIC; +use tokio::time::{sleep, Duration}; +use v4_proto_rs::dydxprotocol::clob::{ + order::{ConditionType, GoodTilOneof, Side, TimeInForce}, + Order, OrderId, +}; +use v4_proto_rs::dydxprotocol::subaccounts::SubaccountId; + +const ETH_USD_PAIR_ID: u32 = 1; +const ETH_USD_QUANTUMS: u64 = 10_000_000; // calculated based on market +const SUBTICKS: u64 = 40_000_000_000; // calculated based on market and price +const ORDER_FLAGS_SHORT_TERM: u32 = 0; // for short term order is 0 +const N_ORDERS: usize = 6; + +pub struct OrderPlacer { + client: NodeClient, + wallet: Wallet, +} + +impl OrderPlacer { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let client = NodeClient::connect(config.node).await?; + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + Ok(Self { client, wallet }) + } +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + #[cfg(feature = "telemetry")] + support::telemetry::metrics_dashboard().await?; + let mut placer = OrderPlacer::connect().await?; + let mut account = placer.wallet.account(0, &mut placer.client).await?; + + let subaccount = SubaccountId { + owner: account.address().to_string(), + number: 0, + }; + let order_ids = (0..N_ORDERS) + .map(|_| OrderId { + subaccount_id: Some(subaccount.clone()), + client_id: thread_rng().gen_range(0..100_000_000), + order_flags: ORDER_FLAGS_SHORT_TERM, + clob_pair_id: ETH_USD_PAIR_ID, + }) + .collect::>(); + + // Push some orders + for id in &order_ids { + // Short term orders have a maximum validity of 20 blocks + let til_height = placer.client.get_latest_block_height().await?.ahead(10).0; + let order = Order { + order_id: Some(id.clone()), + side: Side::Sell.into(), + quantums: ETH_USD_QUANTUMS, + subticks: SUBTICKS, + time_in_force: TimeInForce::Unspecified.into(), + reduce_only: false, + client_metadata: 0u32, + condition_type: ConditionType::Unspecified.into(), + conditional_order_trigger_subticks: 0u64, + good_til_oneof: Some(GoodTilOneof::GoodTilBlock(til_height)), + }; + + let tx_hash = placer.client.place_order(&mut account, order).await?; + tracing::info!( + "Broadcast order ({}) transaction hash: {:?}", + id.client_id, + tx_hash + ); + sleep(Duration::from_secs(2)).await; + } + + Ok(()) +} diff --git a/v4-client-rs/client/examples/wallet.rs b/v4-client-rs/client/examples/wallet.rs new file mode 100644 index 00000000..2631d1c2 --- /dev/null +++ b/v4-client-rs/client/examples/wallet.rs @@ -0,0 +1,102 @@ +mod support; +use anyhow::{Error, Result}; +use dydx_v4_rust::config::ClientConfig; +#[cfg(feature = "noble")] +use dydx_v4_rust::noble::NobleClient; +use dydx_v4_rust::node::{NodeClient, Wallet}; +use support::constants::TEST_MNEMONIC; + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + + // Create a `Wallet` from a mnemonic + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + + // A `Wallet` is used to derive an `Account` used to sign transactions + let account0 = wallet.account_offline(0)?; + + // Multiple 
accounts can be derived from your mnemonic/master private key + let account1 = wallet.account_offline(1)?; + + // Some online attributes like an up-to-date sequence number are required for some + // order/transfer methods in `NodeClient`'s operations. + // This is usually not required if `NodeClient` is allowed to `manage_sequencing = true`. + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let mut client = NodeClient::connect(config.node).await?; + let account_upd = wallet.account(0, &mut client).await?; + + // An `Account` is mostly identified by its `Address` + let address = account0.address(); + tracing::info!( + "Account '0' address: {address} | sequence-number: {} | account-number (online ID): {}", + account0.sequence_number(), + account0.account_number() + ); + tracing::info!( + "Account '0' (synced-values) address: {} | sequence-number: {} | account-number (online ID): {}", + account_upd.address(), account_upd.sequence_number(), account_upd.account_number() + ); + tracing::info!("Account '1' address: {}", account1.address()); + + // dYdX uses the concept of "subaccounts" to help isolate funds and manage risk + let subaccount00 = account0.subaccount(0)?; + let subaccount01 = account0.subaccount(1)?; + + // Different subaccounts under the same account have the same address, being differentiated by + // their subaccount number + tracing::info!( + "Account '0' subaccount '0': address {} | number {}", + subaccount00.address, + subaccount00.number + ); + tracing::info!( + "Account '0' subaccount '1': address {} | number {}", + subaccount01.address, + subaccount01.number + ); + + // Subaccounts 0..=127 are parent subaccounts. These subaccounts can have multiple positions + // opened and all positions are cross-margined. + // Subaccounts 128..=128000 are child subaccounts. These subaccounts can only have one position + // open. + tracing::info!( + "Is subaccount '0' a parent subaccount? {:?}", + subaccount00.is_parent() + ); + tracing::info!( + "The parent subaccount of the subaccount '256' is: {:?}", + account0.subaccount(256)?.parent() + ); + tracing::info!( + "Is the parent of subaccount '256' equal to subaccount '0'? 
{:?}", + account0.subaccount(256)?.parent() == subaccount00 + ); + + #[cfg(feature = "noble")] + { + // To derive a Noble account (used to transfer USDC in and out of dYdX through Cosmos IBC) + // the same wallet instance as before can be used + let noble_account0 = wallet.noble().account_offline(0)?; + tracing::info!( + "Account '0' (Noble) address: {} | sequence-number: {}", + noble_account0.address(), + noble_account0.sequence_number() + ); + + // Noble accounts also use sequence numbers + if let Some(noble_config) = config.noble { + let mut noble = NobleClient::connect(noble_config).await?; + let noble_account_upd = wallet.noble().account(0, &mut noble).await?; + tracing::info!( + "Account '0' (Noble, synced-values) address: {} | sequence-number: {}", + noble_account_upd.address(), + noble_account_upd.sequence_number() + ); + } else { + tracing::warn!("A [noble] configuration is required for some parts of this example."); + } + } + + Ok(()) +} diff --git a/v4-client-rs/client/examples/websockets.rs b/v4-client-rs/client/examples/websockets.rs new file mode 100644 index 00000000..2851412e --- /dev/null +++ b/v4-client-rs/client/examples/websockets.rs @@ -0,0 +1,89 @@ +mod support; +use anyhow::{Error, Result}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::indexer::{ + BlockHeightMessage, CandleResolution, CandlesMessage, Feed, IndexerClient, MarketsMessage, + OrdersMessage, ParentSubaccountsMessage, SubaccountsMessage, Ticker, TradesMessage, +}; +use dydx_v4_rust::node::Wallet; +use support::constants::TEST_MNEMONIC; +use tokio::select; + +pub struct Feeder { + trades_feed: Feed, + orders_feed: Feed, + markets_feed: Feed, + subaccounts_feed: Feed, + parent_subaccounts_feed: Feed, + candles_feed: Feed, + height_feed: Feed, +} + +impl Feeder { + pub async fn connect() -> Result { + tracing_subscriber::fmt() + .with_max_level(tracing::Level::DEBUG) + .try_init() + .map_err(Error::msg)?; + #[cfg(feature = "telemetry")] + support::telemetry::metrics_dashboard().await?; + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let mut indexer = IndexerClient::new(config.indexer); + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + + let account = wallet.account_offline(0)?; + let subaccount = account.subaccount(127)?; + + let ticker = Ticker::from("ETH-USD"); + let markets_feed = indexer.feed().markets(false).await?; + let trades_feed = indexer.feed().trades(&ticker, false).await?; + let orders_feed = indexer.feed().orders(&ticker, false).await?; + let candles_feed = indexer + .feed() + .candles(&ticker, CandleResolution::M1, false) + .await?; + let subaccounts_feed = indexer + .feed() + .subaccounts(subaccount.clone(), false) + .await?; + let parent_subaccounts_feed = indexer + .feed() + .parent_subaccounts(subaccount.parent(), false) + .await?; + let height_feed = indexer.feed().block_height(false).await?; + + Ok(Self { + trades_feed, + markets_feed, + orders_feed, + candles_feed, + subaccounts_feed, + parent_subaccounts_feed, + height_feed, + }) + } + + async fn step(&mut self) { + select! 
{ + msg = self.trades_feed.recv() => if let Some(msg) = msg { tracing::info!("Received trades message: {msg:?}") }, + msg = self.orders_feed.recv() => if let Some(msg) = msg { tracing::info!("Received orders message: {msg:?}") }, + msg = self.markets_feed.recv() => if let Some(msg) = msg { tracing::info!("Received markets message: {msg:?}") }, + msg = self.subaccounts_feed.recv() => if let Some(msg) = msg { tracing::info!("Received subaccounts message: {msg:?}") }, + msg = self.parent_subaccounts_feed.recv() => if let Some(msg) = msg { tracing::info!("Received parent subaccounts message: {msg:?}") }, + msg = self.candles_feed.recv() => if let Some(msg) = msg { tracing::info!("Received candles message: {msg:?}") }, + msg = self.height_feed.recv() => if let Some(msg) = msg { tracing::info!("Received block height message: {msg:?}") }, + } + } + + async fn entrypoint(mut self) { + loop { + self.step().await; + } + } +} +#[tokio::main] +async fn main() -> Result<()> { + let feeder = Feeder::connect().await?; + feeder.entrypoint().await; + Ok(()) +} diff --git a/v4-client-rs/client/examples/withdraw.rs b/v4-client-rs/client/examples/withdraw.rs new file mode 100644 index 00000000..0e68d239 --- /dev/null +++ b/v4-client-rs/client/examples/withdraw.rs @@ -0,0 +1,37 @@ +mod support; +use anyhow::{Error, Result}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::node::{NodeClient, Wallet}; +use support::constants::TEST_MNEMONIC; + +pub struct Transferor { + client: NodeClient, + wallet: Wallet, +} + +impl Transferor { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let client = NodeClient::connect(config.node).await?; + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + Ok(Self { client, wallet }) + } +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + let mut transferor = Transferor::connect().await?; + let mut account = transferor.wallet.account(0, &mut transferor.client).await?; + + let recipient = account.address().clone(); + let sender = account.subaccount(0)?; + + let tx_hash = transferor + .client + .withdraw(&mut account, sender, recipient, 1) + .await?; + tracing::info!("Withdraw transaction hash: {:?}", tx_hash); + + Ok(()) +} diff --git a/v4-client-rs/client/examples/withdraw_other.rs b/v4-client-rs/client/examples/withdraw_other.rs new file mode 100644 index 00000000..0a0f6308 --- /dev/null +++ b/v4-client-rs/client/examples/withdraw_other.rs @@ -0,0 +1,75 @@ +mod support; +use anyhow::{Error, Result}; +use dydx_v4_rust::config::ClientConfig; +use dydx_v4_rust::node::{NodeClient, Wallet}; +use std::iter::once; +use support::constants::TEST_MNEMONIC; +use v4_proto_rs::{ + dydxprotocol::{sending::MsgWithdrawFromSubaccount, subaccounts::SubaccountId}, + ToAny, +}; + +pub struct Transferor { + client: NodeClient, + wallet: Wallet, +} + +impl Transferor { + pub async fn connect() -> Result { + let config = ClientConfig::from_file("client/tests/testnet.toml").await?; + let client = NodeClient::connect(config.node).await?; + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + Ok(Self { client, wallet }) + } +} + +#[tokio::main] +async fn main() -> Result<()> { + tracing_subscriber::fmt().try_init().map_err(Error::msg)?; + let mut transferor = Transferor::connect().await?; + let account = transferor.wallet.account(0, &mut transferor.client).await?; + let mut client = transferor.client; + + let amount = 1_u64; + + let recipient = 
account.address().clone(); + let sender = SubaccountId { + owner: recipient.to_string(), + number: 0, + }; + + // Simulate transaction + let msg = MsgWithdrawFromSubaccount { + sender: Some(sender.clone()), + recipient: recipient.to_string(), + asset_id: 0, + quantums: amount, + } + .to_any(); + let simulated_tx = client + .builder + .build_transaction(&account, once(msg), None)?; + let simulation = client.simulate(&simulated_tx).await?; + tracing::info!("Simulation: {:?}", simulation); + + let fee = client.builder.calculate_fee(Some(simulation.gas_used))?; + tracing::info!("Total fee: {:?}", fee); + + let fee_amount: u64 = fee.amount[0].amount.try_into()?; + + // Issue transaction + let final_msg = MsgWithdrawFromSubaccount { + sender: Some(sender), + recipient: recipient.into(), + asset_id: 0, + quantums: amount - fee_amount, + } + .to_any(); + let final_tx = client + .builder + .build_transaction(&account, once(final_msg), Some(fee))?; + let tx_hash = client.broadcast_transaction(final_tx).await?; + tracing::info!("Withdraw transaction hash: {:?}", tx_hash); + + Ok(()) +} diff --git a/v4-client-rs/client/src/config.rs b/v4-client-rs/client/src/config.rs new file mode 100644 index 00000000..5ed33acf --- /dev/null +++ b/v4-client-rs/client/src/config.rs @@ -0,0 +1,33 @@ +#[cfg(feature = "faucet")] +use super::faucet::FaucetConfig; +#[cfg(feature = "noble")] +use super::noble::NobleConfig; +use super::{indexer::IndexerConfig, node::NodeConfig}; +use anyhow::Error; +use serde::Deserialize; +use std::path::Path; +use tokio::fs; + +/// Serves as a configuration wrapper over configurations for specific clients. +#[derive(Debug, Deserialize)] +pub struct ClientConfig { + /// Configuration for [`IndexerClient`](crate::indexer::IndexerClient) + pub indexer: IndexerConfig, + /// Configuration for [`NodeClient`](crate::node::NodeClient) + pub node: NodeConfig, + /// Configuration for [`FaucetClient`](crate::faucet::FaucetClient) + #[cfg(feature = "faucet")] + pub faucet: Option, + /// Configuration for [`NobleClient`](crate::noble::NobleClient) + #[cfg(feature = "noble")] + pub noble: Option, +} + +impl ClientConfig { + /// Creates a new `ClientConfig` instance from a TOML file at the given path + pub async fn from_file(path: impl AsRef) -> Result { + let toml_str = fs::read_to_string(path).await?; + let config = toml::from_str(&toml_str)?; + Ok(config) + } +} diff --git a/v4-client-rs/client/src/faucet.rs b/v4-client-rs/client/src/faucet.rs new file mode 100644 index 00000000..d75032f1 --- /dev/null +++ b/v4-client-rs/client/src/faucet.rs @@ -0,0 +1,84 @@ +pub use crate::indexer::{Address, Subaccount, SubaccountNumber, Usdc}; +use anyhow::{anyhow as err, Error}; +use bigdecimal::num_traits::ToPrimitive; +use reqwest::Client; +use serde::{Deserialize, Serialize}; + +/// Configuration for the Faucet client. +#[derive(Debug, Deserialize)] +pub struct FaucetConfig { + /// The base url of the faucet service. + pub endpoint: String, +} + +/// [Faucet](https://docs.dydx.exchange/infrastructure_providers-network/faucet) +/// serves as a source of funds for test purposes. +/// +/// See also [What is a Crypto Faucet?](https://dydx.exchange/crypto-learning/crypto-faucet). +/// +/// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/faucet_endpoint.rs). 
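For orientation, a minimal usage sketch of the faucet client described above. The helper name is hypothetical; it assumes the `faucet` feature is enabled, a `[faucet]` section is present in `client/tests/testnet.toml`, and the same test mnemonic used by the examples.

    use anyhow::{anyhow as err, Error};
    use dydx_v4_rust::config::ClientConfig;
    use dydx_v4_rust::faucet::FaucetClient;
    use dydx_v4_rust::node::Wallet;

    async fn fund_test_account(mnemonic: &str) -> Result<(), Error> {
        let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
        let faucet = FaucetClient::new(config.faucet.ok_or_else(|| err!("missing [faucet] config"))?);
        let account = Wallet::from_mnemonic(mnemonic)?.account_offline(0)?;
        // Top up the account's native (gas) token balance on testnet.
        faucet.fill_native(account.address()).await?;
        Ok(())
    }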
+#[derive(Debug)] +pub struct FaucetClient { + config: FaucetConfig, + client: Client, +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +struct FillReq<'t> { + address: &'t Address, + subaccount_number: &'t SubaccountNumber, + amount: u64, +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +struct FillNativeReq<'t> { + address: &'t Address, +} + +impl FaucetClient { + /// Creates a new `FaucetClient` + pub fn new(config: FaucetConfig) -> Self { + Self { + config, + client: Client::default(), + } + } + + /// add USDC to a subaccount + pub async fn fill(&self, subaccount: &Subaccount, amount: &Usdc) -> Result<(), Error> { + const URI: &str = "/faucet/tokens"; + let url = format!("{}{URI}", self.config.endpoint); + let body = FillReq { + address: &subaccount.address, + subaccount_number: &subaccount.number, + amount: amount + .to_u64() + .ok_or_else(|| err!("Failed converting USDC amount to u64"))?, + }; + let _resp = self + .client + .post(url) + .json(&body) + .send() + .await? + .error_for_status()?; + Ok(()) + } + + /// add native dYdX testnet token to an address + pub async fn fill_native(&self, address: &Address) -> Result<(), Error> { + const URI: &str = "/faucet/native-token"; + let url = format!("{}{URI}", self.config.endpoint); + let body = FillNativeReq { address }; + let _resp = self + .client + .post(url) + .json(&body) + .send() + .await? + .error_for_status()?; + Ok(()) + } +} diff --git a/v4-client-rs/client/src/indexer/config.rs b/v4-client-rs/client/src/indexer/config.rs new file mode 100644 index 00000000..64248ac6 --- /dev/null +++ b/v4-client-rs/client/src/indexer/config.rs @@ -0,0 +1,13 @@ +pub use crate::indexer::{rest::RestConfig, sock::SockConfig}; +use serde::Deserialize; + +/// Indexer client configuration. +#[derive(Clone, Debug, Deserialize)] +pub struct IndexerConfig { + /// Indexer REST client configuration. + #[serde(alias = "http")] + pub rest: RestConfig, + /// Indexer Websocket client configuration. + #[serde(alias = "ws")] + pub sock: SockConfig, +} diff --git a/v4-client-rs/client/src/indexer/mod.rs b/v4-client-rs/client/src/indexer/mod.rs new file mode 100644 index 00000000..8f26c0ab --- /dev/null +++ b/v4-client-rs/client/src/indexer/mod.rs @@ -0,0 +1,51 @@ +/// Indexer client configuration. +pub mod config; +mod rest; +mod sock; +/// Tokens. +pub mod tokens; +/// Types for Indexer data. +pub mod types; + +pub use config::IndexerConfig; +pub use rest::*; +pub use sock::*; +pub use tokens::*; +pub use types::*; + +/// Indexer client. +#[derive(Debug)] +pub struct IndexerClient { + rest: RestClient, + sock: SockClient, +} + +impl IndexerClient { + /// Create a new Indexer client. + pub fn new(config: IndexerConfig) -> Self { + Self { + rest: RestClient::new(config.rest), + sock: SockClient::new(config.sock), + } + } + + /// Get accounts query dispatcher. + pub fn accounts(&self) -> rest::Accounts { + self.rest.accounts() + } + + /// Get markets query dispatcher. + pub fn markets(&self) -> rest::Markets { + self.rest.markets() + } + + /// Get utility query dispatcher. + pub fn utility(&self) -> rest::Utility { + self.rest.utility() + } + + /// Get feeds dispatcher. 
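As a quick sketch of how the dispatchers above compose (the helper is illustrative; the queries it calls are defined in the REST modules that follow):

    use anyhow::Error;
    use dydx_v4_rust::indexer::IndexerClient;

    async fn indexer_overview(indexer: &IndexerClient) -> Result<(), Error> {
        // One dispatcher per API area: accounts(), markets(), utility().
        let height = indexer.utility().get_height().await?;
        let markets = indexer.markets().list_perpetual_markets(None).await?;
        tracing::info!("Indexer height: {height:?}; {} markets listed", markets.len());
        Ok(())
    }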
+ pub fn feed(&mut self) -> Feeds<'_> { + Feeds::new(&mut self.sock) + } +} diff --git a/v4-client-rs/client/src/indexer/rest/client/accounts.rs b/v4-client-rs/client/src/indexer/rest/client/accounts.rs new file mode 100644 index 00000000..5da56641 --- /dev/null +++ b/v4-client-rs/client/src/indexer/rest/client/accounts.rs @@ -0,0 +1,520 @@ +use super::*; +use anyhow::Error; + +/// Accounts dispatcher. +/// +/// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/account_endpoint.rs). +pub struct Accounts<'a> { + rest: &'a RestClient, +} + +impl<'a> Accounts<'a> { + /// Create a new accounts dispatcher. + pub(crate) fn new(rest: &'a RestClient) -> Self { + Self { rest } + } + + /// Query for all subaccounts infos. + /// + /// Compare with [`Self::get_subaccount`]. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getaddress). + pub async fn get_subaccounts(&self, address: &Address) -> Result { + let rest = &self.rest; + const URI: &str = "/v4/addresses"; + let url = format!("{}{URI}/{address}", rest.config.endpoint); + let resp = rest + .client + .get(url) + .send() + .await? + .error_for_status()? + .json() + .await?; + Ok(resp) + } + + /// Query for the subaccount, its current perpetual and asset positions, margin and collateral. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getsubaccount). + pub async fn get_subaccount( + &self, + subaccount: &Subaccount, + ) -> Result { + let rest = &self.rest; + const URI: &str = "/v4/addresses"; + let address = &subaccount.address; + let number = &subaccount.number; + let url = format!( + "{}{URI}/{address}/subaccountNumber/{number}", + rest.config.endpoint + ); + let subaccount = rest + .client + .get(url) + .send() + .await? + .error_for_status()? + .json::() + .await? + .subaccount; + Ok(subaccount) + } + + /// Query for the parent subaccount, its child subaccounts, equity, collateral and margin. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getparentsubaccount). + pub async fn get_parent_subaccount( + &self, + subaccount: &ParentSubaccount, + ) -> Result { + let rest = &self.rest; + const URI: &str = "/v4/addresses"; + let address = &subaccount.address; + let number = &subaccount.number; + let url = format!( + "{}{URI}/{address}/parentSubaccountNumber/{number}", + rest.config.endpoint + ); + let subaccount = rest + .client + .get(url) + .send() + .await? + .error_for_status()? + .json::() + .await? + .subaccount; + Ok(subaccount) + } + + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/close_all_positions.rs). + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#listpositions). + pub async fn list_positions( + &self, + subaccount: &Subaccount, + opts: Option, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/perpetualPositions"; + let url = format!("{}{URI}", rest.config.endpoint); + let query = Query { + address: &subaccount.address, + subaccount_number: &subaccount.number, + }; + let options = opts.unwrap_or_default(); + let positions = rest + .client + .get(url) + .query(&query) + .query(&options) + .send() + .await? + .error_for_status()? + .json::() + .await? + .positions; + Ok(positions) + } + + /// List all positions of a parent subaccount. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#listpositionsforparentsubaccount). 
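A sketch of the subaccount-level position query above. The helper name is made up, and only the `limit` option is set, since the remaining filter fields (declared in `options.rs` below, with generic parameters elided in this listing) are optional:

    use anyhow::Error;
    use dydx_v4_rust::indexer::{IndexerClient, ListPositionsOpts, Subaccount};

    async fn log_positions(indexer: &IndexerClient, subaccount: &Subaccount) -> Result<(), Error> {
        let opts = ListPositionsOpts {
            limit: Some(10),
            ..Default::default()
        };
        for position in indexer.accounts().list_positions(subaccount, Some(opts)).await? {
            tracing::info!("Position: {position:?}");
        }
        Ok(())
    }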
+ pub async fn list_parent_positions( + &self, + subaccount: &ParentSubaccount, + opts: Option, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/perpetualPositions"; + let url = format!("{}{URI}/parentSubaccountNumber", rest.config.endpoint); + let query = QueryParent { + address: &subaccount.address, + parent_subaccount_number: &subaccount.number, + }; + let options = opts.unwrap_or_default(); + let positions = rest + .client + .get(url) + .query(&query) + .query(&options) + .send() + .await? + .error_for_status()? + .json::() + .await? + .positions; + Ok(positions) + } + + /// Query for asset positions (size, buy/sell etc). + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getassetpositions). + pub async fn get_asset_positions( + &self, + subaccount: &Subaccount, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/assetPositions"; + let url = format!("{}{URI}", rest.config.endpoint); + let query = Query { + address: &subaccount.address, + subaccount_number: &subaccount.number, + }; + let positions = rest + .client + .get(url) + .query(&query) + .send() + .await? + .error_for_status()? + .json::() + .await? + .positions; + Ok(positions) + } + + /// Query for asset positions (size, buy/sell etc) for a parent subaccount. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getassetpositionsforparentsubaccount). + pub async fn get_parent_asset_positions( + &self, + subaccount: &ParentSubaccount, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/assetPositions"; + let url = format!("{}{URI}/parentSubaccountNumber", rest.config.endpoint); + let query = QueryParent { + address: &subaccount.address, + parent_subaccount_number: &subaccount.number, + }; + let positions = rest + .client + .get(url) + .query(&query) + .send() + .await? + .error_for_status()? + .json::() + .await? + .positions; + Ok(positions) + } + + /// Query for transfers between subaccounts. + /// + /// See also [`crate::node::NodeClient::transfer`]. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gettransfers). + pub async fn get_transfers( + &self, + subaccount: &Subaccount, + opts: Option, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/transfers"; + let url = format!("{}{URI}", rest.config.endpoint); + let query = Query { + address: &subaccount.address, + subaccount_number: &subaccount.number, + }; + let options = opts.unwrap_or_default(); + let transfers = rest + .client + .get(url) + .query(&query) + .query(&options) + .send() + .await? + .error_for_status()? + .json::() + .await? + .transfers; + Ok(transfers) + } + + /// Query for transfers between subaccounts associated with a parent subaccount. + /// + /// See also [`crate::node::NodeClient::transfer`]. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gettransfersforparentsubaccount). + pub async fn get_parent_transfers( + &self, + subaccount: &ParentSubaccount, + opts: Option, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/transfers"; + let url = format!("{}{URI}/parentSubaccountNumber", rest.config.endpoint); + let query = QueryParent { + address: &subaccount.address, + parent_subaccount_number: &subaccount.number, + }; + let options = opts.unwrap_or_default(); + let transfers = rest + .client + .get(url) + .query(&query) + .query(&options) + .send() + .await? + .error_for_status()? 
+ .json::() + .await? + .transfers; + Ok(transfers) + } + + /// Query for orders filtered by order params. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#listorders). + pub async fn list_orders( + &self, + subaccount: &Subaccount, + opts: Option, + ) -> Result { + let rest = &self.rest; + const URI: &str = "/v4/orders"; + let url = format!("{}{URI}", rest.config.endpoint); + let query = Query { + address: &subaccount.address, + subaccount_number: &subaccount.number, + }; + let options = opts.unwrap_or_default(); + let orders = rest + .client + .get(url) + .query(&query) + .query(&options) + .send() + .await? + .error_for_status()? + .json() + .await?; + Ok(orders) + } + + /// Query for orders filtered by order params of a parent subaccount. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#listordersforparentsubaccount). + pub async fn list_parent_orders( + &self, + subaccount: &ParentSubaccount, + opts: Option, + ) -> Result { + let rest = &self.rest; + const URI: &str = "/v4/orders"; + let url = format!("{}{URI}/parentSubaccountNumber", rest.config.endpoint); + let query = QueryParent { + address: &subaccount.address, + parent_subaccount_number: &subaccount.number, + }; + let options = opts.unwrap_or_default(); + let orders = rest + .client + .get(url) + .query(&query) + .query(&options) + .send() + .await? + .error_for_status()? + .json() + .await?; + Ok(orders) + } + + /// Query for the order. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getorder). + pub async fn get_order(&self, order_id: &OrderId) -> Result { + let rest = &self.rest; + const URI: &str = "/v4/orders"; + let url = format!("{}{URI}/{order_id}", rest.config.endpoint); + let order = rest + .client + .get(url) + .send() + .await? + .error_for_status()? + .json() + .await?; + Ok(order) + } + + /// Query for fills (i.e. filled orders data). + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getfills). + pub async fn get_fills( + &self, + subaccount: &Subaccount, + opts: Option, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/fills"; + let url = format!("{}{URI}", rest.config.endpoint); + let query = Query { + address: &subaccount.address, + subaccount_number: &subaccount.number, + }; + let options = opts.unwrap_or_default(); + let fills = rest + .client + .get(url) + .query(&query) + .query(&options) + .send() + .await? + .error_for_status()? + .json::() + .await? + .fills; + Ok(fills) + } + + /// Query for fills (i.e. filled orders data) for a parent subaccount. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getfillsforparentsubaccount). + pub async fn get_parent_fills( + &self, + subaccount: &ParentSubaccount, + opts: Option, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/fills"; + let url = format!("{}{URI}/parentSubaccountNumber", rest.config.endpoint); + let query = QueryParent { + address: &subaccount.address, + parent_subaccount_number: &subaccount.number, + }; + let options = opts.unwrap_or_default(); + let fills = rest + .client + .get(url) + .query(&query) + .query(&options) + .send() + .await? + .error_for_status()? + .json::() + .await? + .fills; + Ok(fills) + } + + /// Query for profit and loss report for the specified time/block range. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gethistoricalpnl). 
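A consumer-side sketch combining the fill and transfer queries above (hypothetical helper; `indexer` and `subaccount` are obtained as in the account_endpoint example, and a direct `bigdecimal` dependency is assumed for the sum):

    use anyhow::Error;
    use bigdecimal::BigDecimal;
    use dydx_v4_rust::indexer::{IndexerClient, Subaccount};

    async fn log_account_activity(indexer: &IndexerClient, subaccount: &Subaccount) -> Result<(), Error> {
        // Recent fills: each record carries side, size, price and the originating order id.
        for fill in indexer.accounts().get_fills(subaccount, None).await? {
            tracing::info!("Fill: {fill:?}");
        }
        // Transfers: sum the sizes of everything that touched this subaccount.
        let transfers = indexer.accounts().get_transfers(subaccount, None).await?;
        let total: BigDecimal = transfers
            .iter()
            .fold(BigDecimal::from(0), |acc, t| acc + &t.size);
        tracing::info!("Total transferred: {total}");
        Ok(())
    }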
+ pub async fn get_historical_pnl( + &self, + subaccount: &Subaccount, + opts: Option, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/historical-pnl"; + let url = format!("{}{URI}", rest.config.endpoint); + let query = Query { + address: &subaccount.address, + subaccount_number: &subaccount.number, + }; + let options = opts.unwrap_or_default(); + let pnls = rest + .client + .get(url) + .query(&query) + .query(&options) + .send() + .await? + .error_for_status()? + .json::() + .await? + .historical_pnl; + Ok(pnls) + } + + /// Query for profit and loss report for the specified time/block range of a parent subaccount. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gethistoricalpnlforparentsubaccount). + pub async fn get_parent_historical_pnl( + &self, + subaccount: &ParentSubaccount, + opts: Option, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/historical-pnl"; + let url = format!("{}{URI}/parentSubaccountNumber", rest.config.endpoint); + let query = QueryParent { + address: &subaccount.address, + parent_subaccount_number: &subaccount.number, + }; + let options = opts.unwrap_or_default(); + let pnls = rest + .client + .get(url) + .query(&query) + .query(&options) + .send() + .await? + .error_for_status()? + .json::() + .await? + .historical_pnl; + Ok(pnls) + } + + /// Get trader's rewards. + /// + /// See also [Trading Rewards](https://docs.dydx.exchange/concepts-trading/rewards_fees_and_parameters#trading-rewards). + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gettradingrewards). + pub async fn get_rewards( + &self, + address: &Address, + opts: Option, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/historicalBlockTradingRewards"; + let url = format!("{}{URI}/{address}", rest.config.endpoint); + let options = opts.unwrap_or_default(); + let rewards = rest + .client + .get(url) + .query(&options) + .send() + .await? + .error_for_status()? + .json::() + .await? + .rewards; + Ok(rewards) + } + + /// Get trader's rewards aggregation. + /// + /// See also [`Self::get_rewards`]. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getaggregations). + pub async fn get_rewards_aggregated( + &self, + address: &Address, + period: TradingRewardAggregationPeriod, + opts: Option, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/historicalTradingRewardAggregations"; + let url = format!("{}{URI}/{address}", rest.config.endpoint); + let options = opts.unwrap_or_default(); + let aggregated = rest + .client + .get(url) + .query(&[("period", &period)]) + .query(&options) + .send() + .await? + .error_for_status()? + .json::() + .await? + .rewards; + Ok(aggregated) + } +} diff --git a/v4-client-rs/client/src/indexer/rest/client/markets.rs b/v4-client-rs/client/src/indexer/rest/client/markets.rs new file mode 100644 index 00000000..eb45f526 --- /dev/null +++ b/v4-client-rs/client/src/indexer/rest/client/markets.rs @@ -0,0 +1,174 @@ +use super::*; +use anyhow::{anyhow as err, Error}; +use std::collections::HashMap; + +/// Markets dispatcher. +/// +/// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/market_endpoint.rs). +pub struct Markets<'a> { + rest: &'a RestClient, +} + +impl<'a> Markets<'a> { + /// Create a new markets dispatcher. 
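A small sketch of the two reward queries above; the helper is illustrative, and the `Daily` period variant comes from `TradingRewardAggregationPeriod` declared in `types.rs` below:

    use anyhow::Error;
    use dydx_v4_rust::indexer::{Address, IndexerClient, TradingRewardAggregationPeriod};

    async fn log_rewards(indexer: &IndexerClient, address: &Address) -> Result<(), Error> {
        let rewards = indexer.accounts().get_rewards(address, None).await?;
        let daily = indexer
            .accounts()
            .get_rewards_aggregated(address, TradingRewardAggregationPeriod::Daily, None)
            .await?;
        tracing::info!("{} reward records, {} daily aggregations", rewards.len(), daily.len());
        Ok(())
    }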
+ pub(crate) fn new(rest: &'a RestClient) -> Self { + Self { rest } + } + + /// Query for perpetual markets data. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#listperpetualmarkets). + pub async fn list_perpetual_markets( + &self, + opts: Option, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/perpetualMarkets"; + let url = format!("{}{URI}", rest.config.endpoint); + let options = opts.unwrap_or_default(); + let markets = rest + .client + .get(url) + .query(&options) + .send() + .await? + .error_for_status()? + .json::() + .await? + .markets; + Ok(markets) + } + + /// Query for the perpetual market. + pub async fn get_perpetual_market(&self, ticker: &Ticker) -> Result { + let mut markets = self + .list_perpetual_markets(Some(ListPerpetualMarketsOpts { + limit: Some(1), + ticker: Some(ticker.clone()), + })) + .await?; + markets + .remove(ticker) + .ok_or_else(|| err!("Market ticker not found in list Markets response")) + } + + /// Query for bids-asks for the perpetual market. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getperpetualmarket). + pub async fn get_perpetual_market_orderbook( + &self, + ticker: &Ticker, + ) -> Result { + let rest = &self.rest; + const URI: &str = "/v4/orderbooks/perpetualMarket"; + let url = format!("{}{URI}/{ticker}", rest.config.endpoint); + let orderbook = rest + .client + .get(url) + .send() + .await? + .error_for_status()? + .json() + .await?; + Ok(orderbook) + } + + /// Query for trades. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gettrades). + pub async fn get_trades( + &self, + ticker: &Ticker, + opts: Option, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/trades/perpetualMarket"; + let url = format!("{}{URI}/{ticker}", rest.config.endpoint); + let options = opts.unwrap_or_default(); + let trades = rest + .client + .get(url) + .query(&options) + .send() + .await? + .error_for_status()? + .json::() + .await? + .trades; + Ok(trades) + } + + /// Query for [candles](https://dydx.exchange/crypto-learning/candlestick-patterns). + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getcandles). + pub async fn get_candles( + &self, + ticker: &Ticker, + res: CandleResolution, + opts: Option, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/candles/perpetualMarkets"; + let url = format!("{}{URI}/{ticker}", rest.config.endpoint); + let options = opts.unwrap_or_default(); + let candles = rest + .client + .get(url) + .query(&[("resolution", &res)]) + .query(&options) + .send() + .await? + .error_for_status()? + .json::() + .await? + .candles; + Ok(candles) + } + + /// Query for funding till time/block specified. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gethistoricalfunding). + pub async fn get_historical_funding( + &self, + ticker: &Ticker, + opts: Option, + ) -> Result, Error> { + let rest = &self.rest; + const URI: &str = "/v4/historicalFunding"; + let url = format!("{}{URI}/{ticker}", rest.config.endpoint); + let options = opts.unwrap_or_default(); + let funding = rest + .client + .get(url) + .query(&options) + .send() + .await? + .error_for_status()? + .json::() + .await? + .historical_funding; + Ok(funding) + } + + /// Query for [sparklines](https://en.wikipedia.org/wiki/Sparkline). + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#get). 
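Putting the market queries above together, a sketch that pulls one market definition plus its current order book (helper name and ticker are illustrative):

    use anyhow::Error;
    use dydx_v4_rust::indexer::{IndexerClient, Ticker};

    async fn eth_market_snapshot(indexer: &IndexerClient) -> Result<(), Error> {
        let ticker = Ticker::from("ETH-USD");
        let market = indexer.markets().get_perpetual_market(&ticker).await?;
        let book = indexer
            .markets()
            .get_perpetual_market_orderbook(&ticker)
            .await?;
        tracing::info!("Market: {market:?}");
        tracing::info!("Order book: {book:?}");
        Ok(())
    }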
+ pub async fn get_sparklines( + &self, + period: SparklineTimePeriod, + ) -> Result { + let rest = &self.rest; + const URI: &str = "/v4/sparklines"; + let url = format!("{}{URI}", rest.config.endpoint); + let sparklines = rest + .client + .get(url) + .query(&[("timePeriod", &period)]) + .send() + .await? + .error_for_status()? + .json() + .await?; + Ok(sparklines) + } +} diff --git a/v4-client-rs/client/src/indexer/rest/client/mod.rs b/v4-client-rs/client/src/indexer/rest/client/mod.rs new file mode 100644 index 00000000..9b27af44 --- /dev/null +++ b/v4-client-rs/client/src/indexer/rest/client/mod.rs @@ -0,0 +1,58 @@ +pub mod accounts; +pub mod markets; +pub mod utility; + +use super::config::RestConfig; +use super::options::*; +use crate::indexer::{rest::types::*, types::*}; +use accounts::Accounts; +use markets::Markets; +use reqwest::Client; +use serde::Serialize; +use utility::Utility; + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +struct Query<'t> { + address: &'t Address, + subaccount_number: &'t SubaccountNumber, +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +struct QueryParent<'t> { + address: &'t Address, + parent_subaccount_number: &'t ParentSubaccountNumber, +} + +/// REST client to Indexer. +#[derive(Debug)] +pub(crate) struct RestClient { + config: RestConfig, + client: Client, +} + +impl RestClient { + /// Create a new Indexer REST client. + pub(crate) fn new(config: RestConfig) -> Self { + Self { + config, + client: Client::default(), + } + } + + /// Get accounts query dispatcher. + pub(crate) fn accounts(&self) -> Accounts<'_> { + Accounts::new(self) + } + + /// Get markets query dispatcher. + pub(crate) fn markets(&self) -> Markets<'_> { + Markets::new(self) + } + + /// Get utility query dispatcher. + pub(crate) fn utility(&self) -> Utility<'_> { + Utility::new(self) + } +} diff --git a/v4-client-rs/client/src/indexer/rest/client/utility.rs b/v4-client-rs/client/src/indexer/rest/client/utility.rs new file mode 100644 index 00000000..28a51e18 --- /dev/null +++ b/v4-client-rs/client/src/indexer/rest/client/utility.rs @@ -0,0 +1,71 @@ +use super::*; +use anyhow::Error; + +/// Other data dispatcher. +/// +/// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/utility_endpoint.rs). +pub struct Utility<'a> { + rest: &'a RestClient, +} + +impl<'a> Utility<'a> { + /// Create a new utility dispatcher. + pub(crate) fn new(rest: &'a RestClient) -> Self { + Self { rest } + } + + /// Current server time (UTC) of Indexer. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gettime). + pub async fn get_time(&self) -> Result { + let rest = &self.rest; + const URI: &str = "/v4/time"; + let url = format!("{}{URI}", rest.config.endpoint); + let resp = rest + .client + .get(url) + .send() + .await? + .error_for_status()? + .json() + .await?; + Ok(resp) + } + + /// Current block height and block time (UTC) parsed by Indexer. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getheight). + pub async fn get_height(&self) -> Result { + let rest = &self.rest; + const URI: &str = "/v4/height"; + let url = format!("{}{URI}", rest.config.endpoint); + let resp = rest + .client + .get(url) + .send() + .await? + .error_for_status()? + .json() + .await?; + Ok(resp) + } + + /// Query for screening results (compliance) of the address. + /// + /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#screen). 
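A sketch of a compliance check built on the screening endpoint documented above (hypothetical helper; the response fields are those of `ComplianceResponse` in `types.rs`):

    use anyhow::Error;
    use dydx_v4_rust::indexer::{Address, IndexerClient};

    async fn warn_if_restricted(indexer: &IndexerClient, address: &Address) -> Result<(), Error> {
        let screen = indexer.utility().get_screen(address).await?;
        if screen.restricted {
            tracing::warn!("Address {address} is restricted: {:?}", screen.reason);
        }
        Ok(())
    }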
+ pub async fn get_screen(&self, query: &Address) -> Result { + let rest = &self.rest; + const URI: &str = "/v4/screen"; + let url = format!("{}{URI}", rest.config.endpoint); + let resp = rest + .client + .get(url) + .query(&[("address", query)]) + .send() + .await? + .error_for_status()? + .json() + .await?; + Ok(resp) + } +} diff --git a/v4-client-rs/client/src/indexer/rest/config.rs b/v4-client-rs/client/src/indexer/rest/config.rs new file mode 100644 index 00000000..7d7d7e88 --- /dev/null +++ b/v4-client-rs/client/src/indexer/rest/config.rs @@ -0,0 +1,10 @@ +use serde::Deserialize; + +/// REST Indexer client configuration. +#[derive(Clone, Debug, Deserialize)] +pub struct RestConfig { + /// REST endpoint. + /// + /// You can select REST endpoints from [the list](https://docs.dydx.exchange/infrastructure_providers-network/resources#indexer-endpoints). + pub endpoint: String, +} diff --git a/v4-client-rs/client/src/indexer/rest/mod.rs b/v4-client-rs/client/src/indexer/rest/mod.rs new file mode 100644 index 00000000..dc71ef71 --- /dev/null +++ b/v4-client-rs/client/src/indexer/rest/mod.rs @@ -0,0 +1,13 @@ +mod client; +mod config; +mod options; +mod types; + +pub(crate) use client::RestClient; +pub use config::RestConfig; +pub use options::*; +pub use types::*; + +pub use client::accounts::Accounts; +pub use client::markets::Markets; +pub use client::utility::Utility; diff --git a/v4-client-rs/client/src/indexer/rest/options.rs b/v4-client-rs/client/src/indexer/rest/options.rs new file mode 100644 index 00000000..a9decada --- /dev/null +++ b/v4-client-rs/client/src/indexer/rest/options.rs @@ -0,0 +1,157 @@ +use crate::indexer::types::*; +use chrono::{DateTime, Utc}; +use serde::Serialize; + +/// Filter options for perpetual markets. +#[derive(Serialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct ListPerpetualMarketsOpts { + /// Limit. + pub limit: Option, + /// Ticker. + pub ticker: Option, +} + +/// Filter options for trades. +#[derive(Serialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct GetTradesOpts { + /// Limit. + pub limit: Option, + /// Block height. + pub created_before_or_at_height: Option, + /// Time. + pub created_before_or_at: Option>, +} + +/// Filter options for candles. +#[derive(Serialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct GetCandlesOpts { + /// Limit. + pub limit: Option, + /// Time. + #[serde(rename = "fromISO")] + pub from_iso: Option>, + /// Time. + #[serde(rename = "toISO")] + pub to_iso: Option>, +} + +/// Filter options for fundings. +#[derive(Serialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct GetHistoricalFundingOpts { + /// Limit. + pub limit: Option, + /// Block height. + pub effective_before_or_at_height: Option, + /// Time. + pub effective_before_or_at: Option>, +} + +/// Filter options for positions. +#[derive(Serialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct ListPositionsOpts { + /// Perpetual postion status. + pub status: Option, + /// Limit. + pub limit: Option, + /// Block height. + pub created_before_or_at_height: Option, + /// Time. + pub created_before_or_at: Option>, +} + +/// Filter options for transfers. +#[derive(Serialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct GetTransfersOpts { + /// Limit. + pub limit: Option, + /// Block height. + pub created_before_or_at_height: Option, + /// Time. + pub created_before_or_at: Option>, +} + +/// Filter options for orders. 
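All of these option structs follow the same pattern: set only the fields you need and take `Default` for the rest. A sketch filtering orders by market and side; the helper is hypothetical, and since the field types are elided in this listing, the `side` field is assumed to take the `OrderSide` enum used elsewhere in these types (`Buy`/`Sell` variant names assumed):

    use anyhow::Error;
    use dydx_v4_rust::indexer::{IndexerClient, ListOrdersOpts, OrderSide, Subaccount, Ticker};

    async fn log_buy_orders(indexer: &IndexerClient, subaccount: &Subaccount) -> Result<(), Error> {
        let opts = ListOrdersOpts {
            ticker: Some(Ticker::from("ETH-USD")),
            side: Some(OrderSide::Buy), // variant name assumed
            limit: Some(20),
            ..Default::default()
        };
        let orders = indexer.accounts().list_orders(subaccount, Some(opts)).await?;
        tracing::info!("{} matching orders", orders.len());
        Ok(())
    }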
+#[derive(Serialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct ListOrdersOpts { + /// Limit. + pub limit: Option, + /// Ticker. + pub ticker: Option, + /// Side (buy/sell). + pub side: Option, + // TODO: Arrays is supported + /// Order status. + pub status: Option, + /// Order type. + #[serde(rename = "type")] + pub order_type: Option, + /// Block height. + pub good_til_block_before_or_at: Option, + /// Time. + pub good_til_block_time_before_or_at: Option>, + /// Whether to return the latest orders. + pub return_latest_orders: Option, +} + +/// Filter options for fills. +#[derive(Serialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct GetFillsOpts { + /// Limit. + pub limit: Option, + /// Block height. + pub created_before_or_at_height: Option, + /// Time. + pub created_before_or_at: Option>, + /// Ticker. + pub market: Option, + /// Market type. + pub market_type: Option, +} + +/// Filter options for profit and loss. +#[derive(Serialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct GetHistoricalPnlOpts { + /// Limit. + pub limit: Option, + /// Block height. + pub created_before_or_at_height: Option, + /// Time. + pub created_before_or_at: Option>, + /// Block height. + pub created_on_or_after_height: Option, + /// Time. + pub created_on_or_after: Option>, +} + +/// Filter options for rewards. +#[derive(Serialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct GetTradingRewardsOpts { + /// Limit. + pub limit: Option, + /// Block height. + pub starting_before_or_at_height: Option, + /// Time. + pub starting_before_or_at: Option>, +} + +/// Filter options for aggregated rewards. +#[derive(Serialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct GetAggregationsOpts { + /// Limit. + pub limit: Option, + /// Block height. + pub starting_before_or_at_height: Option, + /// Time. + pub starting_before_or_at: Option>, +} diff --git a/v4-client-rs/client/src/indexer/rest/types.rs b/v4-client-rs/client/src/indexer/rest/types.rs new file mode 100644 index 00000000..8c830e00 --- /dev/null +++ b/v4-client-rs/client/src/indexer/rest/types.rs @@ -0,0 +1,303 @@ +use bigdecimal::BigDecimal; +use chrono::{DateTime, Utc}; +use derive_more::{Display, From}; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +use crate::indexer::types::*; + +/// REST Indexer response error. +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ResponseError { + /// Errors. + pub errors: Vec, +} + +/// REST Indexer error message. +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ErrorMsg { + /// Message. + pub msg: String, + /// Parameter. + pub param: String, + /// Location. + pub location: String, +} + +/// Profit and loss tick id. +#[derive(Deserialize, Debug, Clone, From, Display, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct PnlTickId(pub String); + +/// Transfer id. +#[derive( + Serialize, Deserialize, Debug, Clone, From, Display, PartialEq, Eq, PartialOrd, Ord, Hash, +)] +pub struct TransferId(pub String); + +/// Period to aggregate rewards over. +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum TradingRewardAggregationPeriod { + /// Day. + Daily, + /// Week. + Weekly, + /// Month. + Monthly, +} + +/// Sparkline time period. +#[derive(Serialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum SparklineTimePeriod { + /// 1 day. 
+ OneDay, + /// 7 days. + SevenDays, +} + +/// Fundings response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct HistoricalFundingResponse { + /// List of fundings + pub historical_funding: Vec, +} + +/// Funding response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct HistoricalFundingResponseObject { + /// Market ticker. + pub ticker: Ticker, + /// Time. + pub effective_at: DateTime, + /// Block height. + pub effective_at_height: Height, + /// Price. + pub price: Price, + /// Funding rate. + pub rate: BigDecimal, +} + +/// Sparkline response. +pub type SparklineResponseObject = HashMap>; + +/// Indexer server time. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct TimeResponse { + /// Time (UTC). + pub iso: DateTime, + /// Unix epoch. + pub epoch: f64, +} + +/// Compliance response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct ComplianceResponse { + /// Whether the address is restricted. + pub restricted: bool, + /// Reason. + pub reason: Option, +} + +/// Address response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct AddressResponse { + /// List of all subaccounts. + pub subaccounts: Vec, + /// Total rewards. + pub total_trading_rewards: BigDecimal, +} + +/// Subaccount response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct SubaccountResponse { + /// Subaccount. + pub subaccount: SubaccountResponseObject, +} + +/// Parent subaccount response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct ParentSubaccountResponse { + /// Subaccount. + pub subaccount: ParentSubaccountResponseObject, +} + +/// Asset positions response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct AssetPositionResponse { + /// Asset positions. + pub positions: Vec, +} + +/// Perpetual positions response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct PerpetualPositionResponse { + /// Perpetual positions. + pub positions: Vec, +} + +/// Transfers response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct TransferResponse { + /// List of transfers. + pub transfers: Vec, +} + +/// Transfer response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct TransferResponseObject { + /// Transfer id. + pub id: TransferId, + /// Time (UTC). + pub created_at: DateTime, + /// Block height. + pub created_at_height: Height, + /// Sender of transfer. + pub sender: Account, + /// Recipient of transfer. + pub recipient: Account, + /// Size of transfer. + pub size: BigDecimal, + /// Token symbol. + pub symbol: Symbol, + /// Transfer transaction hash. + pub transaction_hash: String, + /// Transfer type. + #[serde(rename = "type")] + pub transfer_type: TransferType, +} + +/// Orders list response. +pub type ListOrdersResponse = Vec; + +/// Fills response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct FillResponse { + /// List of fills. + pub fills: Vec, +} + +/// Fill response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct FillResponseObject { + /// Fill id. + pub id: FillId, + /// Side (buy/sell). + pub side: OrderSide, + /// Size. + pub size: BigDecimal, + /// Fee. + pub fee: BigDecimal, + /// Fill type. 
+ #[serde(rename = "type")] + pub fill_type: FillType, + /// Liquidity. + pub liquidity: Liquidity, + /// Market ticker. + pub market: Ticker, + /// Market type. + pub market_type: MarketType, + /// Price. + pub price: Price, + /// Subaccount number. + pub subaccount_number: SubaccountNumber, + /// Block height. + pub created_at_height: Height, + /// Time (UTC). + pub created_at: DateTime, + /// Client metadata. + pub client_metadata: Option, + /// Order id. + pub order_id: Option, +} + +/// Profit and loss reports. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct HistoricalPnlResponse { + /// List of PnL reports. + pub historical_pnl: Vec, +} + +/// Profit and loss report. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct PnlTicksResponseObject { + /// Report id. + pub id: PnlTickId, + /// Subaccount id. + pub subaccount_id: SubaccountId, + /// Block height. + pub block_height: Height, + /// Time (UTC). + pub block_time: DateTime, + /// Time (UTC). + pub created_at: DateTime, + /// Equity. + pub equity: BigDecimal, + /// Total PnL. + pub total_pnl: BigDecimal, + /// Net transfers. + pub net_transfers: BigDecimal, +} + +/// Trading rewards reports. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct HistoricalBlockTradingRewardsResponse { + /// List of reports. + pub rewards: Vec, +} + +/// Trading rewards report. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct HistoricalBlockTradingReward { + /// Trading reward amount. + pub trading_reward: BigDecimal, + /// Block height. + pub created_at_height: Height, + /// Time (UTC). + pub created_at: DateTime, +} + +/// Trading rewards aggregation reports. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct HistoricalTradingRewardAggregationsResponse { + /// List of reports. + pub rewards: Vec, +} + +/// Trading rewards aggregation report. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct HistoricalTradingRewardAggregation { + /// Trading reward amount. + pub trading_reward: BigDecimal, + /// Block height. + pub started_at_height: Height, + /// Time (UTC). + pub started_at: DateTime, + /// Block height. + pub ended_at_height: Option, + /// Time (UTC). + pub ended_at: Option>, + /// Aggregation period. + pub period: TradingRewardAggregationPeriod, +} diff --git a/v4-client-rs/client/src/indexer/sock/config.rs b/v4-client-rs/client/src/indexer/sock/config.rs new file mode 100644 index 00000000..d1afa635 --- /dev/null +++ b/v4-client-rs/client/src/indexer/sock/config.rs @@ -0,0 +1,27 @@ +use serde::Deserialize; +use std::num::NonZeroU32; + +/// Websocket Indexer client configuration. +#[derive(Clone, Debug, Deserialize)] +pub struct SockConfig { + /// Websocket endpoint. + /// + /// You can select Websocket endpoints from [the list](https://docs.dydx.exchange/infrastructure_providers-network/resources#indexer-endpoints). + pub endpoint: String, + /// Reconnect interval. + #[serde(default = "default_timeout")] + pub timeout: u64, + /// Rate limit. + /// + /// See also [Rate Limiting](https://docs.dydx.exchange/api_integration-indexer/indexer_websocket#rate-limiting). 
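For reference, a sketch of what the corresponding indexer configuration looks like when deserialized on its own. The endpoints are placeholders rather than real URLs (see the endpoint list linked above), `timeout` and `rate_limit` fall back to the defaults declared above when omitted, and a direct `toml` dependency is assumed:

    use dydx_v4_rust::indexer::IndexerConfig;

    fn example_indexer_config() -> Result<IndexerConfig, toml::de::Error> {
        toml::from_str(
            r#"
            [rest]                                        # alias: [http]
            endpoint = "https://example-indexer.invalid"  # placeholder

            [sock]                                        # alias: [ws]
            endpoint = "wss://example-indexer.invalid/v4/ws"  # placeholder
            rate_limit = 2
            "#,
        )
    }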
+ #[serde(default = "default_rate_limit")] + pub rate_limit: NonZeroU32, +} + +fn default_timeout() -> u64 { + 1_000 +} + +fn default_rate_limit() -> NonZeroU32 { + NonZeroU32::new(2).unwrap() +} diff --git a/v4-client-rs/client/src/indexer/sock/connector.rs b/v4-client-rs/client/src/indexer/sock/connector.rs new file mode 100644 index 00000000..117694a7 --- /dev/null +++ b/v4-client-rs/client/src/indexer/sock/connector.rs @@ -0,0 +1,316 @@ +use super::{config::SockConfig, messages::*}; +use anyhow::{anyhow as err, Error}; +use futures_util::{SinkExt, StreamExt}; +use governor::{DefaultDirectRateLimiter, Quota, RateLimiter}; +use std::collections::{hash_map::Entry, HashMap}; +use tokio::{ + net::TcpStream, + sync::mpsc, + time::{sleep, Duration}, +}; +use tokio_tungstenite::{ + connect_async, + tungstenite::{self, protocol::Message}, + MaybeTlsStream, WebSocketStream, +}; + +#[cfg(feature = "telemetry")] +use crate::telemetry::{ + LatencyMetric, TELEMETRY_DESC_WS_RECEIVED, TELEMETRY_DESC_WS_RECONNECTS, + TELEMETRY_DESC_WS_SENT, TELEMETRY_DESC_WS_SENT_DURATION, TELEMETRY_WS_RECEIVED, + TELEMETRY_WS_RECONNECTS, TELEMETRY_WS_SENT, TELEMETRY_WS_SENT_DURATION, +}; + +#[derive(Debug)] +pub enum ControlMsg { + Subscribe(Subscription, bool, ChannelSender), + Unsubscribe(Subscription), + #[allow(dead_code)] // TODO remove after completion. + Terminate, +} + +type WsStream = WebSocketStream>; + +#[derive(Debug)] +pub enum ChannelSender { + Subaccounts(mpsc::UnboundedSender>), + ParentSubaccounts(mpsc::UnboundedSender>), + Trades(mpsc::UnboundedSender>), + Orders(mpsc::UnboundedSender>), + Markets(mpsc::UnboundedSender>), + Candles(mpsc::UnboundedSender>), + BlockHeight(mpsc::UnboundedSender>), +} + +impl ChannelSender { + pub(crate) fn status(&self, msg: ConnectorStatusMessage) -> Result<(), Error> { + match self { + Self::Subaccounts(tx) => tx.send(ConnectorMessage::Status(msg))?, + Self::ParentSubaccounts(tx) => tx.send(ConnectorMessage::Status(msg))?, + Self::Trades(tx) => tx.send(ConnectorMessage::Status(msg))?, + Self::Orders(tx) => tx.send(ConnectorMessage::Status(msg))?, + Self::Markets(tx) => tx.send(ConnectorMessage::Status(msg))?, + Self::Candles(tx) => tx.send(ConnectorMessage::Status(msg))?, + Self::BlockHeight(tx) => tx.send(ConnectorMessage::Status(msg))?, + } + Ok(()) + } + + pub(crate) fn send(&self, msg: FeedMessage) -> Result<(), Error> { + match (self, msg) { + (Self::Subaccounts(tx), FeedMessage::Subaccounts(m)) => { + tx.send(ConnectorMessage::Feed(m))? + } + (Self::ParentSubaccounts(tx), FeedMessage::ParentSubaccounts(m)) => { + tx.send(ConnectorMessage::Feed(m))? + } + (Self::Trades(tx), FeedMessage::Trades(m)) => tx.send(ConnectorMessage::Feed(m))?, + (Self::Orders(tx), FeedMessage::Orders(m)) => tx.send(ConnectorMessage::Feed(m))?, + (Self::Markets(tx), FeedMessage::Markets(m)) => tx.send(ConnectorMessage::Feed(m))?, + (Self::Candles(tx), FeedMessage::Candles(m)) => tx.send(ConnectorMessage::Feed(m))?, + (Self::BlockHeight(tx), FeedMessage::BlockHeight(m)) => { + tx.send(ConnectorMessage::Feed(m))? 
+ } + _ => return Err(err!("Mismatched ChannelSender and FeedMessage types")), + } + Ok(()) + } +} + +/// Connector to Client message +#[derive(Debug)] +pub enum ConnectorMessage> { + Status(ConnectorStatusMessage), + Feed(T), +} + +#[derive(Debug)] +pub enum ConnectorStatusMessage { + Connected, + Disconnected, + Resubscription, +} + +/// WebSockets connection manager, message router +pub(crate) struct Connector { + client_handle: bool, + timeout: Duration, + url: String, + rx: mpsc::UnboundedReceiver, + subscriptions: HashMap, + rate_limiter: DefaultDirectRateLimiter, +} + +impl Connector { + pub(crate) fn new(config: SockConfig, rx: mpsc::UnboundedReceiver) -> Self { + #[cfg(feature = "telemetry")] + { + metrics::describe_counter!( + TELEMETRY_WS_RECONNECTS, + metrics::Unit::Count, + TELEMETRY_DESC_WS_RECONNECTS + ); + metrics::describe_counter!( + TELEMETRY_WS_RECEIVED, + metrics::Unit::Count, + TELEMETRY_DESC_WS_RECEIVED + ); + metrics::describe_counter!( + TELEMETRY_WS_SENT, + metrics::Unit::Count, + TELEMETRY_DESC_WS_SENT + ); + + metrics::describe_histogram!( + TELEMETRY_WS_SENT_DURATION, + metrics::Unit::Milliseconds, + TELEMETRY_DESC_WS_SENT_DURATION + ); + } + Connector { + client_handle: true, + url: config.endpoint, + timeout: Duration::from_millis(config.timeout), + rx, + subscriptions: Default::default(), + rate_limiter: RateLimiter::direct(Quota::per_second(config.rate_limit)), + } + } + + pub(crate) async fn entrypoint(mut self) { + if let Err(err) = self.connection_loop().await { + log::error!("Connection failed: {err}"); + } + } + + async fn connection_loop(&mut self) -> Result<(), Error> { + let (mut wss, _) = connect_async(&self.url).await?; + while self.is_active() { + if let Err(err) = self.step(&mut wss).await { + match err { + SockError::Tungstenite(e) => { + log::error!( + "WebSocket interaction failed: {e}. Attempting reconnection..." + ); + sleep(self.timeout).await; + + #[cfg(feature = "telemetry")] + metrics::counter!(TELEMETRY_WS_RECONNECTS).increment(1); + + wss = self.reconnect().await?; + } + SockError::Protocol(e) => log::error!("WebSocket protocol failure: {e}"), + } + } + } + log::debug!("Stopping connector."); + self.unsubscribe_all(&mut wss).await?; + Ok(()) + } + + async fn step(&mut self, wss: &mut WsStream) -> Result<(), SockError> { + tokio::select! 
{ + // Client -> Indexer + Some(msg) = self.rx.recv() => { + + if let Some(msg) = self.process_ctrl_msg(msg).await { + #[cfg(feature = "telemetry")] + LatencyMetric::new(TELEMETRY_WS_SENT_DURATION); + + self.send(wss, msg).await?; + + #[cfg(feature = "telemetry")] + metrics::counter!(TELEMETRY_WS_SENT).increment(1); + } + } + // Indexer -> Client + Some(msg) = wss.next() => { + self.process_wss_msg(msg?).await?; + + #[cfg(feature = "telemetry")] + metrics::counter!(TELEMETRY_WS_RECEIVED).increment(1); + } + } + Ok(()) + } + + async fn process_ctrl_msg(&mut self, ctrl_msg: ControlMsg) -> Option { + match ctrl_msg { + ControlMsg::Subscribe(sub, batched, tx) => { + let msg = sub.sub_message(batched); + match self.subscriptions.entry(sub) { + Entry::Vacant(entry) => { + tx.status(ConnectorStatusMessage::Connected).ok()?; + entry.insert(tx); + Some(RatedMessage::RateLimited(msg)) + } + Entry::Occupied(_) => { + tx.status(ConnectorStatusMessage::Resubscription).ok()?; + None + } + } + } + ControlMsg::Unsubscribe(sub) => { + let msg = sub.unsub_message(); + self.subscriptions.remove(&sub); + Some(RatedMessage::Free(msg)) + } + ControlMsg::Terminate => { + self.client_handle = false; + None + } + } + } + + async fn process_wss_msg(&mut self, wss_msg: Message) -> Result<(), Error> { + match wss_msg { + Message::Text(text) => { + let json: WsMessage = + serde_json::from_str(&text).map_err(|e| err!("{e} for message: {text}"))?; + match json { + WsMessage::Setup(setup) => { + log::debug!( + "Connected to WebSocket stream with ID: {}", + setup.connection_id + ); + Ok(()) + } + WsMessage::Error(error) => { + Err(err!("Server sent error message: {}", error.message)) + } + WsMessage::Data(data) => { + let sub = data.subscription().ok_or_else(|| { + err!("Could not match received FeedMessage with a subscription!") + })?; + let tx = self + .subscriptions + .get(&sub) + .ok_or_else(|| err!("Subscription {sub:?} is not found!"))?; + tx.send(data)?; + Ok(()) + } + WsMessage::Unsub(unsub) => { + log::debug!( + "Received unsubscribed message for: {} {}", + unsub.channel, + unsub.id.unwrap_or("".into()) + ); + Ok(()) + } + } + } + Message::Ping(_) | Message::Pong(_) => Ok(()), + evt => Err(err!("Unsupported WebSocket event: {evt:?}")), + } + } + + async fn reconnect(&mut self) -> Result { + let (mut wss, _) = connect_async(&self.url).await?; + // Resubscribe to all + for sub in &self.subscriptions { + let msg = sub.0.sub_message(false); + self.send(&mut wss, RatedMessage::RateLimited(msg)).await?; + } + Ok(wss) + } + + async fn unsubscribe_all(&mut self, wss: &mut WsStream) -> Result<(), Error> { + for sub in &self.subscriptions { + let msg = sub.0.unsub_message(); + self.send(wss, RatedMessage::Free(msg)).await?; + } + Ok(()) + } + + /// Run while `SockClient` wasn't dropped or there are any live subscriptions + fn is_active(&self) -> bool { + self.client_handle || !self.subscriptions.is_empty() + } + + /// Rate-limiting socket send + async fn send(&self, wss: &mut WsStream, msg: RatedMessage) -> Result<(), Error> { + let wmsg = match msg { + RatedMessage::RateLimited(wmsg) => { + self.rate_limiter.until_ready().await; + wmsg + } + RatedMessage::Free(wmsg) => wmsg, + }; + wss.send(wmsg).await?; + Ok(()) + } +} + +#[derive(Debug)] +enum RatedMessage { + RateLimited(Message), + Free(Message), +} + +#[derive(Debug, thiserror::Error)] +enum SockError { + #[error("Stream error: {0}")] + Tungstenite(#[from] tungstenite::Error), + #[error("Protocol error: {0}")] + Protocol(#[from] anyhow::Error), +} diff --git 
a/v4-client-rs/client/src/indexer/sock/feed.rs b/v4-client-rs/client/src/indexer/sock/feed.rs new file mode 100644 index 00000000..bf744c7a --- /dev/null +++ b/v4-client-rs/client/src/indexer/sock/feed.rs @@ -0,0 +1,101 @@ +use anyhow::anyhow as err; +use derive_more::Debug; +use std::ops::{Deref, DerefMut}; +use thiserror::Error; +use tokio::sync::mpsc; + +use super::connector::{ConnectorMessage, ConnectorStatusMessage}; +use super::{ControlMsg, FeedMessage, Subscription}; + +/// Realtime feed. +/// +/// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/websockets.rs). +pub struct Feed> { + feed: mpsc::UnboundedReceiver>, + sub: Subscription, + ctrl: mpsc::UnboundedSender, +} + +impl Feed +where + T: TryFrom + Debug, +{ + pub(crate) async fn setup( + mut feed: mpsc::UnboundedReceiver>, + sub: Subscription, + ctrl: mpsc::UnboundedSender, + ) -> Result { + if let Some(msg) = feed.recv().await { + match msg { + ConnectorMessage::Status(ConnectorStatusMessage::Connected) => { + Ok(Self { feed, sub, ctrl }) + } + ConnectorMessage::Status(status) => Err(status.into()), + other => Err(err!("Connector sent {:?}. Expected Connected status.", other).into()), + } + } else { + Err(FeedError::Disconnected) + } + } + + // Can be made return Result + /// Receive feed update. + pub async fn recv(&mut self) -> Option { + match self.feed.recv().await { + Some(ConnectorMessage::Feed(feed)) => Some(feed), + _ => None, + } + } +} + +impl> Drop for Feed { + fn drop(&mut self) { + if let Err(err) = self.ctrl.send(ControlMsg::Unsubscribe(self.sub.clone())) { + log::error!("Sending of Unsubscribe control message to connector failed: {err}"); + } + } +} + +impl> Deref for Feed { + type Target = mpsc::UnboundedReceiver>; + + fn deref(&self) -> &Self::Target { + &self.feed + } +} + +impl> DerefMut for Feed { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.feed + } +} + +/// Feed error. +#[derive(Debug, Error)] +pub enum FeedError { + /// Channel is disconnected. + #[error("Channel disconnected")] + Disconnected, + /// Resubscription is detected. + #[error("Resubscription detected")] + Resubscription, + /// Other error. 
+ #[error("Other error: {0}")] + Other(#[from] anyhow::Error), +} + +impl From for FeedError { + fn from(status: ConnectorStatusMessage) -> Self { + match status { + ConnectorStatusMessage::Disconnected => FeedError::Disconnected, + ConnectorStatusMessage::Resubscription => FeedError::Resubscription, + _ => FeedError::Other(err!("Unexpected ConnectorStatusMessage {:?}", status)), + } + } +} + +impl From> for FeedError { + fn from(_err: mpsc::error::SendError) -> Self { + FeedError::Disconnected + } +} diff --git a/v4-client-rs/client/src/indexer/sock/messages.rs b/v4-client-rs/client/src/indexer/sock/messages.rs new file mode 100644 index 00000000..219e6ec9 --- /dev/null +++ b/v4-client-rs/client/src/indexer/sock/messages.rs @@ -0,0 +1,1106 @@ +use crate::indexer::types::{ + CandleResponse as CandlesInitialMessageContents, CandleResponseObject as Candle, + HeightResponse as BlockHeightInitialMessageContents, + OrderBookResponseObject as OrdersInitialMessageContents, + OrderResponseObject as OrderMessageObject, + ParentSubaccountResponseObject as ParentSubaccountMessageObject, + PerpetualMarketResponse as MarketsInitialMessageContents, + SubaccountResponseObject as SubaccountMessageObject, + TradeResponse as TradesInitialMessageContents, *, +}; +use bigdecimal::BigDecimal; +use chrono::{DateTime, Utc}; +use serde::Deserialize; +use serde_json::{json, Value}; +use std::collections::HashMap; +use tokio_tungstenite::tungstenite::protocol::Message; + +/// Feed subscription options +/// Respective ticker is required for `Orders`, `Trades`, `Candles` +#[derive(Debug, Clone, Hash, Eq, PartialEq)] +pub enum Subscription { + /// Subaccounts. + Subaccounts(Subaccount), + /// Orders. + Orders(Ticker), + /// Trades. + Trades(Ticker), + /// Markets. + Markets, + /// Candles. + Candles(Ticker, CandleResolution), + /// Parent subaccounts. + ParentSubaccounts(ParentSubaccount), + /// Block height. 
+ BlockHeight, +} + +impl Subscription { + pub(crate) fn sub_message(&self, batched: bool) -> Message { + match self { + Self::Subaccounts(ref subacc) => subaccounts::sub_message(subacc, batched), + Self::Markets => markets::sub_message(batched), + Self::Orders(ref ticker) => orders::sub_message(ticker, batched), + Self::Trades(ref ticker) => trades::sub_message(ticker, batched), + Self::Candles(ref ticker, ref res) => candles::sub_message(ticker, res, batched), + Self::ParentSubaccounts(ref subacc) => parent_subaccounts::sub_message(subacc, batched), + Self::BlockHeight => block_height::sub_message(batched), + } + } + + pub(crate) fn unsub_message(&self) -> Message { + match self { + Self::Subaccounts(ref subacc) => subaccounts::unsub_message(subacc), + Self::Markets => markets::unsub_message(), + Self::Orders(ref ticker) => orders::unsub_message(ticker), + Self::Trades(ref ticker) => trades::unsub_message(ticker), + Self::Candles(ref ticker, ref res) => candles::unsub_message(ticker, res), + Self::ParentSubaccounts(ref subacc) => parent_subaccounts::unsub_message(subacc), + Self::BlockHeight => block_height::unsub_message(), + } + } +} + +struct MessageFormatter {} + +impl MessageFormatter { + pub(crate) fn sub_message(channel: &str, fields: Value) -> Message { + let message = json!({ + "type": "subscribe", + "channel": channel, + }); + Self::message(fields, message) + } + + pub(crate) fn unsub_message(channel: &str, fields: Value) -> Message { + let message = json!({ + "type": "unsubscribe", + "channel": channel, + }); + Self::message(fields, message) + } + + fn message(mut message: Value, fields: Value) -> Message { + if let Value::Object(ref mut map) = message { + if let Value::Object(fields) = fields { + map.extend(fields); + } + } + Message::Text(message.to_string()) + } +} + +pub(crate) mod subaccounts { + use super::{json, Message, MessageFormatter, Subaccount}; + pub(crate) const CHANNEL: &str = "v4_subaccounts"; + + pub(crate) fn sub_message(subacc: &Subaccount, batched: bool) -> Message { + let address = &subacc.address; + let number = &subacc.number; + MessageFormatter::sub_message( + CHANNEL, + json!({"id": format!("{address}/{number}"), "batched": batched}), + ) + } + + pub(crate) fn unsub_message(subacc: &Subaccount) -> Message { + let address = &subacc.address; + let number = &subacc.number; + MessageFormatter::unsub_message(CHANNEL, json!({"id": format!("{address}/{number}")})) + } +} + +pub(crate) mod parent_subaccounts { + use super::{json, Message, MessageFormatter, ParentSubaccount}; + pub(crate) const CHANNEL: &str = "v4_parent_subaccounts"; + + pub(crate) fn sub_message(subacc: &ParentSubaccount, batched: bool) -> Message { + let address = &subacc.address; + let number = &subacc.number; + MessageFormatter::sub_message( + CHANNEL, + json!({"id": format!("{address}/{number}"), "batched": batched}), + ) + } + + pub(crate) fn unsub_message(subacc: &ParentSubaccount) -> Message { + let address = &subacc.address; + let number = &subacc.number; + MessageFormatter::unsub_message(CHANNEL, json!({"id": format!("{address}/{number}")})) + } +} + +pub(crate) mod orders { + use super::{json, Message, MessageFormatter, Ticker}; + pub(crate) const CHANNEL: &str = "v4_orderbook"; + + pub(crate) fn sub_message(id: &Ticker, batched: bool) -> Message { + MessageFormatter::sub_message(CHANNEL, json!({"id": id, "batched": batched})) + } + + pub(crate) fn unsub_message(id: &Ticker) -> Message { + MessageFormatter::unsub_message(CHANNEL, json!({"id": id})) + } +} + +pub(crate) mod trades { 
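+    // Payloads produced by the helpers below take the form (ticker illustrative):
+    //   {"type": "subscribe", "channel": "v4_trades", "id": "ETH-USD", "batched": false}
+    //   {"type": "unsubscribe", "channel": "v4_trades", "id": "ETH-USD"}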
+ use super::{json, Message, MessageFormatter, Ticker}; + pub(crate) const CHANNEL: &str = "v4_trades"; + + pub(crate) fn sub_message(id: &Ticker, batched: bool) -> Message { + MessageFormatter::sub_message(CHANNEL, json!({"id": id, "batched": batched})) + } + + pub(crate) fn unsub_message(id: &Ticker) -> Message { + MessageFormatter::unsub_message(CHANNEL, json!({"id": id})) + } +} + +pub(crate) mod markets { + use super::{json, Message, MessageFormatter}; + pub const CHANNEL: &str = "v4_markets"; + + pub(crate) fn sub_message(batched: bool) -> Message { + MessageFormatter::sub_message(CHANNEL, json!({"batched": batched})) + } + + pub(crate) fn unsub_message() -> Message { + MessageFormatter::unsub_message(CHANNEL, json!({})) + } +} + +pub(crate) mod candles { + use super::{json, CandleResolution, Message, MessageFormatter, Ticker}; + pub(crate) const CHANNEL: &str = "v4_candles"; + + pub(crate) fn sub_message( + id: &Ticker, + resolution: &CandleResolution, + batched: bool, + ) -> Message { + let resolution_str = serde_json::to_string(resolution).unwrap_or_default(); + let resolution_str = resolution_str.trim_matches('"'); + MessageFormatter::sub_message( + CHANNEL, + json!({"id": format!("{id}/{resolution_str}"), "batched": batched}), + ) + } + + pub(crate) fn unsub_message(id: &Ticker, resolution: &CandleResolution) -> Message { + let resolution_str = serde_json::to_string(resolution).unwrap_or_default(); + let resolution_str = resolution_str.trim_matches('"'); + MessageFormatter::unsub_message(CHANNEL, json!({"id": format!("{id}/{resolution_str}")})) + } +} + +pub(crate) mod block_height { + use super::{json, Message, MessageFormatter}; + pub const CHANNEL: &str = "v4_block_height"; + + pub(crate) fn sub_message(batched: bool) -> Message { + MessageFormatter::sub_message(CHANNEL, json!({"batched": batched})) + } + + pub(crate) fn unsub_message() -> Message { + MessageFormatter::unsub_message(CHANNEL, json!({})) + } +} + +/* Main WS type */ +#[allow(clippy::large_enum_variant)] +#[derive(Debug, Deserialize)] +#[serde(tag = "type")] +pub(crate) enum WsMessage { + #[serde(rename = "connected")] + Setup(StatusConnectedMessage), + #[serde(rename = "error")] + Error(StatusErrorMessage), + #[serde(rename = "unsubscribed")] + Unsub(StatusUnsubMessage), + #[serde(untagged)] + Data(FeedMessage), +} + +#[derive(Debug, Deserialize)] +pub(crate) struct StatusConnectedMessage { + pub(crate) connection_id: String, + #[allow(dead_code)] // TODO remove after completion. + pub(crate) message_id: u64, +} + +#[derive(Debug, Deserialize)] +pub(crate) struct StatusErrorMessage { + pub(crate) message: String, + #[allow(dead_code)] // TODO remove after completion. + pub(crate) connection_id: String, + #[allow(dead_code)] // TODO remove after completion. + pub(crate) message_id: u64, +} + +#[derive(Debug, Deserialize)] +pub(crate) struct StatusUnsubMessage { + #[allow(dead_code)] // TODO remove after completion. + pub(crate) connection_id: String, + #[allow(dead_code)] // TODO remove after completion. + pub(crate) message_id: u64, + pub(crate) channel: String, + pub(crate) id: Option, +} + +/// Feed Types +#[derive(Debug, Deserialize)] +#[serde(tag = "channel")] +pub enum FeedMessage { + /// Subaccounts. + #[serde(rename = "v4_subaccounts")] + Subaccounts(SubaccountsMessage), + /// Orders. + #[serde(rename = "v4_orderbook")] + Orders(OrdersMessage), + /// Trades. + #[serde(rename = "v4_trades")] + Trades(TradesMessage), + /// Markets. + #[serde(rename = "v4_markets")] + Markets(MarketsMessage), + /// Candles. 
+ #[serde(rename = "v4_candles")] + Candles(CandlesMessage), + /// Parent subaccounts. + #[serde(rename = "v4_parent_subaccounts")] + ParentSubaccounts(ParentSubaccountsMessage), + /// Block height. + #[serde(rename = "v4_block_height")] + BlockHeight(BlockHeightMessage), +} + +macro_rules! impl_feed_message_try_from { + ($target_type:ty, $variant:ident) => { + impl TryFrom for $target_type { + type Error = (); + fn try_from(value: FeedMessage) -> Result { + match value { + FeedMessage::$variant(a) => Ok(a), + _ => Err(()), + } + } + } + }; +} + +/// Subaccounts message. +#[derive(Debug, Deserialize)] +#[serde(tag = "type")] +pub enum SubaccountsMessage { + /// Initial. + #[serde(rename = "subscribed")] + Initial(SubaccountsInitialMessage), + /// Update. + #[serde(untagged)] + Update(SubaccountsUpdateMessage), +} +impl_feed_message_try_from!(SubaccountsMessage, Subaccounts); + +/// Subaccounts message. +#[derive(Debug, Deserialize)] +#[serde(tag = "type")] +pub enum ParentSubaccountsMessage { + /// Initial. + #[serde(rename = "subscribed")] + Initial(ParentSubaccountsInitialMessage), + /// Update. + #[serde(untagged)] + Update(ParentSubaccountsUpdateMessage), +} +impl_feed_message_try_from!(ParentSubaccountsMessage, ParentSubaccounts); + +/// Trades message. +#[derive(Debug, Deserialize)] +#[serde(tag = "type")] +pub enum TradesMessage { + /// Initial. + #[serde(rename = "subscribed")] + Initial(TradesInitialMessage), + /// Update. + #[serde(untagged)] + Update(TradesUpdateMessage), +} +impl_feed_message_try_from!(TradesMessage, Trades); + +/// Orders message. +#[derive(Debug, Deserialize)] +#[serde(tag = "type")] +pub enum OrdersMessage { + /// Initial. + #[serde(rename = "subscribed")] + Initial(OrdersInitialMessage), + /// Update. + #[serde(untagged)] + Update(OrdersUpdateMessage), +} +impl_feed_message_try_from!(OrdersMessage, Orders); + +/// Markets message. +#[derive(Debug, Deserialize)] +#[serde(tag = "type")] +pub enum MarketsMessage { + /// Initial. + #[serde(rename = "subscribed")] + Initial(MarketsInitialMessage), + /// Update. + #[serde(untagged)] + Update(MarketsUpdateMessage), +} +impl_feed_message_try_from!(MarketsMessage, Markets); + +/// Candles message. +#[derive(Debug, Deserialize)] +#[serde(tag = "type")] +pub enum CandlesMessage { + /// Initial. + #[serde(rename = "subscribed")] + Initial(CandlesInitialMessage), + /// Update. + #[serde(untagged)] + Update(CandlesUpdateMessage), +} +impl_feed_message_try_from!(CandlesMessage, Candles); + +/// Block height message. +#[derive(Debug, Deserialize)] +#[serde(tag = "type")] +pub enum BlockHeightMessage { + /// Initial. + #[serde(rename = "subscribed")] + Initial(BlockHeightInitialMessage), + /// Update. 
+ #[serde(untagged)] + Update(BlockHeightUpdateMessage), +} +impl_feed_message_try_from!(BlockHeightMessage, BlockHeight); + +impl FeedMessage { + pub(crate) fn subscription(&self) -> Option { + let parse_subacc_id = |id: &str| -> Option { + // Parse "id": "Address/Number" + let mut id_split = id.split('/'); + let address = id_split.next()?.parse().ok()?; + let number_str = id_split.next()?; + let number = serde_json::from_str::(number_str).ok()?; + Some(Subaccount::new(address, number)) + }; + let parse_psubacc_id = |id: &str| -> Option { + // Parse "id": "Address/Number" + let mut id_split = id.split('/'); + let address = id_split.next()?.parse().ok()?; + let number_str = id_split.next()?; + let number = serde_json::from_str::(number_str).ok()?; + Some(ParentSubaccount::new(address, number)) + }; + let parse_candles_id = |id: &str| -> Option<(Ticker, CandleResolution)> { + // Parse "id": "TICKER/RESOLUTION" + let mut id_split = id.split('/'); + let ticker = Ticker(id_split.next()?.into()); + let resolution_str = format!("\"{}\"", id_split.next()?); + let resolution = serde_json::from_str(&resolution_str).ok()?; + Some((ticker, resolution)) + }; + + match self { + Self::Subaccounts(SubaccountsMessage::Initial(msg)) => { + let subacc = parse_subacc_id(&msg.id)?; + Some(Subscription::Subaccounts(subacc)) + } + Self::Subaccounts(SubaccountsMessage::Update(msg)) => { + let subacc = parse_subacc_id(&msg.id)?; + Some(Subscription::Subaccounts(subacc)) + } + + Self::ParentSubaccounts(ParentSubaccountsMessage::Initial(msg)) => { + let subacc = parse_psubacc_id(&msg.id)?; + Some(Subscription::ParentSubaccounts(subacc)) + } + Self::ParentSubaccounts(ParentSubaccountsMessage::Update(msg)) => { + let subacc = parse_psubacc_id(&msg.id)?; + Some(Subscription::ParentSubaccounts(subacc)) + } + + Self::Orders(OrdersMessage::Initial(msg)) => { + Some(Subscription::Orders(Ticker(msg.id.clone()))) + } + Self::Orders(OrdersMessage::Update(msg)) => { + Some(Subscription::Orders(Ticker(msg.id.clone()))) + } + + Self::Trades(TradesMessage::Initial(msg)) => { + Some(Subscription::Trades(Ticker(msg.id.clone()))) + } + Self::Trades(TradesMessage::Update(msg)) => { + Some(Subscription::Trades(Ticker(msg.id.clone()))) + } + + Self::Markets(MarketsMessage::Update(_)) => Some(Subscription::Markets), + Self::Markets(MarketsMessage::Initial(_)) => Some(Subscription::Markets), + + Self::Candles(CandlesMessage::Initial(msg)) => { + let (ticker, resolution) = parse_candles_id(&msg.id)?; + Some(Subscription::Candles(ticker, resolution)) + } + Self::Candles(CandlesMessage::Update(msg)) => { + let (ticker, resolution) = parse_candles_id(&msg.id)?; + Some(Subscription::Candles(ticker, resolution)) + } + + Self::BlockHeight(BlockHeightMessage::Initial(_)) => Some(Subscription::BlockHeight), + Self::BlockHeight(BlockHeightMessage::Update(_)) => Some(Subscription::BlockHeight), + } + } +} + +/// Subaccount initial. +#[derive(Debug, Deserialize)] +pub struct SubaccountsInitialMessage { + /// Connection id. + pub connection_id: String, + /// Subaccount. + pub contents: SubaccountsInitialMessageContents, + /// Id. + pub id: String, + /// Message id. + pub message_id: u64, +} + +/// Subaccount. +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SubaccountsInitialMessageContents { + /// Subaccount. + pub subaccount: SubaccountMessageObject, + /// Orders. + pub orders: Vec, + /// Block height. + pub block_height: Height, +} + +/// Parent subaccount initial. 
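+///
+/// Sent once after subscribing to the parent-subaccounts channel; carries the full
+/// subaccount snapshot together with its open orders and the current block height.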
+#[derive(Debug, Deserialize)] +pub struct ParentSubaccountsInitialMessage { + /// Connection id. + pub connection_id: String, + /// Subaccount. + pub contents: ParentSubaccountsInitialMessageContents, + /// Id. + pub id: String, + /// Message id. + pub message_id: u64, +} + +/// Parent subaccount. +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ParentSubaccountsInitialMessageContents { + /// Subaccount. + pub subaccount: ParentSubaccountMessageObject, + /// Orders. + pub orders: Vec, + /// Block height. + pub block_height: Height, +} + +/// Orders initial message. +#[derive(Debug, Deserialize)] +pub struct OrdersInitialMessage { + /// Connection id. + pub connection_id: String, + /// Orders. + pub contents: OrdersInitialMessageContents, + /// Id. + pub id: String, + /// Message id. + pub message_id: u64, +} + +/// Trades initial message. +#[derive(Debug, Deserialize)] +pub struct TradesInitialMessage { + /// Connection id. + pub connection_id: String, + /// Trades. + pub contents: TradesInitialMessageContents, + /// Id. + pub id: String, + /// Message id. + pub message_id: u64, +} + +/// Markets initial message. +#[derive(Debug, Deserialize)] +pub struct MarketsInitialMessage { + /// Connection id. + pub connection_id: String, + /// Market. + pub contents: MarketsInitialMessageContents, + /// Message id. + pub message_id: u64, +} + +/// Candles initial message. +#[derive(Debug, Deserialize)] +pub struct CandlesInitialMessage { + /// Connection id. + pub connection_id: String, + /// Candles. + pub contents: CandlesInitialMessageContents, + /// Id. + pub id: String, + /// Message id. + pub message_id: u64, +} + +/// Block height initial message. +#[derive(Debug, Deserialize)] +pub struct BlockHeightInitialMessage { + /// Connection id. + pub connection_id: String, + /// Block height contents. + pub contents: BlockHeightInitialMessageContents, + /// Message id. + pub message_id: u64, +} + +// Updates +macro_rules! generate_contents_deserialize_function { + ($fn_name:ident, $result_type:ty) => { + fn $fn_name<'de, D>(deserializer: D) -> Result, D::Error> + where + D: serde::Deserializer<'de>, + { + let value = Value::deserialize(deserializer)?; + + match value { + // Batched + Value::Array(arr) => arr + .into_iter() + .map(|v| serde_json::from_value(v)) + .collect::, _>>() + .map_err(serde::de::Error::custom), + // Streamed + Value::Object(obj) => { + let item = serde_json::from_value::<$result_type>(Value::Object(obj.clone())) + .map_err(serde::de::Error::custom)?; + Ok(vec![item]) + } + _ => Err(serde::de::Error::custom("Expected array or object")), + } + } + }; +} + +/// Subaccount update. +#[derive(Debug, Deserialize)] +pub struct SubaccountsUpdateMessage { + /// Connection id. + pub connection_id: String, + /// Update. + #[serde(deserialize_with = "deserialize_subaccounts_contents")] + pub contents: Vec, + /// Id. + pub id: String, + /// Message id. + pub message_id: u64, + /// Version. + pub version: String, +} +generate_contents_deserialize_function!( + deserialize_subaccounts_contents, + SubaccountUpdateMessageContents +); + +/// Subaccount update contents. +#[derive(Clone, Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SubaccountUpdateMessageContents { + /// Perpetual position updates on the subaccount. + pub perpetual_positions: Option>, + /// Asset position updates on the subaccount. + pub asset_positions: Option>, + /// Order updates on the subaccount. + pub orders: Option>, + /// Fills that occur on the subaccount. 
+ pub fills: Option>, + /// Transfers that occur on the subaccount. + pub transfers: Option, + /// Rewards that occur on the subaccount. + pub trading_reward: Option, + /// Block height. + pub block_height: Option, +} + +/// Perpetual position on subaccount. +#[derive(Clone, Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PerpetualPositionSubaccountMessageContents { + /// Address. + pub address: Address, + /// Subaccount number. + pub subaccount_number: SubaccountNumber, + /// Position id. + pub position_id: String, + /// Market ticker. + pub market: Ticker, + /// Side (buy/sell). + pub side: PositionSide, + /// Position status. + pub status: PerpetualPositionStatus, + /// Size. + pub size: Quantity, + /// Maximum size. + pub max_size: Quantity, + /// Net funding. + pub net_funding: BigDecimal, + /// Entry price. + pub entry_price: Price, + /// Exit price. + pub exit_price: Option, + /// Sum at open. + pub sum_open: BigDecimal, + /// Sum at close. + pub sum_close: BigDecimal, + /// Actual PnL. + pub realized_pnl: Option, + /// Potential PnL. + pub unrealized_pnl: Option, +} + +/// Asset position per subaccount. +#[derive(Clone, Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct AssetPositionSubaccountMessageContents { + /// Address. + pub address: Address, + /// Subaccount number. + pub subaccount_number: SubaccountNumber, + /// Position id. + pub position_id: String, + /// Asset id. + pub asset_id: AssetId, + /// Token symbol. + pub symbol: Symbol, + /// Side (buy/sell). + pub side: PositionSide, + /// Size. + pub size: Quantity, +} + +/// Order per subaccount. +#[derive(Clone, Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OrderSubaccountMessageContents { + /// Id. + pub id: String, + /// Subaccount id. + pub subaccount_id: SubaccountId, + /// Client id. + pub client_id: ClientId, + /// Clob pair id. + pub clob_pair_id: Option, + /// Side (buy/sell). + pub side: Option, + /// Size. + pub size: Option, + /// Market ticker. + pub ticker: Option, + /// Price. + pub price: Option, + #[serde(rename = "type")] + /// Order type. + pub order_type: Option, + /// Time-in-force. + pub time_in_force: Option, + /// Post-only. + pub post_only: Option, + /// Reduce-only. + pub reduce_only: Option, + /// Order status. + pub status: ApiOrderStatus, + /// Order flags. + pub order_flags: OrderFlags, + /// Total filled. + pub total_filled: Option, + /// Total optimistic filled. + pub total_optimistic_filled: Option, + /// Block height. + pub good_til_block: Option, + /// Time(UTC). + pub good_til_block_time: Option>, + /// Trigger price. + pub trigger_price: Option, + /// Time(UTC). + pub updated_at: Option>, + /// Block height. + pub updated_at_height: Option, + /// Removal reason. + pub removal_reason: Option, + /// Block height. + pub created_at_height: Option, + /// Client metadata. + pub client_metadata: Option, +} + +/// Fill per subaccount. +#[derive(Clone, Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct FillSubaccountMessageContents { + /// Fill id. + pub id: FillId, + /// Subaccount id. + pub subaccount_id: SubaccountId, + /// Order side. + pub side: OrderSide, + /// Liquidity. + pub liquidity: Liquidity, + /// Fill type. + #[serde(rename = "type")] + pub fill_type: FillType, + /// Clob pair id. + pub clob_pair_id: ClobPairId, + /// Size. + pub size: Quantity, + /// Price. + pub price: Price, + /// Quote amount. + pub quote_amount: String, + /// Event id. + pub event_id: String, + /// Transaction hash. 
+ pub transaction_hash: String, + /// Time(UTC). + pub created_at: DateTime, + /// Block height. + pub created_at_height: Height, + /// Market ticker. + pub ticker: Ticker, + /// Order id. + pub order_id: Option, + /// Client metadata. + pub client_metadata: Option, +} + +/// Transfer per subaccount. +#[derive(Clone, Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct TransferSubaccountMessageContents { + /// Sender. + pub sender: Account, + /// Recipient. + pub recipient: Account, + /// Token symbol. + pub symbol: Symbol, + /// Size. + pub size: Quantity, + /// Transfer type. + #[serde(rename = "type")] + pub transfer_type: TransferType, + /// Transaction hash. + pub transaction_hash: String, + /// Time(UTC). + pub created_at: DateTime, + /// Block height. + pub created_at_height: Height, +} + +/// Trading reward. +#[derive(Clone, Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct TradingRewardSubaccountMessageContents { + /// Trading reward. + pub trading_reward: BigDecimal, + /// Time(UTC). + pub created_at: DateTime, + /// Block height. + pub created_at_height: Height, +} + +/// Subaccount update. +#[derive(Debug, Deserialize)] +pub struct ParentSubaccountsUpdateMessage { + /// Connection id. + pub connection_id: String, + /// Update. + #[serde(deserialize_with = "deserialize_subaccounts_contents")] + pub contents: Vec, + /// Id. + pub id: String, + /// Message id. + pub message_id: u64, + /// Version. + pub version: String, +} + +/// Subaccount update contents. +#[derive(Clone, Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ParentSubaccountUpdateMessageContents { + /// Perpetual position updates on the subaccount. + pub perpetual_positions: Option>, + /// Asset position updates on the subaccount. + pub asset_positions: Option>, + /// Order updates on the subaccount. + pub orders: Option>, + /// Fills that occur on the subaccount. + pub fills: Option>, + /// Transfers that occur on the subaccount. + pub transfers: Option, + /// Rewards that occur on the subaccount. + pub trading_reward: Option, + /// Block height. + pub block_height: Option, +} + +/// Order update message. +#[derive(Debug, Deserialize)] +pub struct OrdersUpdateMessage { + /// Connection id. + pub connection_id: String, + /// Update. + #[serde(deserialize_with = "deserialize_orders_contents")] + pub contents: OrdersUpdateMessageContents, + /// Id. + pub id: String, + /// Message id. + pub message_id: u64, + /// Version. + pub version: String, +} + +fn deserialize_orders_contents<'de, D>( + deserializer: D, +) -> Result +where + D: serde::Deserializer<'de>, +{ + let value = Value::deserialize(deserializer)?; + + match value { + // Batched + Value::Array(arr) => { + let mut bids = Vec::new(); + let mut asks = Vec::new(); + + for v in arr { + let item: OrdersUpdateMessageContents = + serde_json::from_value(v).map_err(serde::de::Error::custom)?; + + if let Some(item_bids) = item.bids { + bids.extend(item_bids); + } + if let Some(item_asks) = item.asks { + asks.extend(item_asks); + } + } + + Ok(OrdersUpdateMessageContents { + bids: if bids.is_empty() { None } else { Some(bids) }, + asks: if asks.is_empty() { None } else { Some(asks) }, + }) + } + // Streamed + Value::Object(obj) => { + let item = + serde_json::from_value::(Value::Object(obj.clone())) + .map_err(serde::de::Error::custom)?; + Ok(item) + } + _ => Err(serde::de::Error::custom("Expected array or object")), + } +} + +/// Orderbook update. 
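+///
+/// When the Indexer batches updates, the custom deserializer above merges the
+/// per-message bid and ask lists into the single pair of optional vectors held here.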
+#[derive(Deserialize, Debug, Clone)] +pub struct OrdersUpdateMessageContents { + /// Bids. + pub bids: Option>, + /// Asks. + pub asks: Option>, +} + +/// Trades update. +#[derive(Deserialize, Debug, Clone)] +pub struct TradesUpdateMessage { + /// Connection id. + pub connection_id: String, + /// Update. + #[serde(deserialize_with = "deserialize_trades_contents")] + pub contents: Vec, + /// Id. + pub id: String, + /// Message id. + pub message_id: u64, + /// Version. + pub version: String, +} +generate_contents_deserialize_function!(deserialize_trades_contents, TradesUpdateMessageContents); + +/// Trades updates. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct TradesUpdateMessageContents { + /// Updates. + pub trades: Vec, +} + +/// Trade update. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct TradeUpdate { + /// Unique id of the trade, which is the taker fill id. + pub id: TradeId, + /// Time(UTC). + pub created_at: DateTime, + /// Side (buy/sell). + pub side: OrderSide, + /// Price. + pub price: Price, + /// Size. + pub size: Quantity, + /// Trade type. + #[serde(rename = "type")] + pub trade_type: TradeType, +} + +/// Markets update message. +#[derive(Debug, Deserialize)] +pub struct MarketsUpdateMessage { + /// Connection id. + pub connection_id: String, + /// Updates. + #[serde(deserialize_with = "deserialize_markets_contents")] + pub contents: Vec, + /// Message id. + pub message_id: u64, + /// Version. + pub version: String, +} +generate_contents_deserialize_function!(deserialize_markets_contents, MarketsUpdateMessageContents); + +/// Markets update. +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct MarketsUpdateMessageContents { + /// Trading. + pub trading: Option>, + /// Oracle prices. + pub oracle_prices: Option>, +} + +/// Perpetual market info. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct TradingPerpetualMarket { + /// Atomic resolution + pub atomic_resolution: Option, + /// Base asset. + pub base_asset: Option, + /// Base open interest. + pub base_open_interest: Option, + /// Base position size. + pub base_position_size: Option, + /// Clob pair id. + pub clob_pair_id: Option, + /// Id. + pub id: Option, + /// Market id. + pub market_id: Option, + /// Incremental position size. + pub incremental_position_size: Option, + /// Initial margin fraction. + pub initial_margin_fraction: Option, + /// Maintenance margin fraction. + pub maintenance_margin_fraction: Option, + /// Max position size. + pub max_position_size: Option, + /// Open interest. + pub open_interest: Option, + /// Quantum conversion exponent. + pub quantum_conversion_exponent: Option, + /// Quote asset. + pub quote_asset: Option, + /// Market status + pub status: Option, + /// Step base quantums. + pub step_base_quantums: Option, + /// Subticks per tick. + pub subticks_per_tick: Option, + /// Market ticker. + pub ticker: Option, + /// 24-h price change. + #[serde(rename = "priceChange24H")] + pub price_change_24h: Option, + /// 24-h number of trades. + #[serde(rename = "trades24H")] + pub trades_24h: Option, + /// 24-h volume. + #[serde(rename = "volume24H")] + pub volume_24h: Option, + /// Next funding rate. + pub next_funding_rate: Option, +} + +/// Oracle price for market. +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OraclePriceMarket { + /// Oracle price. + pub oracle_price: Price, + /// Time(UTC). 
+ pub effective_at: DateTime, + /// Block height. + pub effective_at_height: Height, + /// Market id. + pub market_id: u64, +} + +/// Candles update. +#[derive(Debug, Deserialize)] +pub struct CandlesUpdateMessage { + /// Connection id. + pub connection_id: String, + /// Candle. + #[serde(deserialize_with = "deserialize_candles_contents")] + pub contents: Vec, + /// Id. + pub id: String, + /// Message id. + pub message_id: u64, + /// Version. + pub version: String, +} +generate_contents_deserialize_function!(deserialize_candles_contents, Candle); + +/// Block height update message. +#[derive(Debug, Deserialize)] +pub struct BlockHeightUpdateMessage { + /// Connection id. + pub connection_id: String, + /// Updates. + #[serde(deserialize_with = "deserialize_block_height_contents")] + pub contents: Vec, + /// Message id. + pub message_id: u64, + /// Version. + pub version: String, +} +generate_contents_deserialize_function!( + deserialize_block_height_contents, + BlockHeightUpdateMessageContents +); + +/// Block height update message contents. +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct BlockHeightUpdateMessageContents { + /// Block height. + pub block_height: Height, + /// Time of block. + pub time: DateTime, +} diff --git a/v4-client-rs/client/src/indexer/sock/mod.rs b/v4-client-rs/client/src/indexer/sock/mod.rs new file mode 100644 index 00000000..448ffdbd --- /dev/null +++ b/v4-client-rs/client/src/indexer/sock/mod.rs @@ -0,0 +1,176 @@ +mod config; +mod connector; +/// Realtime feeds. +pub mod feed; +mod messages; + +use anyhow::Result; +use connector::{ChannelSender, Connector, ControlMsg}; +use tokio::sync::mpsc; + +pub use config::SockConfig; + +use crate::indexer::{CandleResolution, ParentSubaccount, Subaccount, Ticker}; +pub use feed::*; +pub use messages::*; + +#[derive(Debug)] +pub(crate) struct SockClient { + conn_tx: mpsc::UnboundedSender, +} + +macro_rules! impl_subscribe { + ($method_name:ident, $message_type:ty, $channel_sender_variant:ident) => { + pub(crate) async fn $method_name( + &mut self, + sub: Subscription, + batched: bool, + ) -> Result, FeedError> { + let (tx, rx) = mpsc::unbounded_channel(); + self.conn_tx.send(ControlMsg::Subscribe( + sub.clone(), + batched, + ChannelSender::$channel_sender_variant(tx), + ))?; + Feed::setup(rx, sub, self.conn_tx.clone()).await + } + }; +} + +impl SockClient { + pub(crate) fn new(config: SockConfig) -> Self { + let (conn_tx, conn_rx) = mpsc::unbounded_channel(); + + let connector = Connector::new(config, conn_rx); + tokio::spawn(connector.entrypoint()); + + Self { conn_tx } + } + + impl_subscribe!(subaccounts, SubaccountsMessage, Subaccounts); + impl_subscribe!( + parent_subaccounts, + ParentSubaccountsMessage, + ParentSubaccounts + ); + impl_subscribe!(trades, TradesMessage, Trades); + impl_subscribe!(orders, OrdersMessage, Orders); + impl_subscribe!(markets, MarketsMessage, Markets); + impl_subscribe!(candles, CandlesMessage, Candles); + impl_subscribe!(block_height, BlockHeightMessage, BlockHeight); +} + +impl Drop for SockClient { + fn drop(&mut self) { + if let Err(e) = self.conn_tx.send(ControlMsg::Terminate) { + log::error!("Failed sending control Terminate to WebSocket connector: {e}"); + } + } +} + +/// Feeds dispatcher. 
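+///
+/// A sketch of subscribing to one of the channels and draining its [`Feed`]; the `feeds`
+/// handle and `ticker` are assumed to already exist (see the crate's websockets example
+/// for a runnable version):
+///
+/// ```ignore
+/// let mut trades = feeds.trades(&ticker, false).await?;
+/// while let Some(update) = trades.recv().await {
+///     println!("trades update: {update:?}");
+/// }
+/// ```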
+#[derive(Debug)] +pub struct Feeds<'a> { + sock: &'a mut SockClient, +} + +impl<'a> Feeds<'a> { + pub(crate) fn new(sock: &'a mut SockClient) -> Self { + Self { sock } + } + + /// This channel provides realtime information about orders, fills, transfers, perpetual positions, and perpetual assets for a subaccount. + /// + /// Initial message returns information on the subaccount like [`get_subaccount`](crate::indexer::Accounts::get_subaccount). + /// + /// Subsequent responses will contain any update to open orders, changes in account, changes in open positions, and/or transfers in a single message. + pub async fn subaccounts( + &mut self, + subaccount: Subaccount, + batched: bool, + ) -> Result, FeedError> { + self.sock + .subaccounts(Subscription::Subaccounts(subaccount), batched) + .await + } + + /// This channel provides realtime information about markets. + /// + /// Initial message returns information on markets like [`list_perpetual_markets`](crate::indexer::Markets::list_perpetual_markets). + /// + /// Subsequent responses will contain any update to markets. + pub async fn markets(&mut self, batched: bool) -> Result, FeedError> { + self.sock.markets(Subscription::Markets, batched).await + } + + /// This channel provides realtime information about trades for the market. + /// + /// Initial message returns information on trades like [`get_trades`](crate::indexer::Markets::get_trades). + /// + /// Subsequent responses will contain any update to trades for the market. + pub async fn trades( + &mut self, + ticker: &Ticker, + batched: bool, + ) -> Result, FeedError> { + self.sock + .trades(Subscription::Trades(ticker.clone()), batched) + .await + } + + /// This channel provides realtime information about the orderbook for the market. + /// + /// Initial message returns information on orderbook like [`get_perpetual_market_orderbook`](crate::indexer::Markets::get_perpetual_market_orderbook). + /// + /// Subsequent responses will contain any update to the orderbook for the market. + pub async fn orders( + &mut self, + ticker: &Ticker, + batched: bool, + ) -> Result, FeedError> { + self.sock + .orders(Subscription::Orders(ticker.clone()), batched) + .await + } + + /// This channel provides realtime information about the candles for the market. + /// + /// Initial message returns information on candles like [`get_candles`](crate::indexer::Markets::get_candles). + /// + /// Subsequent responses will contain any update to the candles for the market. + pub async fn candles( + &mut self, + ticker: &Ticker, + resolution: CandleResolution, + batched: bool, + ) -> Result, FeedError> { + self.sock + .candles(Subscription::Candles(ticker.clone(), resolution), batched) + .await + } + + /// This channel provides realtime information about orders, fills, transfers, perpetual positions, and perpetual assets for a parent subaccount and its children. + pub async fn parent_subaccounts( + &mut self, + subaccount: ParentSubaccount, + batched: bool, + ) -> Result, FeedError> { + self.sock + .parent_subaccounts(Subscription::ParentSubaccounts(subaccount), batched) + .await + } + + /// This channel provides realtime information about the chain's block height. + /// + /// Initial message returns information like [`get_height`](crate::indexer::Utility::get_height). + /// + /// Subsequent responses will contain following created blocks. 
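+    ///
+    /// Minimal usage sketch (the `feeds` handle is assumed to already be in scope):
+    ///
+    /// ```ignore
+    /// let mut heights = feeds.block_height(false).await?;
+    /// while let Some(update) = heights.recv().await {
+    ///     match update {
+    ///         BlockHeightMessage::Initial(snapshot) => println!("snapshot: {snapshot:?}"),
+    ///         BlockHeightMessage::Update(blocks) => println!("new blocks: {blocks:?}"),
+    ///     }
+    /// }
+    /// ```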
+ pub async fn block_height( + &mut self, + batched: bool, + ) -> Result, FeedError> { + self.sock + .block_height(Subscription::BlockHeight, batched) + .await + } +} diff --git a/v4-client-rs/client/src/indexer/tokens.rs b/v4-client-rs/client/src/indexer/tokens.rs new file mode 100644 index 00000000..b22aeb29 --- /dev/null +++ b/v4-client-rs/client/src/indexer/tokens.rs @@ -0,0 +1,116 @@ +use crate::indexer::Denom; +use anyhow::{anyhow as err, Error}; +use bigdecimal::{num_traits::ToPrimitive, BigDecimal, One}; +use derive_more::{Deref, DerefMut, From}; +use v4_proto_rs::cosmos_sdk_proto::cosmos::base::v1beta1::Coin as ProtoCoin; + +/// USDC token. +#[derive(Debug, Deref, DerefMut, Clone, PartialEq, Eq, PartialOrd, Ord)] +pub struct Usdc(pub BigDecimal); + +impl Usdc { + const QUANTUMS_ATOMIC_RESOLUTION: i64 = -6; + + /// Create a micro USDC (1e-6) token from an integer. + pub fn from_quantums(quantums: impl Into) -> Self { + Self(quantums.into() / BigDecimal::new(One::one(), Self::QUANTUMS_ATOMIC_RESOLUTION)) + } + + /// Express a USDC token as an integer. + pub fn quantize(self) -> BigDecimal { + self.0 * BigDecimal::new(One::one(), Self::QUANTUMS_ATOMIC_RESOLUTION) + } + + /// Express a USDC token as a u64. + pub fn quantize_as_u64(self) -> Result { + self.quantize() + .to_u64() + .ok_or_else(|| err!("Failed converting USDC value to u64")) + } +} + +impl From for Usdc +where + T: Into, +{ + fn from(value: T) -> Self { + Usdc(value.into()) + } +} + +/// Token. +pub enum Token { + /// USDC. + Usdc(Usdc), + /// dYdX native token. + Dydx(BigDecimal), + /// dYdX testnet native token. + DydxTnt(BigDecimal), +} + +/// An entity which can be operated as a token. +pub trait Tokenized { + /// Gets Token [`Denom`]. + fn denom(&self) -> Denom; + + /// Convert to Cosmos [`Coin`](ProtoCoin). + fn coin(&self) -> Result; +} + +impl Tokenized for Token { + fn denom(&self) -> Denom { + match self { + Self::Usdc(_) => Denom::Usdc, + Self::Dydx(_) => Denom::Dydx, + Self::DydxTnt(_) => Denom::DydxTnt, + } + } + + fn coin(&self) -> Result { + let amount_res = match self { + Self::Usdc(usdc) => usdc.clone().quantize().to_u128(), + Self::Dydx(d) => d.to_u128(), + Self::DydxTnt(d) => d.to_u128(), + }; + Ok(ProtoCoin { + amount: amount_res + .ok_or_else(|| err!("Failed converting dYdX testnet token value into amount"))? 
+ .to_string(), + denom: self.denom().to_string(), + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::str::FromStr; + + fn bigdecimal(val: &str) -> BigDecimal { + BigDecimal::from_str(val).expect("Failed converting str into BigDecimal") + } + + #[test] + fn token_quantums_to_usdc() { + let quantums = bigdecimal("20_000_000"); + let usdc = Usdc::from_quantums(quantums); + let expected = bigdecimal("20"); + assert_eq!(usdc.0, expected); + } + + #[test] + fn token_usdc_to_quantums() { + let usdc = bigdecimal("20"); + let quantums = Usdc::from(usdc).quantize(); + let expected = bigdecimal("20_000_000"); + assert_eq!(quantums, expected); + } + + #[test] + fn token_denom_parse() { + // Test if hardcoded denomination is parsed correctly + let _usdc = Token::Usdc(0.into()).denom(); + let _dydx = Token::Dydx(0.into()).denom(); + let _dydx_tnt = Token::DydxTnt(0.into()).denom(); + } +} diff --git a/v4-client-rs/client/src/indexer/types.rs b/v4-client-rs/client/src/indexer/types.rs new file mode 100644 index 00000000..956ab88a --- /dev/null +++ b/v4-client-rs/client/src/indexer/types.rs @@ -0,0 +1,1057 @@ +use crate::node::OrderMarketParams; +use anyhow::{anyhow as err, Error}; +use bigdecimal::BigDecimal; +use chrono::{DateTime, Utc}; +use cosmrs::{AccountId, Denom as CosmosDenom}; +use derive_more::{Add, Deref, DerefMut, Display, Div, From, Mul, Sub}; +use rand::{thread_rng, Rng}; +use serde::{Deserialize, Serialize}; +use serde_with::{serde_as, DisplayFromStr}; +use std::collections::HashMap; +use std::convert::TryFrom; +use std::{fmt, str::FromStr}; +use v4_proto_rs::dydxprotocol::subaccounts::SubaccountId as ProtoSubaccountId; + +// Shared types used by REST API, WS + +/// A trader's account. +#[derive(Deserialize, Debug, Clone, Eq, Hash, PartialOrd, Ord, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct Account { + /// Address. + pub address: Address, +} + +/// [Address](https://dydx.exchange/crypto-learning/what-is-a-wallet-address). +#[derive( + Serialize, Deserialize, Debug, Clone, From, Display, PartialEq, Eq, PartialOrd, Ord, Hash, +)] +pub struct Address(String); + +impl FromStr for Address { + type Err = Error; + fn from_str(value: &str) -> Result { + Ok(Self( + value.parse::().map_err(Error::msg)?.to_string(), + )) + } +} + +impl AsRef for Address { + fn as_ref(&self) -> &str { + &self.0 + } +} + +impl From
for String { + fn from(address: Address) -> Self { + address.0 + } +} + +/// Order status. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase", untagged)] +pub enum ApiOrderStatus { + /// Order status. + OrderStatus(OrderStatus), + /// Best effort. + BestEffort(BestEffortOpenedStatus), +} + +/// [Time-in-Force](https://docs.dydx.exchange/api_integration-trading/order_types#time-in-force). +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum ApiTimeInForce { + /// GTT represents Good-Til-Time, where an order will first match with existing orders on the book + /// and any remaining size will be added to the book as a maker order, which will expire at a + /// given expiry time. + Gtt, + /// FOK represents Fill-Or-KILl where it's enforced that an order will either be filled + /// completely and immediately by maker orders on the book or canceled if the entire amount can't + /// be filled. + Fok, + /// IOC represents Immediate-Or-Cancel, where it's enforced that an order only be matched with + /// maker orders on the book. If the order has remaining size after matching with existing orders + /// on the book, the remaining size is not placed on the book. + Ioc, +} + +/// Asset id. +#[derive( + Serialize, Deserialize, Debug, Clone, From, Display, PartialEq, Eq, PartialOrd, Ord, Hash, +)] +pub struct AssetId(pub String); + +/// Best-effort opened status. +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum BestEffortOpenedStatus { + /// Best-effort opened. + BestEffortOpened, +} + +/// Candle resolution. +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum CandleResolution { + /// 1-minute. + #[serde(rename = "1MIN")] + M1, + /// 5-minutes. + #[serde(rename = "5MINS")] + M5, + /// 15-minutes. + #[serde(rename = "15MINS")] + M15, + /// 30-minutes. + #[serde(rename = "30MINS")] + M30, + /// 1-hour. + #[serde(rename = "1HOUR")] + H1, + /// 4-hours. + #[serde(rename = "4HOURS")] + H4, + /// 1-day. + #[serde(rename = "1DAY")] + D1, +} + +/// Representatio of an arbitrary ID. +#[derive(Clone, Debug)] +pub struct AnyId; + +/// Client ID defined by the user to identify orders. +/// +/// This value should be different for different orders. +/// To update a specific previously submitted order, the new [`Order`](v4_proto_rs::dydxprotocol::clob::Order) must have the same client ID, and the same [`OrderId`]. +/// See also: [How can I replace an order?](https://docs.dydx.exchange/introduction-onboarding_faqs). +#[serde_as] +#[derive(Deserialize, Debug, Clone, Display, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ClientId(#[serde_as(as = "DisplayFromStr")] pub u32); + +impl ClientId { + /// Creates a new `ClientId` from a provided `u32`. + pub fn new(id: u32) -> Self { + ClientId(id) + } + + /// Creates a random `ClientId` using the default rand::thread_rng. + pub fn random() -> Self { + ClientId(thread_rng().gen()) + } + + /// Creates a random `ClientId` using a user-provided RNG. + pub fn random_with_rng(rng: &mut R) -> Self { + ClientId(rng.gen()) + } +} + +impl From for ClientId { + fn from(value: u32) -> Self { + Self(value) + } +} + +impl From for ClientId { + fn from(_: AnyId) -> Self { + Self::random() + } +} + +/// Clob pair id. 
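+///
+/// Identifies the order book (CLOB) pair a perpetual market trades on.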
+#[serde_as] +#[derive(Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ClobPairId(#[serde_as(as = "DisplayFromStr")] pub u32); + +impl From for ClobPairId { + fn from(value: u32) -> Self { + Self(value) + } +} + +/// Client metadata. +#[serde_as] +#[derive(Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ClientMetadata(#[serde_as(as = "DisplayFromStr")] pub u32); + +impl From for ClientMetadata { + fn from(value: u32) -> Self { + Self(value) + } +} + +/// Fill id. +#[derive(Deserialize, Debug, Clone, From, Display, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct FillId(pub String); + +/// Fill type. +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum FillType { + /// LIMIT is the fill type for a fill with a limit taker order. + Limit, + /// LIQUIDATED is for the taker side of the fill where the subaccount was liquidated. + /// + /// The subaccountId associated with this fill is the liquidated subaccount. + Liquidated, + /// LIQUIDATION is for the maker side of the fill, never used for orders. + Liquidation, + /// DELEVERAGED is for the subaccount that was deleveraged in a deleveraging event. + /// + /// The fill type will be set to taker. + Deleveraged, + /// OFFSETTING is for the offsetting subaccount in a deleveraging event. + /// + /// The fill type will be set to maker. + Offsetting, +} + +/// Block height. +#[serde_as] +#[derive( + Serialize, Deserialize, Debug, Clone, From, Display, PartialEq, Eq, PartialOrd, Ord, Hash, +)] +pub struct Height(#[serde_as(as = "DisplayFromStr")] pub u32); + +impl Height { + /// Get the block which is n blocks ahead. + pub fn ahead(&self, n: u32) -> Height { + Height(self.0 + n) + } +} + +/// Liquidity position. +/// +/// See also [Market Makers vs Market Takers](https://dydx.exchange/crypto-learning/market-makers-vs-market-takers). +#[derive(Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum Liquidity { + /// [Taker](https://dydx.exchange/crypto-learning/glossary?#taker). + Taker, + /// [Maker](https://dydx.exchange/crypto-learning/glossary?#maker). + Maker, +} + +/// Perpetual market status +#[derive(Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum PerpetualMarketStatus { + /// Active. + Active, + /// Paused. + Paused, + /// Cancel-only. + CancelOnly, + /// Post-only. + PostOnly, + /// Initializing. + Initializing, + /// Final settlement. + FinalSettlement, +} + +/// Perpetual position status. +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum PerpetualPositionStatus { + /// Open. + Open, + /// Closed. + Closed, + /// Liquidated. + Liquidated, +} + +/// Position. +/// +/// See also [How to Short Crypto: A Beginner’s Guide](https://dydx.exchange/crypto-learning/how-to-short-crypto). +#[derive(Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum PositionSide { + /// Long. + Long, + /// Short. + Short, +} + +/// Market type. +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum MarketType { + /// [Perpetuals](https://dydx.exchange/crypto-learning/perpetuals-crypto). 
+ Perpetual, + /// [Spot](https://dydx.exchange/crypto-learning/what-is-spot-trading). + Spot, +} + +/// Perpetual market type. +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum PerpetualMarketType { + /// Cross. + Cross, + /// [Isolated](https://docs.dydx.exchange/api_integration-trading/isolated_markets). + Isolated, +} + +/// Order id. +#[derive(Deserialize, Debug, Clone, From, Display, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct OrderId(pub String); + +/// Order status. +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum OrderStatus { + /// Opened. + Open, + /// Filled. + Filled, + /// Canceled. + Canceled, + /// Short term cancellations are handled best-effort, meaning they are only gossiped. + BestEffortCanceled, + /// Untriggered. + Untriggered, +} + +/// When the order enters the execution phase +/// +/// See also [Time in force](https://docs.dydx.exchange/api_integration-indexer/indexer_api#apitimeinforce). +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum OrderExecution { + /// Leaving order execution as unspecified/empty represents the default behavior + /// where an order will first match with existing orders on the book, and any remaining size + /// will be added to the book as a maker order. + Default, + /// IOC represents Immediate-Or-Cancel, where it's enforced that an order only be matched with + /// maker orders on the book. If the order has remaining size after matching with existing orders + /// on the book, the remaining size is not placed on the book. + Ioc, + /// FOK represents Fill-Or-KILl where it's enforced that an order will either be filled + /// completely and immediately by maker orders on the book or canceled if the entire amount can't + /// be filled. + Fok, + /// Post only enforces that an order only be placed on the book as a maker order. + /// Note this means that validators will cancel any newly-placed post only orders that would cross with other maker orders. + PostOnly, +} + +/// Order flags. +#[derive(Clone, Debug, Deserialize)] +pub enum OrderFlags { + /// Short-term order. + #[serde(rename = "0")] + ShortTerm = 0, + /// Conditional order. + #[serde(rename = "32")] + Conditional = 32, + /// Long-term (stateful) order. + #[serde(rename = "64")] + LongTerm = 64, +} + +// TODO: Consider using 12-bytes array, and deserialize from hex +/// Trade id. +#[derive( + Serialize, Deserialize, Debug, Clone, From, Display, PartialEq, Eq, PartialOrd, Ord, Hash, +)] +pub struct TradeId(pub String); + +/// Order side. +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum OrderSide { + /// Buy. + Buy, + /// Sell. + Sell, +} + +/// Order types. +/// +/// See also [OrderType](https://docs.dydx.exchange/api_integration-indexer/indexer_api#ordertype). +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum OrderType { + /// Limit. + Limit, + /// Market. + Market, + /// Stop-limit. + StopLimit, + /// Stop-market. + StopMarket, + /// Trailing-stop. + TrailingStop, + /// Take-profit. + TakeProfit, + /// Take-profit-market. + TakeProfitMarket, + /// Hard-trade. + HardTrade, + /// Failed-hard-trade. 
+ FailedHardTrade, + /// Transfer-placeholder. + TransferPlaceholder, +} + +/// Subaccount. +#[derive(Deserialize, Debug, Clone, Eq, Hash, PartialOrd, Ord, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct Subaccount { + /// Address. + pub address: Address, + /// Subaccount number. + pub number: SubaccountNumber, +} + +impl Subaccount { + /// Create a new Subaccount. + pub fn new(address: Address, number: SubaccountNumber) -> Self { + Self { address, number } + } + + /// Get the parent of this Subaccount. + pub fn parent(&self) -> ParentSubaccount { + let number = ParentSubaccountNumber(self.number.0 % 128); + ParentSubaccount::new(self.address.clone(), number) + } + + /// Check if this Subaccount is a parent? + pub fn is_parent(&self) -> bool { + self.number.0 < 128 + } +} + +impl From for ProtoSubaccountId { + fn from(subacc: Subaccount) -> Self { + ProtoSubaccountId { + owner: subacc.address.0, + number: subacc.number.0, + } + } +} + +/// Subaccount number. +#[derive(Serialize, Deserialize, Debug, Clone, Display, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct SubaccountNumber(pub(crate) u32); + +impl SubaccountNumber { + /// Get the subaccount number value. + pub fn value(&self) -> u32 { + self.0 + } +} + +impl TryFrom for SubaccountNumber { + type Error = Error; + fn try_from(number: u32) -> Result { + match number { + 0..=128_000 => Ok(SubaccountNumber(number)), + _ => Err(err!("Subaccount number must be [0, 128_000]")), + } + } +} + +impl From for SubaccountNumber { + fn from(parent: ParentSubaccountNumber) -> Self { + Self(parent.value()) + } +} + +/// Parent subaccount. +/// +/// A parent subaccount can have multiple positions opened and all posititions are cross-margined. +/// See also [how isolated positions are handled in dYdX](https://docs.dydx.exchange/api_integration-guides/how_to_isolated#mapping-of-isolated-positions-to-subaccounts). +#[derive(Deserialize, Debug, Clone, Eq, Hash, PartialOrd, Ord, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct ParentSubaccount { + /// Address. + pub address: Address, + /// Parent subaccount number. + pub number: ParentSubaccountNumber, +} + +impl ParentSubaccount { + /// Create a new Subaccount. + pub fn new(address: Address, number: ParentSubaccountNumber) -> Self { + Self { address, number } + } +} + +impl std::cmp::PartialEq for ParentSubaccount { + fn eq(&self, other: &Subaccount) -> bool { + self.address == other.address && self.number == other.number + } +} + +/// Subaccount number. +#[derive(Serialize, Deserialize, Debug, Clone, Display, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ParentSubaccountNumber(u32); + +impl ParentSubaccountNumber { + /// Get parent subaccount number value. + pub fn value(&self) -> u32 { + self.0 + } +} + +impl TryFrom for ParentSubaccountNumber { + type Error = Error; + fn try_from(number: u32) -> Result { + match number { + 0..=127 => Ok(ParentSubaccountNumber(number)), + _ => Err(err!("Parent subaccount number must be [0, 127]")), + } + } +} + +impl std::cmp::PartialEq for ParentSubaccountNumber { + fn eq(&self, other: &SubaccountNumber) -> bool { + self.0 == other.value() + } +} + +/// Subaccount id. +#[derive(Deserialize, Debug, Clone, From, Display, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct SubaccountId(pub String); + +/// Token symbol. +#[derive( + Serialize, Deserialize, Debug, Clone, From, Display, PartialEq, Eq, PartialOrd, Ord, Hash, +)] +pub struct Symbol(pub String); + +/// Trade type. 
+#[derive(Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum TradeType { + /// LIMIT is the trade type for a fill with a limit taker order. + Limit, + /// LIQUIDATED is the trade type for a fill with a liquidated taker order. + Liquidated, + /// DELEVERAGED is the trade type for a fill with a deleveraged taker order. + Deleveraged, +} + +/// Transfer type. +#[derive(Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum TransferType { + /// Transfer-in. + TransferIn, + /// Transfer-out. + TransferOut, + /// Deposit. + Deposit, + /// Withdrawal. + Withdrawal, +} + +/// Ticker. +#[derive( + Serialize, Deserialize, Debug, Clone, From, Display, PartialEq, Eq, PartialOrd, Ord, Hash, +)] +pub struct Ticker(pub String); + +impl<'a> From<&'a str> for Ticker { + fn from(value: &'a str) -> Self { + Self(value.into()) + } +} + +const USDC_DENOM: &str = "ibc/8E27BA2D5493AF5636760E354E46004562C46AB7EC0CC4C1CA14E9E20E2545B5"; +const DYDX_DENOM: &str = "adydx"; +const DYDX_TNT_DENOM: &str = "adv4tnt"; +#[cfg(feature = "noble")] +const NOBLE_USDC_DENOM: &str = "uusdc"; + +/// Denom. +/// +/// A more convenient type for Cosmos' [`Denom`](CosmosDenom). +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +pub enum Denom { + /// USDC IBC token. + #[serde(rename = "ibc/8E27BA2D5493AF5636760E354E46004562C46AB7EC0CC4C1CA14E9E20E2545B5")] + Usdc, + /// dYdX native mainnet token. + #[serde(rename = "adydx")] + Dydx, + /// dYdX native testnet token. + #[serde(rename = "adv4tnt")] + DydxTnt, + /// Noble USDC token. + #[cfg(feature = "noble")] + #[serde(rename = "uusdc")] + NobleUsdc, + /// Custom denom representation. + #[serde(untagged)] + Custom(CosmosDenom), +} + +impl Denom { + /// Gas price per atomic unit. + /// This price is only available for `Denom`s which can be used to cover transactions gas fees. + pub fn gas_price(&self) -> Option { + match self { + // Defined dYdX micro USDC per Gas unit. + // As defined in [1](https://docs.dydx.exchange/infrastructure_providers-validators/required_node_configs#base-configuration) and [2](https://github.com/dydxprotocol/v4-chain/blob/ba731b00e3163f7c3ff553b4300d564c11eaa81f/protocol/cmd/dydxprotocold/cmd/config.go#L15). + Self::Usdc => Some(BigDecimal::new(25.into(), 3)), + // Defined dYdX native tokens per Gas unit. Recommended to be roughly the same in value as 0.025 micro USDC. + // As defined in [1](https://github.com/dydxprotocol/v4-chain/blob/ba731b00e3163f7c3ff553b4300d564c11eaa81f/protocol/cmd/dydxprotocold/cmd/config.go#L21). 
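+ // That is, 25_000_000_000 adydx (atto-DYDX) per gas unit.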
+ Self::Dydx | Self::DydxTnt => Some(BigDecimal::new(25_000_000_000u64.into(), 0)), + #[cfg(feature = "noble")] + Self::NobleUsdc => Some(BigDecimal::new(1.into(), 1)), + _ => None, + } + } +} + +impl FromStr for Denom { + type Err = Error; + fn from_str(value: &str) -> Result { + match value { + USDC_DENOM => Ok(Self::Usdc), + DYDX_DENOM => Ok(Self::Dydx), + DYDX_TNT_DENOM => Ok(Self::DydxTnt), + _ => Ok(Self::Custom( + value.parse::().map_err(Error::msg)?, + )), + } + } +} + +impl AsRef for Denom { + fn as_ref(&self) -> &str { + match self { + Self::Usdc => USDC_DENOM, + Self::Dydx => DYDX_DENOM, + Self::DydxTnt => DYDX_TNT_DENOM, + #[cfg(feature = "noble")] + Self::NobleUsdc => NOBLE_USDC_DENOM, + Self::Custom(denom) => denom.as_ref(), + } + } +} + +impl fmt::Display for Denom { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.as_ref()) + } +} + +impl TryFrom for CosmosDenom { + type Error = Error; + fn try_from(value: Denom) -> Result { + value.as_ref().parse().map_err(Self::Error::msg) + } +} + +/// Parent subaccount response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct ParentSubaccountResponseObject { + /// Address. + pub address: Address, + /// Subaccount number. + pub parent_subaccount_number: SubaccountNumber, + /// Equity. + pub equity: BigDecimal, + /// Free collateral. + pub free_collateral: BigDecimal, + /// Is margin enabled? + pub margin_enabled: Option, + /// Associated child subaccounts. + pub child_subaccounts: Vec, +} + +/// Subaccount response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct SubaccountResponseObject { + /// Address. + pub address: Address, + /// Subaccount number. + pub subaccount_number: SubaccountNumber, + /// Equity. + pub equity: BigDecimal, + /// Free collateral. + pub free_collateral: BigDecimal, + /// Is margin enabled? + pub margin_enabled: Option, + /// Asset positions. + pub asset_positions: AssetPositionsMap, + /// Opened perpetual positions. + pub open_perpetual_positions: PerpetualPositionsMap, +} + +/// Asset position response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct AssetPositionResponseObject { + /// Token symbol. + pub symbol: Symbol, + /// Position. + pub side: PositionSide, + /// Size. + pub size: Quantity, + /// Subaccount number. + pub subaccount_number: SubaccountNumber, + /// Asset id. + pub asset_id: AssetId, +} + +/// Perpetual position response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct PerpetualPositionResponseObject { + /// Market ticker. + pub market: Ticker, + /// Position status. + pub status: PerpetualPositionStatus, + /// Position. + pub side: PositionSide, + /// Size. + pub size: Quantity, + /// Maximum size. + pub max_size: Quantity, + /// Entry price. + pub entry_price: Price, + /// Actual PnL. + pub realized_pnl: BigDecimal, + /// Time(UTC). + pub created_at: DateTime, + /// Block height. + pub created_at_height: Height, + /// Sum at open. + pub sum_open: BigDecimal, + /// Sum at close. + pub sum_close: BigDecimal, + /// Net funding. + pub net_funding: BigDecimal, + /// Potential PnL. + pub unrealized_pnl: BigDecimal, + /// Time(UTC). + pub closed_at: Option>, + /// Exit price. + pub exit_price: Option, + /// Subaccount number. + pub subaccount_number: SubaccountNumber, +} + +/// Asset positions. +pub type AssetPositionsMap = HashMap; + +/// Perpetual positions. +pub type PerpetualPositionsMap = HashMap; + +/// Price. 
+#[derive( + Add, + Deserialize, + Debug, + Clone, + Div, + Display, + Deref, + DerefMut, + PartialEq, + Eq, + Mul, + PartialOrd, + Ord, + Hash, + Sub, +)] +#[serde(transparent)] +pub struct Price(pub BigDecimal); + +impl From for Price +where + T: Into, +{ + fn from(value: T) -> Self { + Self(value.into()) + } +} + +impl FromStr for Price { + type Err = bigdecimal::ParseBigDecimalError; + fn from_str(s: &str) -> Result { + s.parse().map(Self) + } +} + +/// Quantity. +#[derive( + Add, + Deserialize, + Debug, + Clone, + Div, + Display, + Deref, + DerefMut, + PartialEq, + Eq, + Mul, + PartialOrd, + Ord, + Hash, + Sub, +)] +#[serde(transparent)] +pub struct Quantity(pub BigDecimal); + +impl From for Quantity +where + T: Into, +{ + fn from(value: T) -> Self { + Self(value.into()) + } +} + +impl FromStr for Quantity { + type Err = bigdecimal::ParseBigDecimalError; + fn from_str(s: &str) -> Result { + s.parse().map(Self) + } +} + +/// Orderbook price level. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct OrderbookResponsePriceLevel { + /// Price. + pub price: Price, + /// Size. + pub size: Quantity, +} + +/// Orderbook response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct OrderBookResponseObject { + /// Bids. + pub bids: Vec, + /// Asks. + pub asks: Vec, +} + +/// Order response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct OrderResponseObject { + /// Client id. + pub client_id: ClientId, + /// Client metadata. + pub client_metadata: ClientMetadata, + /// Clob pair id. + pub clob_pair_id: ClobPairId, + /// Block height. + pub created_at_height: Option, + /// Block height. + pub good_til_block: Option, + /// Time(UTC). + pub good_til_block_time: Option>, + /// Id. + pub id: OrderId, + /// Order flags. + pub order_flags: OrderFlags, + /// Post-only. + pub post_only: bool, + /// Price. + pub price: Price, + /// Reduce-only. + pub reduce_only: bool, + /// Side (buy/sell). + pub side: OrderSide, + /// Size. + pub size: Quantity, + /// Order status. + pub status: ApiOrderStatus, + /// Subaccount id. + pub subaccount_id: SubaccountId, + /// Subaccount number. + pub subaccount_number: SubaccountNumber, + /// Market ticker. + pub ticker: Ticker, + /// Time-in-force. + pub time_in_force: ApiTimeInForce, + /// Total filled. + pub total_filled: BigDecimal, + /// Order type. + #[serde(rename = "type")] + pub order_type: OrderType, + /// Time(UTC). + pub updated_at: Option>, + /// Block height. + pub updated_at_height: Option, +} + +/// Trade response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct TradeResponse { + /// Trades. + pub trades: Vec, +} + +/// Trade. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct TradeResponseObject { + /// Trade id. + pub id: TradeId, + /// Block height. + pub created_at_height: Height, + /// Time(UTC). + pub created_at: DateTime, + /// Side (buy/sell). + pub side: OrderSide, + /// Price. + pub price: Price, + /// Size. + pub size: Quantity, + /// Trade type. + #[serde(rename = "type")] + pub trade_type: TradeType, +} + +/// Perpetual markets. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct PerpetualMarketResponse { + /// Perpetual markets. + pub markets: HashMap, +} + +/// Perpetual market. 
+#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct PerpetualMarket { + /// Atomic resolution + pub atomic_resolution: i32, + /// Base open interest. + pub base_open_interest: BigDecimal, + /// Clob pair id. + pub clob_pair_id: ClobPairId, + /// Initial margin fraction. + pub initial_margin_fraction: BigDecimal, + /// Maintenance margin fraction. + pub maintenance_margin_fraction: BigDecimal, + /// Market type. + pub market_type: PerpetualMarketType, + /// Next funding rate. + pub next_funding_rate: BigDecimal, + /// Open interest. + pub open_interest: BigDecimal, + /// Open interest lower capitalization. + pub open_interest_lower_cap: Option, + /// Open interest upper capitalization. + pub open_interest_upper_cap: Option, + /// Oracle price. + pub oracle_price: Option, + /// 24-h price change. + #[serde(rename = "priceChange24H")] + pub price_change_24h: BigDecimal, + /// Quantum conversion exponent. + pub quantum_conversion_exponent: i32, + /// Market status + pub status: PerpetualMarketStatus, + /// Step base quantums. + pub step_base_quantums: u64, + /// Step size. + pub step_size: BigDecimal, + /// Subticks per tick. + pub subticks_per_tick: u32, + /// Tick size. + pub tick_size: BigDecimal, + /// Market ticker. + pub ticker: Ticker, + /// 24-h number of trades. + #[serde(rename = "trades24H")] + pub trades_24h: u64, + /// 24-h volume. + #[serde(rename = "volume24H")] + pub volume_24h: Quantity, +} + +impl PerpetualMarket { + /// Creates a [`OrderMarketParams`], capable of performing price and size quantizations and other + /// operations based on market data. + /// These quantizations are required for `Order` placement. + pub fn order_params(&self) -> OrderMarketParams { + OrderMarketParams { + atomic_resolution: self.atomic_resolution, + clob_pair_id: self.clob_pair_id.clone(), + oracle_price: self.oracle_price.clone(), + quantum_conversion_exponent: self.quantum_conversion_exponent, + step_base_quantums: self.step_base_quantums, + subticks_per_tick: self.subticks_per_tick, + } + } +} + +/// Candle response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct CandleResponse { + /// List of candles. + pub candles: Vec, +} + +/// Candle response. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct CandleResponseObject { + /// Market ticker. + pub ticker: Ticker, + /// Number of trades. + pub trades: u64, + /// Time(UTC). + pub started_at: DateTime, + /// Base token volume. + pub base_token_volume: Quantity, + /// Token price at open. + pub open: Price, + /// Low price volume. + pub low: Price, + /// High price volume. + pub high: Price, + /// Token price at close. + pub close: Price, + /// Candle resolution. + pub resolution: CandleResolution, + /// USD volume. + pub usd_volume: Quantity, + /// Starting open interest. + pub starting_open_interest: BigDecimal, +} + +/// Block height parsed by Indexer. +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct HeightResponse { + /// Block height. + pub height: Height, + /// Time (UTC). 
+ pub time: DateTime<Utc>,
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn denom_parse() {
+ // Test if hardcoded denoms are parsed correctly
+ let _usdc = Denom::Usdc.to_string().parse::<Denom>().unwrap();
+ let _dydx = Denom::Dydx.to_string().parse::<Denom>().unwrap();
+ let _dydx_tnt = Denom::DydxTnt.to_string().parse::<Denom>().unwrap();
+ let _custom: Denom = "uusdc".parse().unwrap();
+ }
+}
diff --git a/v4-client-rs/client/src/lib.rs b/v4-client-rs/client/src/lib.rs
new file mode 100644
index 00000000..f7648f92
--- /dev/null
+++ b/v4-client-rs/client/src/lib.rs
@@ -0,0 +1,49 @@
+//! [dYdX](https://dydx.trade/) v4 asynchronous client.
+//!
+//! [dYdX v4 architecture](https://docs.dydx.exchange/concepts-architecture/architectural_overview)
+//! introduces nodes and the indexer for read-write and read-only operations, respectively.
+//! These multiple entrypoints are reflected in the client interface, allowing for highly customized use.
+//!
+//! The client allows you to:
+//! * manage orders and funds (via [`NodeClient`](crate::node::NodeClient))
+//! * query the dYdX network (via [`NodeClient`](crate::node::NodeClient) and [`IndexerClient`](crate::indexer::IndexerClient))
+//! * get testnet token funds (via [`FaucetClient`](crate::faucet::FaucetClient), gated by feature `faucet`, turned on by default)
+//! * transfer funds between Noble and dYdX (via [`NobleClient`](crate::noble::NobleClient), gated by feature `noble`, turned on by default)
+//!
+//! ### Telemetry
+//!
+//! The feature `telemetry` is turned on by default and provides [`metrics`](https://github.com/metrics-rs/metrics/tree/main/metrics) collection in a vendor-agnostic way.
+//! This allows any compatible [metrics exporter](https://github.com/metrics-rs/metrics?tab=readme-ov-file#project-layout) to be used.
+//! To see which metrics are collected, check the public constants of the [`telemetry`] module.
+//! Many of the provided examples (see below) use [`metrics-observer`](https://github.com/metrics-rs/metrics/tree/main/metrics-observer) as an example metrics exporter,
+//! allowing you to track the metrics in a separate terminal.
+//!
+//! ### Examples
+//!
+//! Explore the detailed examples in [the
+//! repository](https://github.com/dydxprotocol/v4-clients/tree/main/v4-client-rs/client/examples).
+//! Note: to run the examples you need [`protoc`](https://grpc.io/docs/protoc-installation/), as `metrics-exporter-tcp` uses it during the build.
+
+#![deny(missing_docs)]
+
+/// Client configuration.
+pub mod config;
+/// Testnet tokens.
+///
+/// Note that faucet is available under the compilation feature `faucet` (turned on by default).
+#[cfg(feature = "faucet")]
+pub mod faucet;
+/// Indexer client.
+pub mod indexer;
+/// Noble client.
+///
+/// Note that Noble client is available under the compilation feature `noble` (turned on by default).
+#[cfg(feature = "noble")]
+pub mod noble;
+/// Node client.
+pub mod node;
+/// Telemetry.
+///
+/// Note that telemetry is available under the compilation feature `telemetry` (turned on by default).
+#[cfg(feature = "telemetry")]
+pub mod telemetry;
diff --git a/v4-client-rs/client/src/noble/config.rs b/v4-client-rs/client/src/noble/config.rs
new file mode 100644
index 00000000..16053481
--- /dev/null
+++ b/v4-client-rs/client/src/noble/config.rs
@@ -0,0 +1,31 @@
+use crate::indexer::Denom;
+use cosmrs::tendermint::chain::Id as ChainId;
+use serde::Deserialize;
+
+/// Configuration for [`NobleClient`](crate::noble::NobleClient).
+#[derive(Debug, Deserialize)]
+pub struct NobleConfig {
+ /// Node endpoint.
+ pub endpoint: String, + /// Timeout in milliseconds + #[serde(default = "default_timeout")] + pub timeout: u64, + /// [`ChainId`] to specify the chain. + pub chain_id: ChainId, + /// Fee [`Denom`]. + pub fee_denom: Denom, + /// The sequence is a value that represents the number of transactions sent from an account. + /// Either the client manages it automatically via quering the network for the next + /// sequence number or it is a responsibility of a user. + /// It is a [replay prevention](https://docs.cosmos.network/v0.47/learn/beginner/tx-lifecycle). + #[serde(default = "default_manage_sequencing")] + pub manage_sequencing: bool, +} + +fn default_timeout() -> u64 { + 1_000 +} + +fn default_manage_sequencing() -> bool { + true +} diff --git a/v4-client-rs/client/src/noble/mod.rs b/v4-client-rs/client/src/noble/mod.rs new file mode 100644 index 00000000..3c0f162d --- /dev/null +++ b/v4-client-rs/client/src/noble/mod.rs @@ -0,0 +1,242 @@ +mod config; +mod tokens; +use crate::{ + indexer::{Address, Denom, Tokenized}, + node::{Account, TxBuilder, TxHash}, +}; +use anyhow::{anyhow as err, Error}; +use chrono::{TimeDelta, Utc}; +pub use config::NobleConfig; +use cosmrs::tx::{self, Tx}; +use ibc_proto::{ + cosmos::base::v1beta1::Coin as IbcProtoCoin, ibc::applications::transfer::v1::MsgTransfer, +}; +pub use tokens::NobleUsdc; +use tokio::time::Duration; +use tonic::transport::{Channel, ClientTlsConfig}; +use tower::timeout::Timeout; +use v4_proto_rs::{ + cosmos_sdk_proto::cosmos::{ + auth::v1beta1::{ + query_client::QueryClient as AuthClient, BaseAccount, QueryAccountRequest, + }, + bank::v1beta1::{ + query_client::QueryClient as BankClient, QueryAllBalancesRequest, QueryBalanceRequest, + }, + base::{abci::v1beta1::GasInfo, v1beta1::Coin}, + tx::v1beta1::{ + service_client::ServiceClient as TxClient, BroadcastMode, BroadcastTxRequest, + SimulateRequest, + }, + }, + ToAny, +}; + +/// Wrapper over standard [Cosmos modules](https://github.com/cosmos/cosmos-sdk/tree/main/x) clients. +pub struct Routes { + /// Authentication of accounts and transactions for Cosmos SDK applications. + pub auth: AuthClient>, + /// Token transfer functionalities. + pub bank: BankClient>, + /// Tx utilities for the Cosmos SDK. + pub tx: TxClient>, +} + +impl Routes { + /// Creates new modules clients wrapper. + pub fn new(channel: Timeout) -> Self { + Self { + auth: AuthClient::new(channel.clone()), + bank: BankClient::new(channel.clone()), + tx: TxClient::new(channel), + } + } +} + +/// Noble client. +pub struct NobleClient { + builder: TxBuilder, + #[allow(dead_code)] + config: NobleConfig, + routes: Routes, +} + +impl NobleClient { + /// Connect to the node. + pub async fn connect(config: NobleConfig) -> Result { + let tls = ClientTlsConfig::new(); + let endpoint = config.endpoint.clone(); + let channel = Channel::from_shared(endpoint)? + .tls_config(tls)? + .connect() + .await?; + let timeout = Duration::from_millis(config.timeout); + let timeout_channel = Timeout::new(channel, timeout); + let builder = TxBuilder::new(config.chain_id.clone(), config.fee_denom.clone()); + + Ok(Self { + builder, + config, + routes: Routes::new(timeout_channel), + }) + } + + /// Query all balances of an account/address. + pub async fn get_account_balances(&mut self, address: Address) -> Result, Error> { + let req = QueryAllBalancesRequest { + address: address.to_string(), + pagination: None, + }; + let balances = self + .routes + .bank + .all_balances(req) + .await? 
+ .into_inner() + .balances; + Ok(balances) + } + + /// Query token balance of an account/address. + pub async fn get_account_balance( + &mut self, + address: Address, + denom: &Denom, + ) -> Result { + let req = QueryBalanceRequest { + address: address.into(), + denom: denom.to_string(), + }; + let balance = self + .routes + .bank + .balance(req) + .await? + .into_inner() + .balance + .ok_or_else(|| err!("Balance query response does not contain balance"))?; + Ok(balance) + } + + /// Query for [an account](https://github.com/cosmos/cosmos-sdk/tree/main/x/auth#account-1) + /// by it's address. + pub async fn get_account(&mut self, address: &Address) -> Result { + let req = QueryAccountRequest { + address: address.to_string(), + }; + let resp = self + .routes + .auth + .account(req) + .await? + .into_inner() + .account + .ok_or_else(|| err!("Query account request failure, account should exist."))? + .to_msg()?; + Ok(resp) + } + + async fn simulate(&mut self, tx_raw: &tx::Raw) -> Result { + let request = SimulateRequest { + tx_bytes: tx_raw + .to_bytes() + .map_err(|e| err!("Raw Tx to bytes failed: {}", e))?, + ..Default::default() + }; + + let gas = self + .routes + .tx + .simulate(request) + .await? + .into_inner() + .gas_info + .ok_or_else(|| err!("Tx simulation request failed, gas info should exist."))?; + Ok(gas) + } + + /// Fetch account's number and sequence number from the network. + pub async fn query_address(&mut self, address: &Address) -> Result<(u64, u64), Error> { + self.get_account(address) + .await + .map(|res| (res.account_number, res.sequence)) + } + + /// Transfer a token asset between Cosmos blockchain networks. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/noble_transfer.rs). + pub async fn send_token_ibc( + &mut self, + account: &mut Account, + sender: Address, + recipient: Address, + token: impl Tokenized, + source_channel: String, + ) -> Result { + let coin = token.coin()?; + let timeout = (Utc::now() + TimeDelta::seconds(60)) + .timestamp_nanos_opt() + .ok_or_else(|| err!("Failed calculating timeout ns timestamp"))? + .try_into()?; + + let msg = MsgTransfer { + receiver: recipient.to_string(), + sender: sender.to_string(), + source_port: "transfer".to_string(), + source_channel, + timeout_timestamp: timeout, + token: Some(IbcProtoCoin { + amount: coin.amount, + denom: coin.denom, + }), + timeout_height: None, + memo: Default::default(), + }; + + if self.config.manage_sequencing { + let (_, sequence_number) = self.query_address(account.address()).await?; + account.set_sequence_number(sequence_number); + } + + let tx_raw = + self.builder + .build_transaction(account, std::iter::once(msg.to_any()), None)?; + + let simulated = self.simulate(&tx_raw).await?; + let gas = simulated.gas_used; + let fee = self.builder.calculate_fee(Some(gas))?; + + let tx_bytes = tx_raw + .to_bytes() + .map_err(|e| err!("Raw Tx to bytes failed: {e}"))?; + let tx = Tx::from_bytes(&tx_bytes).map_err(|e| err!("Failed to decode Tx bytes: {e}"))?; + self.builder + .build_transaction(account, tx.body.messages, Some(fee))?; + + let request = BroadcastTxRequest { + tx_bytes: tx_raw + .to_bytes() + .map_err(|e| err!("Raw Tx to bytes failed: {}", e))?, + mode: BroadcastMode::Sync.into(), + }; + + let response = self + .routes + .tx + .broadcast_tx(request) + .await? 
+ .into_inner() + .tx_response + .ok_or_else(|| err!("Tx not present in broadcast response"))?; + + if response.code == 0 { + Ok(response.txhash) + } else { + Err(err!( + "Tx broadcast failed with error {}: {}", + response.code, + response.raw_log, + )) + } + } +} diff --git a/v4-client-rs/client/src/noble/tokens.rs b/v4-client-rs/client/src/noble/tokens.rs new file mode 100644 index 00000000..966784df --- /dev/null +++ b/v4-client-rs/client/src/noble/tokens.rs @@ -0,0 +1,43 @@ +use crate::indexer::{Denom, Tokenized}; +use anyhow::{anyhow as err, Error}; +use bigdecimal::{num_traits::ToPrimitive, BigDecimal}; +use v4_proto_rs::cosmos_sdk_proto::cosmos::base::v1beta1::Coin as ProtoCoin; + +/// USDC Noble token. +pub struct NobleUsdc(pub BigDecimal); + +impl From for NobleUsdc +where + T: Into, +{ + fn from(value: T) -> Self { + NobleUsdc(value.into()) + } +} + +impl Tokenized for NobleUsdc { + fn denom(&self) -> Denom { + Denom::NobleUsdc + } + + fn coin(&self) -> Result { + Ok(ProtoCoin { + amount: self + .0 + .to_u128() + .ok_or_else(|| err!("Failed converting Noble USDC value into amount"))? + .to_string(), + denom: self.denom().to_string(), + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn noble_token_parse() { + let _usdc = NobleUsdc(0.into()).denom(); + } +} diff --git a/v4-client-rs/client/src/node/builder.rs b/v4-client-rs/client/src/node/builder.rs new file mode 100644 index 00000000..a799d7e9 --- /dev/null +++ b/v4-client-rs/client/src/node/builder.rs @@ -0,0 +1,68 @@ +use super::sequencer::Nonce; +use super::{fee, Account}; +use crate::indexer::Denom; +use anyhow::{anyhow as err, Error, Result}; +pub use cosmrs::tendermint::chain::Id; +use cosmrs::{ + tx::{self, Fee, SignDoc, SignerInfo}, + Any, +}; + +/// Transaction builder. +pub struct TxBuilder { + chain_id: Id, + fee_denom: Denom, +} + +impl TxBuilder { + /// Create a new transaction builder. + pub fn new(chain_id: Id, fee_denom: Denom) -> Self { + Self { + chain_id, + fee_denom, + } + } + + /// Estimate a transaction fee. + /// + /// See also [What Are Crypto Gas Fees?](https://dydx.exchange/crypto-learning/what-are-crypto-gas-fees). + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/withdraw_other.rs). + pub fn calculate_fee(&self, gas_used: Option) -> Result { + if let Some(gas) = gas_used { + fee::calculate(gas, &self.fee_denom) + } else { + Ok(fee::default()) + } + } + + /// Build a transaction for given messages. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/withdraw_other.rs). 
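+ ///
+ /// A minimal sketch (assuming a `TxBuilder` named `builder`, a signing `Account` named
+ /// `account`, and a protobuf message `msg` prepared elsewhere; the names are illustrative):
+ ///
+ /// ```ignore
+ /// use v4_proto_rs::ToAny;
+ ///
+ /// // Build with the default fee; a fee derived from simulated gas can be passed instead.
+ /// let fee = builder.calculate_fee(None)?;
+ /// let tx_raw = builder.build_transaction(&account, std::iter::once(msg.to_any()), Some(fee))?;
+ /// ```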
+ pub fn build_transaction( + &self, + account: &Account, + msgs: impl IntoIterator, + fee: Option, + ) -> Result { + let tx_body = tx::BodyBuilder::new().msgs(msgs).memo("").finish(); + + let fee = fee.unwrap_or(self.calculate_fee(None)?); + + let nonce = match account.next_nonce() { + Some(Nonce::Sequence(number) | Nonce::Timestamp(number)) => *number, + None => return Err(err!("Account's next nonce not set")), + }; + let auth_info = SignerInfo::single_direct(Some(account.public_key()), nonce).auth_info(fee); + + let sign_doc = SignDoc::new( + &tx_body, + &auth_info, + &self.chain_id, + account.account_number(), + ) + .map_err(|e| err!("cannot create sign doc: {e}"))?; + + account.sign(sign_doc) + } +} diff --git a/v4-client-rs/client/src/node/client/error.rs b/v4-client-rs/client/src/node/client/error.rs new file mode 100644 index 00000000..a47fb93c --- /dev/null +++ b/v4-client-rs/client/src/node/client/error.rs @@ -0,0 +1,57 @@ +use thiserror::Error; +use tonic::Status; + +/// Node error. +#[derive(Error, Debug)] +pub enum NodeError { + /// General error. + #[error("General error: {0}")] + General(#[from] anyhow::Error), + /// Broadcast error. + #[error("Broadcast error: {0}")] + Broadcast(#[from] BroadcastError), +} + +/// Broadcast error. +#[derive(Error, Debug)] +#[error("Broadcast error {code:?} with log: {message}")] +pub struct BroadcastError { + /// Code. + /// + /// [Codes](https://github.com/dydxprotocol/v4-chain/blob/main/protocol/x/clob/types/errors.go). + pub code: Option, + /// Message. + pub message: String, +} + +impl From for BroadcastError { + fn from(error: Status) -> Self { + BroadcastError { + code: None, + message: error.message().to_string(), + } + } +} + +impl BroadcastError { + pub(crate) fn get_collateral_reason(&self) -> Option<&str> { + match self { + // A code is sent in BroadcastTxResponse + BroadcastError { + code: Some(3007), .. 
+ } => { + Some("Broadcast error 3007 received (under collaterization), ignoring") + } + // Tonic::Status is unknown with a message string with the error + BroadcastError { + code: None, + message, + } if message.contains("StillUndercollateralized") + || message.contains("NewlyUndercollateralized") => + { + Some("Broadcast error 'StillUndercollateralized' / 'NewlyUndercollateralized', ignoring") + } + _ => None, + } + } +} diff --git a/v4-client-rs/client/src/node/client/methods.rs b/v4-client-rs/client/src/node/client/methods.rs new file mode 100644 index 00000000..c7fb3085 --- /dev/null +++ b/v4-client-rs/client/src/node/client/methods.rs @@ -0,0 +1,423 @@ +use super::{Address, NodeClient}; +use crate::indexer::{Denom, Height, Subaccount}; +use anyhow::{anyhow as err, Error}; +use v4_proto_rs::{ + cosmos::base::query::v1beta1::PageRequest as V4PageRequest, + cosmos_sdk_proto::cosmos::{ + auth::v1beta1::{BaseAccount, QueryAccountRequest}, + bank::v1beta1::{QueryAllBalancesRequest, QueryBalanceRequest}, + base::{ + query::v1beta1::PageRequest as CosmosPageRequest, + tendermint::v1beta1::{ + Block, GetLatestBlockRequest, GetNodeInfoRequest, GetNodeInfoResponse, + }, + v1beta1::Coin, + }, + staking::v1beta1::{ + DelegationResponse, QueryDelegatorDelegationsRequest, + QueryDelegatorUnbondingDelegationsRequest, QueryValidatorsRequest, UnbondingDelegation, + Validator, + }, + }, + dydxprotocol::{ + bridge::{DelayedCompleteBridgeMessage, QueryDelayedCompleteBridgeMessagesRequest}, + clob::{ + ClobPair, EquityTierLimitConfiguration, QueryAllClobPairRequest, + QueryEquityTierLimitConfigurationRequest, QueryGetClobPairRequest, + }, + feetiers::{PerpetualFeeTier, QueryPerpetualFeeParamsRequest, QueryUserFeeTierRequest}, + perpetuals::{Perpetual, QueryAllPerpetualsRequest, QueryPerpetualRequest}, + prices::{MarketPrice, QueryAllMarketPricesRequest, QueryMarketPriceRequest}, + rewards, + stats::{QueryUserStatsRequest, UserStats}, + subaccounts::{ + QueryAllSubaccountRequest, QueryGetSubaccountRequest, Subaccount as SubaccountInfo, + }, + }, +}; + +impl NodeClient { + /// Query for [account balances](https://github.com/cosmos/cosmos-sdk/tree/main/x/bank#allbalances) + /// by address for all denominations. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_account_balances(&mut self, address: &Address) -> Result, Error> { + let req = QueryAllBalancesRequest { + address: address.to_string(), + pagination: None, + }; + let balances = self.bank.all_balances(req).await?.into_inner().balances; + Ok(balances) + } + + /// Query for account [balance](https://github.com/cosmos/cosmos-sdk/tree/main/x/bank#balance) + /// by address for a given denomination. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_account_balance( + &mut self, + address: &Address, + denom: &Denom, + ) -> Result { + let req = QueryBalanceRequest { + address: address.to_string(), + denom: denom.to_string(), + }; + let balance = self + .bank + .balance(req) + .await? + .into_inner() + .balance + .ok_or_else(|| err!("Balance query response does not contain balance"))?; + Ok(balance) + } + + /// Query for [an account](https://github.com/cosmos/cosmos-sdk/tree/main/x/auth#account-1) + /// by it's address. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). 
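+ ///
+ /// A minimal sketch, assuming a connected `NodeClient` named `client` and an `Address`
+ /// named `address` set up elsewhere:
+ ///
+ /// ```ignore
+ /// let base_account = client.get_account(&address).await?;
+ /// // The returned `BaseAccount` carries the account number and the current sequence number.
+ /// println!("number: {}, sequence: {}", base_account.account_number, base_account.sequence);
+ /// ```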
+ pub async fn get_account(&mut self, address: &Address) -> Result { + let req = QueryAccountRequest { + address: address.to_string(), + }; + let resp = self + .auth + .account(req) + .await? + .into_inner() + .account + .ok_or_else(|| err!("Query account request failure, account should exist."))? + .to_msg()?; + Ok(resp) + } + + /// Query for node info. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_node_info(&mut self) -> Result { + let req = GetNodeInfoRequest {}; + let info = self.base.get_node_info(req).await?.into_inner(); + Ok(info) + } + + /// Query for the latest block. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_latest_block(&mut self) -> Result { + let req = GetLatestBlockRequest::default(); + let latest_block = self + .base + .get_latest_block(req) + .await? + .into_inner() + .sdk_block + .ok_or_else(|| err!("The latest block is empty"))?; + Ok(latest_block) + } + + /// Query for the latest block height. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_latest_block_height(&mut self) -> Result { + let latest_block = self.get_latest_block().await?; + let header = latest_block + .header + .ok_or_else(|| err!("The block doesn't contain a header"))?; + let height = Height(header.height.try_into()?); + Ok(height) + } + + /// Query for user stats (Maker and Taker positions). + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_user_stats(&mut self, address: &Address) -> Result { + let req = QueryUserStatsRequest { + user: address.to_string(), + }; + let stats = self + .stats + .user_stats(req) + .await? + .into_inner() + .stats + .ok_or_else(|| err!("User stats query response does not contain stats"))?; + Ok(stats) + } + + /// Query for [all validators](https://github.com/cosmos/cosmos-sdk/tree/main/x/staking#validators-2) + /// that match the given status. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_all_validators( + &mut self, + status: Option, + ) -> Result, Error> { + let req = QueryValidatorsRequest { + status: status.unwrap_or_default(), + pagination: None, + }; + let validators = self.staking.validators(req).await?.into_inner().validators; + Ok(validators) + } + + /// Query for all subacccounts. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_subaccounts(&mut self) -> Result, Error> { + let req = QueryAllSubaccountRequest { pagination: None }; + let subaccounts = self + .subaccounts + .subaccount_all(req) + .await? + .into_inner() + .subaccount; + Ok(subaccounts) + } + + /// Query for the subacccount. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_subaccount( + &mut self, + subaccount: &Subaccount, + ) -> Result { + let req = QueryGetSubaccountRequest { + owner: subaccount.address.to_string(), + number: subaccount.number.0, + }; + let subaccount = self + .subaccounts + .subaccount(req) + .await? 
+ .into_inner() + .subaccount + .ok_or_else(|| err!("Subaccount query response does not contain subaccount info"))?; + Ok(subaccount) + } + + /// Query for the orderbook pair by its id. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_clob_pair(&mut self, pair_id: u32) -> Result { + let req = QueryGetClobPairRequest { id: pair_id }; + let clob_pair = self + .clob + .clob_pair(req) + .await? + .into_inner() + .clob_pair + .ok_or_else(|| err!("Clob pair {pair_id} query response does not contain clob pair"))?; + Ok(clob_pair) + } + + /// Query for all orderbook pairs. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_clob_pairs( + &mut self, + pagination: Option, + ) -> Result, Error> { + let req = QueryAllClobPairRequest { pagination }; + let clob_pairs = self.clob.clob_pair_all(req).await?.into_inner().clob_pair; + Ok(clob_pairs) + } + + /// Query for the market price. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_price(&mut self, market_id: u32) -> Result { + let req = QueryMarketPriceRequest { id: market_id }; + let price = self + .prices + .market_price(req) + .await? + .into_inner() + .market_price + .ok_or_else(|| { + err!("Market {market_id} price query response does not contain price") + })?; + Ok(price) + } + + /// Query for all markets prices. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_prices( + &mut self, + pagination: Option, + ) -> Result, Error> { + let req = QueryAllMarketPricesRequest { pagination }; + let prices = self + .prices + .all_market_prices(req) + .await? + .into_inner() + .market_prices; + Ok(prices) + } + + /// Query for the perpetual. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_perpetual(&mut self, perpetual_id: u32) -> Result { + let req = QueryPerpetualRequest { id: perpetual_id }; + let perpetual = self + .perpetuals + .perpetual(req) + .await? + .into_inner() + .perpetual + .ok_or_else(|| { + err!("Perpetual {perpetual_id} query response does not contain perpetual") + })?; + Ok(perpetual) + } + + /// Query for all perpetuals. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_perpetuals( + &mut self, + pagination: Option, + ) -> Result, Error> { + let req = QueryAllPerpetualsRequest { pagination }; + let perpetuals = self + .perpetuals + .all_perpetuals(req) + .await? + .into_inner() + .perpetual; + Ok(perpetuals) + } + + /// Query for [`EquityTierLimitConfiguration`]. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_equity_tier_limit_config( + &mut self, + ) -> Result { + let req = QueryEquityTierLimitConfigurationRequest {}; + let etlc = self + .clob + .equity_tier_limit_configuration(req) + .await? 
+ .into_inner() + .equity_tier_limit_config + .ok_or_else(|| { + err!("Equity tier limit config query response does not contain config") + })?; + Ok(etlc) + } + + /// Query for [all delegations](https://github.com/cosmos/cosmos-sdk/tree/main/x/staking#delegatordelegations) + /// of a given delegator address. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_delegator_delegations( + &mut self, + delegator_address: Address, + pagination: Option, + ) -> Result, Error> { + let req = QueryDelegatorDelegationsRequest { + delegator_addr: delegator_address.to_string(), + pagination, + }; + let delegations = self + .staking + .delegator_delegations(req) + .await? + .into_inner() + .delegation_responses; + Ok(delegations) + } + + /// Query for [all unbonding delegations](https://github.com/cosmos/cosmos-sdk/tree/main/x/staking#delegatorunbondingdelegations) + /// of a given delegator address. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_delegator_unbonding_delegations( + &mut self, + delegator_address: Address, + pagination: Option, + ) -> Result, Error> { + let req = QueryDelegatorUnbondingDelegationsRequest { + delegator_addr: delegator_address.to_string(), + pagination, + }; + let responses = self + .staking + .delegator_unbonding_delegations(req) + .await? + .into_inner() + .unbonding_responses; + Ok(responses) + } + + /// Query for delayed bridge messages for the address. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_delayed_complete_bridge_messages( + &mut self, + address: Address, + ) -> Result, Error> { + let req = QueryDelayedCompleteBridgeMessagesRequest { + address: address.to_string(), + }; + let messages = self + .bridge + .delayed_complete_bridge_messages(req) + .await? + .into_inner() + .messages; + Ok(messages) + } + + /// Query for fee tiers for perpetuals. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_fee_tiers(&mut self) -> Result, Error> { + let req = QueryPerpetualFeeParamsRequest {}; + let tiers = self + .feetiers + .perpetual_fee_params(req) + .await? + .into_inner() + .params + .ok_or_else(|| err!("Fee tiers query response does not contain params"))? + .tiers; + Ok(tiers) + } + + /// Query for perpetual fee tier for the address. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_user_fee_tier(&mut self, address: Address) -> Result { + let req = QueryUserFeeTierRequest { + user: address.to_string(), + }; + let tier = self + .feetiers + .user_fee_tier(req) + .await? + .into_inner() + .tier + .ok_or_else(|| err!("User fee tier query response does not contain tier"))?; + Ok(tier) + } + + /// Query for rewards params. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/validator_get.rs). + pub async fn get_rewards_params(&mut self) -> Result { + let req = rewards::QueryParamsRequest {}; + let params = self + .rewards + .params(req) + .await? 
+ .into_inner() + .params + .ok_or_else(|| err!("Rewards query response does not contain params"))?; + Ok(params) + } +} diff --git a/v4-client-rs/client/src/node/client/mod.rs b/v4-client-rs/client/src/node/client/mod.rs new file mode 100644 index 00000000..600203c1 --- /dev/null +++ b/v4-client-rs/client/src/node/client/mod.rs @@ -0,0 +1,648 @@ +pub mod error; +mod methods; + +use super::{ + builder::TxBuilder, config::NodeConfig, order::*, sequencer::*, utils::*, wallet::Account, +}; + +pub use crate::indexer::{Address, ClientId, Height, OrderFlags, Subaccount, Tokenized, Usdc}; +use anyhow::{anyhow as err, Error, Result}; +use bigdecimal::{ + num_bigint::{BigInt, Sign}, + BigDecimal, Signed, +}; +#[cfg(feature = "noble")] +use chrono::{TimeDelta, Utc}; +use cosmrs::tx::{self, Tx}; +use derive_more::{Deref, DerefMut}; +pub use error::*; +#[cfg(feature = "noble")] +use ibc_proto::{ + cosmos::base::v1beta1::Coin as IbcProtoCoin, + ibc::applications::transfer::v1::MsgTransfer as IbcMsgTransfer, +}; +use std::iter; +use tokio::time::{sleep, Duration}; +use tonic::{ + transport::{Channel, ClientTlsConfig}, + Code, +}; +use tower::timeout::Timeout; +use v4_proto_rs::{ + cosmos_sdk_proto::cosmos::{ + auth::v1beta1::query_client::QueryClient as AuthClient, + bank::v1beta1::{query_client::QueryClient as BankClient, MsgSend}, + base::{ + abci::v1beta1::GasInfo, + tendermint::v1beta1::service_client::ServiceClient as BaseClient, + }, + staking::v1beta1::query_client::QueryClient as StakingClient, + tx::v1beta1::{ + service_client::ServiceClient as TxClient, BroadcastMode, BroadcastTxRequest, + GetTxRequest, SimulateRequest, + }, + }, + dydxprotocol::{ + bridge::query_client::QueryClient as BridgeClient, + clob::{ + query_client::QueryClient as ClobClient, MsgBatchCancel, MsgCancelOrder, MsgPlaceOrder, + Order, OrderBatch, + }, + feetiers::query_client::QueryClient as FeeTiersClient, + perpetuals::query_client::QueryClient as PerpetualsClient, + prices::query_client::QueryClient as PricesClient, + rewards::query_client::QueryClient as RewardsClient, + sending::{MsgCreateTransfer, MsgDepositToSubaccount, MsgWithdrawFromSubaccount, Transfer}, + stats::query_client::QueryClient as StatsClient, + subaccounts::query_client::QueryClient as SubaccountsClient, + }, + ToAny, +}; + +#[cfg(feature = "telemetry")] +use crate::telemetry::{ + LatencyMetric, TELEMETRY_BATCH_CANCEL_ORDER_DURATION, TELEMETRY_CANCEL_ORDER_DURATION, + TELEMETRY_DESC_BATCH_CANCEL_ORDER_DURATION, TELEMETRY_DESC_CANCEL_ORDER_DURATION, + TELEMETRY_DESC_ORDERS_CANCELLED, TELEMETRY_DESC_ORDERS_PLACED, + TELEMETRY_DESC_PLACE_ORDER_DURATION, TELEMETRY_DESC_QUERY_TX_DURATION, TELEMETRY_LABEL_ADDRESS, + TELEMETRY_ORDERS_CANCELLED, TELEMETRY_ORDERS_PLACED, TELEMETRY_PLACE_ORDER_DURATION, + TELEMETRY_QUERY_TX_DURATION, +}; + +const DEFAULT_QUERY_TIMEOUT_SECS: u64 = 15; +const DEFAULT_QUERY_INTERVAL_SECS: u64 = 2; + +/// Transaction hash. +/// +/// internally Cosmos uses tendermint::Hash +pub type TxHash = String; + +/// Wrapper over standard [Cosmos modules](https://github.com/cosmos/cosmos-sdk/tree/main/x) clients +/// and [dYdX modules](https://github.com/dydxprotocol/v4-chain/tree/main/protocol/x) clients. +pub struct Routes { + /// Authentication of accounts and transactions for Cosmos SDK applications. + pub auth: AuthClient>, + /// Token transfer functionalities. + pub bank: BankClient>, + /// Basic network information. 
+ pub base: BaseClient>, + /// dYdX bridge to Ethereum + pub bridge: BridgeClient>, + /// dYdX orderbook + pub clob: ClobClient>, + /// dYdX fees + pub feetiers: FeeTiersClient>, + /// dYdX perpetuals + pub perpetuals: PerpetualsClient>, + /// dYdX prices + pub prices: PricesClient>, + /// dYdX rewards + pub rewards: RewardsClient>, + /// Proof-of-Stake layer for public blockchains. + pub staking: StakingClient>, + /// dYdX stats + pub stats: StatsClient>, + /// dYdX subaccounts + pub subaccounts: SubaccountsClient>, + /// Tx utilities for the Cosmos SDK. + pub tx: TxClient>, +} + +impl Routes { + /// Creates new modules clients wrapper. + pub fn new(channel: Timeout) -> Self { + Self { + auth: AuthClient::new(channel.clone()), + bank: BankClient::new(channel.clone()), + base: BaseClient::new(channel.clone()), + bridge: BridgeClient::new(channel.clone()), + clob: ClobClient::new(channel.clone()), + feetiers: FeeTiersClient::new(channel.clone()), + perpetuals: PerpetualsClient::new(channel.clone()), + prices: PricesClient::new(channel.clone()), + rewards: RewardsClient::new(channel.clone()), + staking: StakingClient::new(channel.clone()), + stats: StatsClient::new(channel.clone()), + subaccounts: SubaccountsClient::new(channel.clone()), + tx: TxClient::new(channel), + } + } +} + +/// Node (validator) client. +/// +/// Serves to manage [orders](OrderBuilder) and funds, query transactions. +#[derive(Deref, DerefMut)] +pub struct NodeClient { + config: NodeConfig, + /// Transactions builder. + pub builder: TxBuilder, + #[deref] + #[deref_mut] + routes: Routes, + sequencer: Box, +} + +impl NodeClient { + /// Connect to the node. + pub async fn connect(config: NodeConfig) -> Result { + let tls = ClientTlsConfig::new(); + let endpoint = config.endpoint.clone(); + let channel = Channel::from_shared(endpoint)? + .tls_config(tls)? + .connect() + .await?; + let timeout = Duration::from_millis(config.timeout); + let timeout_channel = Timeout::new(channel, timeout); + let chain_id = config.chain_id.clone().try_into()?; + let builder = TxBuilder::new(chain_id, config.fee_denom.clone()); + let sequencer = Box::new(QueryingSequencer::new(timeout_channel.clone())); + + #[cfg(feature = "telemetry")] + { + metrics::describe_counter!( + TELEMETRY_ORDERS_PLACED, + metrics::Unit::Count, + TELEMETRY_DESC_ORDERS_PLACED + ); + metrics::describe_counter!( + TELEMETRY_ORDERS_CANCELLED, + metrics::Unit::Count, + TELEMETRY_DESC_ORDERS_CANCELLED + ); + metrics::describe_histogram!( + TELEMETRY_PLACE_ORDER_DURATION, + metrics::Unit::Milliseconds, + TELEMETRY_DESC_PLACE_ORDER_DURATION + ); + metrics::describe_histogram!( + TELEMETRY_CANCEL_ORDER_DURATION, + metrics::Unit::Milliseconds, + TELEMETRY_DESC_CANCEL_ORDER_DURATION + ); + metrics::describe_histogram!( + TELEMETRY_BATCH_CANCEL_ORDER_DURATION, + metrics::Unit::Milliseconds, + TELEMETRY_DESC_BATCH_CANCEL_ORDER_DURATION + ); + metrics::describe_histogram!( + TELEMETRY_QUERY_TX_DURATION, + metrics::Unit::Milliseconds, + TELEMETRY_DESC_QUERY_TX_DURATION + ); + } + + Ok(Self { + config, + builder, + routes: Routes::new(timeout_channel), + sequencer, + }) + } + + /// Set `NodeClient`'s account sequence number mechanism + pub fn with_sequencer(&mut self, sequencer: impl Sequencer) { + self.sequencer = Box::new(sequencer); + } + + /// Place [`Order`]. 
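+ ///
+ /// A minimal call sketch, assuming a connected `NodeClient` named `client`, a signing
+ /// `Account` named `account`, and an `order` already built (for instance with the
+ /// order-building helpers of this crate):
+ ///
+ /// ```ignore
+ /// let tx_hash = client.place_order(&mut account, order).await?;
+ /// ```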
+ /// + /// Check [the short-order example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/place_order_short_term.rs) + /// and [the long-term order example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/place_order_long_term.rs). + pub async fn place_order( + &mut self, + account: &mut Account, + order: Order, + ) -> Result { + #[cfg(feature = "telemetry")] + LatencyMetric::new(TELEMETRY_PLACE_ORDER_DURATION); + let is_short_term = order + .order_id + .as_ref() + .is_some_and(|id| id.order_flags == OrderFlags::ShortTerm as u32); + + let msg = MsgPlaceOrder { order: Some(order) }; + + let tx_raw = self + .create_base_transaction(account, msg, !is_short_term) + .await?; + + let tx_hash = self.broadcast_transaction(tx_raw).await?; + + #[cfg(feature = "telemetry")] + { + let address = account.address(); + metrics::counter!( + TELEMETRY_ORDERS_PLACED, + &[(TELEMETRY_LABEL_ADDRESS, address.to_string())] + ) + .increment(1); + } + + Ok(tx_hash) + } + + /// Cancel [`Order`]. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/cancel_order.rs). + pub async fn cancel_order( + &mut self, + account: &mut Account, + order_id: OrderId, + until: impl Into, + ) -> Result { + #[cfg(feature = "telemetry")] + LatencyMetric::new(TELEMETRY_CANCEL_ORDER_DURATION); + + let until = until.into(); + let msg = MsgCancelOrder { + order_id: Some(order_id), + good_til_oneof: Some(until.try_into()?), + }; + + let tx_raw = self.create_base_transaction(account, msg, true).await?; + + let tx_hash = self.broadcast_transaction(tx_raw).await?; + + #[cfg(feature = "telemetry")] + { + let address = account.address(); + metrics::counter!( + TELEMETRY_ORDERS_CANCELLED, + &[(TELEMETRY_LABEL_ADDRESS, address.to_string())] + ) + .increment(1); + } + + Ok(tx_hash) + } + + /// Cancel a batch of short-terms [`Order`]s. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/batch_cancel_orders.rs). + pub async fn batch_cancel_orders( + &mut self, + account: &mut Account, + subaccount: Subaccount, + short_term_cancels: Vec, + until_block: Height, + ) -> Result { + #[cfg(feature = "telemetry")] + LatencyMetric::new(TELEMETRY_BATCH_CANCEL_ORDER_DURATION); + + #[cfg(feature = "telemetry")] + let count: u64 = short_term_cancels + .iter() + .map(|batch| batch.client_ids.len() as u64) + .sum(); + + let msg = MsgBatchCancel { + subaccount_id: Some(subaccount.into()), + short_term_cancels, + good_til_block: until_block.0, + }; + + let tx_raw = self.create_base_transaction(account, msg, true).await?; + + let tx_hash = self.broadcast_transaction(tx_raw).await?; + + #[cfg(feature = "telemetry")] + { + let address = account.address(); + metrics::counter!( + TELEMETRY_ORDERS_CANCELLED, + &[(TELEMETRY_LABEL_ADDRESS, address.to_string())] + ) + .increment(count); + } + + Ok(tx_hash) + } + + /// Deposit funds (USDC) from the address to the subaccount. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/deposit.rs). 
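+ ///
+ /// A minimal sketch, assuming a connected `NodeClient` named `client`, a signing `Account`
+ /// named `account`, and an `amount` value convertible into USDC:
+ ///
+ /// ```ignore
+ /// let address = account.address().clone();
+ /// let subaccount = Subaccount::new(address.clone(), 0u32.try_into()?);
+ /// let tx_hash = client.deposit(&mut account, address, subaccount, amount).await?;
+ /// ```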
+ pub async fn deposit( + &mut self, + account: &mut Account, + sender: Address, + recipient: Subaccount, + amount: impl Into, + ) -> Result { + let msg = MsgDepositToSubaccount { + sender: sender.to_string(), + recipient: Some(recipient.into()), + asset_id: 0, + quantums: amount.into().quantize_as_u64()?, + }; + + let tx_raw = self.create_transaction(account, msg).await?; + + self.broadcast_transaction(tx_raw).await + } + + /// Withdraw funds (USDC) from the subaccount to the address. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/withdraw.rs). + pub async fn withdraw( + &mut self, + account: &mut Account, + sender: Subaccount, + recipient: Address, + amount: impl Into, + ) -> Result { + let msg = MsgWithdrawFromSubaccount { + sender: Some(sender.into()), + recipient: recipient.to_string(), + asset_id: 0, + quantums: amount.into().quantize_as_u64()?, + }; + + let tx_raw = self.create_transaction(account, msg).await?; + + self.broadcast_transaction(tx_raw).await + } + + /// Transfer funds (USDC) between subaccounts. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/transfer.rs). + pub async fn transfer( + &mut self, + account: &mut Account, + sender: Subaccount, + recipient: Subaccount, + amount: impl Into, + ) -> Result { + let transfer = Transfer { + sender: Some(sender.into()), + recipient: Some(recipient.into()), + asset_id: 0, + amount: amount.into().quantize_as_u64()?, + }; + let msg = MsgCreateTransfer { + transfer: Some(transfer), + }; + + let tx_raw = self.create_transaction(account, msg).await?; + + self.broadcast_transaction(tx_raw).await + } + + /// Transfer a token asset from one address to another one. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/send_token.rs). + pub async fn send_token( + &mut self, + account: &mut Account, + sender: Address, + recipient: Address, + token: impl Tokenized, + ) -> Result { + let msg = MsgSend { + from_address: sender.to_string(), + to_address: recipient.to_string(), + amount: vec![token.coin()?], + }; + + let tx_raw = self.create_transaction(account, msg).await?; + + self.broadcast_transaction(tx_raw).await + } + + /// Transfer a token asset between blockchain networks. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/noble_transfer.rs). + #[cfg(feature = "noble")] + pub async fn send_token_ibc( + &mut self, + account: &mut Account, + sender: Address, + recipient: Address, + token: impl Tokenized, + source_channel: String, + ) -> Result { + let coin = token.coin()?; + let timeout = (Utc::now() + TimeDelta::seconds(60)) + .timestamp_nanos_opt() + .ok_or_else(|| err!("Failed calculating timeout ns timestamp"))? + .try_into() + .map_err(|e| err!("Failed converting timestamp into u64: {e}"))?; + + let msg = IbcMsgTransfer { + receiver: recipient.to_string(), + sender: sender.to_string(), + source_port: "transfer".to_string(), + source_channel, + timeout_timestamp: timeout, + token: Some(IbcProtoCoin { + amount: coin.amount, + denom: coin.denom, + }), + timeout_height: None, + memo: Default::default(), + }; + + let tx_raw = self.create_transaction(account, msg).await?; + + self.broadcast_transaction(tx_raw).await + } + + /// Close position for a given market. + /// + /// Opposite short-term market orders are used. + /// If provided, the position is only reduced by a size of `reduce_by`. 
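A minimal sketch of moving USDC with the funds helpers above, assuming the amount argument accepts the indexer `Usdc` type (as the faucet test does); the amounts are placeholders:

use anyhow::Result;
use dydx_v4_rust::indexer::Usdc;
use dydx_v4_rust::node::{Account, NodeClient};

async fn fund_subaccounts(node: &mut NodeClient, account: &mut Account) -> Result<()> {
    let address = account.address().clone();
    let source = account.subaccount(0)?;
    let target = account.subaccount(1)?;

    // Move 10 USDC from the wallet address into subaccount 0 ...
    let tx = node
        .deposit(account, address, source.clone(), Usdc(10.into()))
        .await?;
    node.query_transaction(&tx).await?;

    // ... and then shift it over to subaccount 1.
    node.transfer(account, source, target, Usdc(10.into()))
        .await?;
    Ok(())
}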
+ /// Note that at the moment dYdX [doesn't support](https://dydx.exchange/faq) spot trading. + /// + /// Check [the first example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/close_position.rs) + /// and [the second example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/close_all_positions.rs). + pub async fn close_position( + &mut self, + account: &mut Account, + subaccount: Subaccount, + market_params: impl Into, + reduce_by: Option, + client_id: impl Into, + ) -> Result, NodeError> { + let subaccount_info = self.get_subaccount(&subaccount).await?; + let market_params = market_params.into(); + let quantums_opt = subaccount_info + .perpetual_positions + .into_iter() + .find(|pos| pos.perpetual_id == market_params.clob_pair_id.0) + .map(|pos| BigInt::from_serializable_int(&pos.quantums)) + .transpose()?; + + let (side, size) = if let Some(quantums) = quantums_opt { + let side = match quantums.sign() { + Sign::Plus => OrderSide::Sell, + Sign::Minus => OrderSide::Buy, + _ => return Ok(None), + }; + let mut size = market_params.dequantize_quantums(quantums.abs()); + if let Some(reduce_by) = reduce_by { + // The quantity to reduce by should not be larger than the + // current position + size = size.min(reduce_by); + } + (side, size) + } else { + return Ok(None); + }; + + let height = self.get_latest_block_height().await?; + + let (_, order) = OrderBuilder::new(market_params, subaccount.clone()) + .market(side, size) + .until(height.ahead(SHORT_TERM_ORDER_MAXIMUM_LIFETIME)) + .build(client_id.into())?; + let tx_hash = self.place_order(account, order).await?; + + Ok(Some(tx_hash)) + } + + /// Simulate a transaction. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/withdraw_other.rs). + pub async fn simulate(&mut self, tx_raw: &tx::Raw) -> Result { + let request = SimulateRequest { + tx_bytes: tx_raw + .to_bytes() + .map_err(|e| err!("Raw Tx to bytes failed: {}", e))?, + ..Default::default() + }; + + // Move to client/methods.rs + self.tx + .simulate(request) + .await + .map_err(BroadcastError::from)? + .into_inner() + .gas_info + .ok_or_else(|| err!("Tx simulation request failed, gas info should exist.").into()) + } + + /// Fetch account's number and sequence number from the network. + pub async fn query_address(&mut self, address: &Address) -> Result<(u64, u64), Error> { + self.get_account(address) + .await + .map(|res| (res.account_number, res.sequence)) + } + + /// Create a transaction. 
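A minimal sketch of the position-closing helper above; the ticker is a placeholder and the market is fetched through the Indexer client:

use anyhow::Result;
use dydx_v4_rust::indexer::{ClientId, IndexerClient, Ticker};
use dydx_v4_rust::node::{Account, NodeClient};

async fn flatten_position(
    node: &mut NodeClient,
    indexer: &IndexerClient,
    account: &mut Account,
) -> Result<()> {
    let market = indexer
        .markets()
        .get_perpetual_market(&Ticker::from("ETH-USD"))
        .await?;
    let subaccount = account.subaccount(0)?;

    // `None` closes the whole position; `Some(size)` would only reduce it.
    if let Some(tx_hash) = node
        .close_position(account, subaccount, market, None, ClientId::random())
        .await?
    {
        node.query_transaction(&tx_hash).await?;
    }
    Ok(())
}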
+ pub async fn create_transaction( + &mut self, + account: &mut Account, + msg: impl ToAny, + ) -> Result { + let tx_raw = self.create_base_transaction(account, msg, true).await?; + let tx_bytes = tx_raw + .to_bytes() + .map_err(|e| err!("Raw Tx to bytes failed: {e}"))?; + let tx = Tx::from_bytes(&tx_bytes) + .map_err(|e| err!("Failed to decode received Tx bytes: {e}"))?; + + let simulated = self.simulate(&tx_raw).await?; + let gas = simulated.gas_used; + let fee = self.builder.calculate_fee(Some(gas))?; + self.builder + .build_transaction(account, tx.body.messages, Some(fee)) + .map_err(|e| e.into()) + } + + async fn create_base_transaction( + &mut self, + account: &mut Account, + msg: impl ToAny, + seqnum_required: bool, + ) -> Result { + if seqnum_required && self.config.manage_sequencing { + let nonce = self.sequencer.next_nonce(account.address()).await?; + account.set_next_nonce(nonce); + } else if !seqnum_required { + account.set_next_nonce(Nonce::Sequence(account.sequence_number())) + } + + self.builder + .build_transaction(account, iter::once(msg.to_any()), None) + } + + /// Broadcast a transaction + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/withdraw_other.rs). + pub async fn broadcast_transaction(&mut self, tx_raw: tx::Raw) -> Result { + let request = BroadcastTxRequest { + tx_bytes: tx_raw + .to_bytes() + .map_err(|e| err!("Raw Tx to bytes failed: {}", e))?, + mode: BroadcastMode::Sync.into(), + }; + + let response = self + .tx + .broadcast_tx(request) + .await + .map_err(BroadcastError::from)? + .into_inner() + .tx_response + .ok_or_else(|| err!("Tx not present in broadcast response"))?; + + if response.code == 0 { + Ok(response.txhash) + } else { + Err(NodeError::Broadcast(BroadcastError { + code: Some(response.code), + message: response.raw_log, + })) + } + } + + /// Query the network for a transaction + pub async fn query_transaction(&mut self, tx_hash: &TxHash) -> Result { + #[cfg(feature = "telemetry")] + LatencyMetric::new(TELEMETRY_QUERY_TX_DURATION); + + let attempts = DEFAULT_QUERY_TIMEOUT_SECS / DEFAULT_QUERY_INTERVAL_SECS; + for _ in 0..attempts { + match self + .tx + .get_tx(GetTxRequest { + hash: tx_hash.clone(), + }) + .await + { + Ok(r) => { + let response = r + .into_inner() + .tx_response + .ok_or_else(|| err!("Tx not present in broadcast response"))?; + let tx_bytes = response + .tx + .ok_or_else(|| err!("TxResponse does not contain Tx bytes!"))? 
+ .value; + let tx = Tx::from_bytes(&tx_bytes) + .map_err(|e| err!("Failed to decode received Tx bytes: {e}"))?; + return Ok(tx); + } + Err(status) if status.code() == Code::NotFound => { + sleep(Duration::from_secs(DEFAULT_QUERY_INTERVAL_SECS)).await; + } + Err(status) => { + return Err(err!("Error querying Tx {tx_hash}: {status}")); + } + } + } + Err(err!("Tx {tx_hash} not found after timeout")) + } + + /// Query the network for a transaction result + pub async fn query_transaction_result( + &mut self, + tx_hash: Result, + ) -> Result, Error> { + match tx_hash { + Ok(tx_hash) => self.query_transaction(&tx_hash).await.map(Some), + Err(NodeError::Broadcast(err)) if err.get_collateral_reason().is_some() => Ok(None), + Err(err) => Err(err.into()), + } + } +} diff --git a/v4-client-rs/client/src/node/config.rs b/v4-client-rs/client/src/node/config.rs new file mode 100644 index 00000000..4461adad --- /dev/null +++ b/v4-client-rs/client/src/node/config.rs @@ -0,0 +1,37 @@ +use crate::indexer::Denom; +use crate::node::ChainId; +use serde::Deserialize; + +/// Configuration for [`NodeClient`](crate::node::NodeClient) +#[derive(Clone, Debug, Deserialize)] +pub struct NodeConfig { + /// Node endpoint. + /// + /// You can select other gRPC endpoints from [the list](https://docs.dydx.exchange/infrastructure_providers-network/resources#full-node-endpoints). + pub endpoint: String, + /// [`Timeout`](tower::timeout::Timeout) applied to requests, in milliseconds. + #[serde(default = "default_timeout")] + pub timeout: u64, + /// [`ChainId`] to specify the chain. + pub chain_id: ChainId, + /// Fee [`Denom`]. + /// + /// See also [Understand IBC Denoms](https://tutorials.cosmos.network/tutorials/6-ibc-dev/). + pub fee_denom: Denom, + /// Have NodeClient manage transaction sequence numbering. + /// + /// Long-term (stateful) orders require managing a sequence number for an account. + /// Either the client manages it automatically via quering the network for the next + /// sequence number or it is a responsibility of a user. + /// It is a [replay prevention](https://docs.dydx.exchange/api_integration-trading/short_term_vs_stateful). + #[serde(default = "default_manage_sequencing")] + pub manage_sequencing: bool, +} + +fn default_timeout() -> u64 { + 1_000 +} + +fn default_manage_sequencing() -> bool { + true +} diff --git a/v4-client-rs/client/src/node/fee.rs b/v4-client-rs/client/src/node/fee.rs new file mode 100644 index 00000000..df89a367 --- /dev/null +++ b/v4-client-rs/client/src/node/fee.rs @@ -0,0 +1,43 @@ +use crate::indexer::Denom; +use anyhow::{anyhow as err, Result}; +use bigdecimal::{ + num_traits::{FromPrimitive, ToPrimitive}, + rounding::RoundingMode, + BigDecimal, +}; +use cosmrs::{tx::Fee, Coin}; + +/// Gas ajdustement value to avoid rejected transactions caused by gas understimation. 
+const GAS_MULTIPLIER: f64 = 1.4; + +pub(crate) fn default() -> Fee { + Fee { + amount: vec![], + gas_limit: 0, + payer: None, + granter: None, + } +} + +pub(crate) fn calculate(gas_used: u64, denom: &Denom) -> Result { + if let Some(gas_price) = denom.gas_price() { + let gas_multiplier = BigDecimal::from_f64(GAS_MULTIPLIER) + .ok_or_else(|| err!("Failed converting gas multiplier to BigDecimal"))?; + let gas_limit = gas_used * gas_multiplier; + // Ceil to avoid underestimation + let amount = (gas_price * &gas_limit).with_scale_round(0, RoundingMode::Up); + Ok(Fee::from_amount_and_gas( + Coin { + amount: amount + .to_u128() + .ok_or_else(|| err!("Failed converting gas cost to u128"))?, + denom: denom.clone().try_into()?, + }, + gas_limit + .to_u64() + .ok_or_else(|| err!("Failed converting gas limit to u64"))?, + )) + } else { + Err(err!("{denom:?} cannot be used to cover gas fees")) + } +} diff --git a/v4-client-rs/client/src/node/mod.rs b/v4-client-rs/client/src/node/mod.rs new file mode 100644 index 00000000..392f68d1 --- /dev/null +++ b/v4-client-rs/client/src/node/mod.rs @@ -0,0 +1,17 @@ +mod builder; +mod client; +mod config; +mod fee; +mod order; +/// Account number sequencing mechanisms +pub mod sequencer; +mod types; +mod utils; +mod wallet; + +pub use builder::TxBuilder; +pub use client::{error::*, Address, NodeClient, Subaccount, TxHash}; +pub use config::NodeConfig; +pub use order::*; +pub use types::ChainId; +pub use wallet::{Account, Wallet}; diff --git a/v4-client-rs/client/src/node/order.rs b/v4-client-rs/client/src/node/order.rs new file mode 100644 index 00000000..f1c0d29a --- /dev/null +++ b/v4-client-rs/client/src/node/order.rs @@ -0,0 +1,616 @@ +use crate::indexer::{ + ClientId, ClobPairId, Height, OrderExecution, OrderFlags, OrderType, PerpetualMarket, Price, + Quantity, Subaccount, +}; +use anyhow::{anyhow as err, Error}; +use bigdecimal::{num_traits::cast::ToPrimitive, BigDecimal, One}; +use chrono::{DateTime, Utc}; +use derive_more::From; +pub use v4_proto_rs::dydxprotocol::clob::{ + order::{Side as OrderSide, TimeInForce as OrderTimeInForce}, + OrderId, +}; +use v4_proto_rs::dydxprotocol::{ + clob::{ + msg_cancel_order, + order::{self, ConditionType}, + Order, + }, + subaccounts::SubaccountId, +}; + +/// Maximum short-term orders lifetime. +/// +/// See also [short-term vs long-term orders](https://help.dydx.trade/en/articles/166985-short-term-vs-long-term-order-types). +pub const SHORT_TERM_ORDER_MAXIMUM_LIFETIME: u32 = 20; + +/// Value used to identify the Rust client. +pub const DEFAULT_RUST_CLIENT_METADATA: u32 = 4; + +/// Order [expirations](https://docs.dydx.exchange/api_integration-trading/short_term_vs_stateful). +#[derive(From, Clone, Debug)] +pub enum OrderGoodUntil { + /// Block expiratin is used for short-term orders. + Block(Height), + /// Time expiratin is used for long-term orders. 
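Returning to the fee helper above, a worked example of the gas adjustment with a made-up gas price of 0.025 per gas unit: 100,000 gas used becomes a 140,000 gas limit, and the fee amount is rounded up to 3,500.

use bigdecimal::{rounding::RoundingMode, BigDecimal};
use std::str::FromStr;

fn main() {
    let gas_used = 100_000u64;
    let multiplier = BigDecimal::from_str("1.4").unwrap(); // GAS_MULTIPLIER
    let gas_price = BigDecimal::from_str("0.025").unwrap(); // made-up denom gas price

    // gas_limit = gas_used * 1.4 = 140000
    let gas_limit = BigDecimal::from(gas_used) * &multiplier;
    // fee amount = ceil(gas_price * gas_limit) = 3500
    let amount = (&gas_price * &gas_limit).with_scale_round(0, RoundingMode::Up);

    assert_eq!(gas_limit, BigDecimal::from_str("140000").unwrap());
    assert_eq!(amount, BigDecimal::from_str("3500").unwrap());
}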
+ Time(DateTime), +} + +impl TryFrom for order::GoodTilOneof { + type Error = Error; + fn try_from(until: OrderGoodUntil) -> Result { + match until { + OrderGoodUntil::Block(height) => Ok(Self::GoodTilBlock(height.0)), + OrderGoodUntil::Time(time) => Ok(Self::GoodTilBlockTime(time.timestamp().try_into()?)), + } + } +} + +impl TryFrom for msg_cancel_order::GoodTilOneof { + type Error = Error; + fn try_from(until: OrderGoodUntil) -> Result { + match until { + OrderGoodUntil::Block(height) => Ok(Self::GoodTilBlock(height.0)), + OrderGoodUntil::Time(time) => Ok(Self::GoodTilBlockTime(time.timestamp().try_into()?)), + } + } +} + +/// Market parameters required to perform price and size quantizations. +/// These quantizations are required for `Order` placement. +/// +/// See also [how to interpret block data for trades](https://docs.dydx.exchange/api_integration-guides/how_to_interpret_block_data_for_trades). +#[derive(Clone, Debug)] +pub struct OrderMarketParams { + /// Atomic resolution + pub atomic_resolution: i32, + /// Clob pair id. + pub clob_pair_id: ClobPairId, + /// Oracle price. + pub oracle_price: Option, + /// Quantum conversion exponent. + pub quantum_conversion_exponent: i32, + /// Step base quantums. + pub step_base_quantums: u64, + /// Subticks per tick. + pub subticks_per_tick: u32, +} + +impl OrderMarketParams { + /// Convert price into subticks. + pub fn quantize_price(&self, price: impl Into) -> BigDecimal { + const QUOTE_QUANTUMS_ATOMIC_RESOLUTION: i32 = -6; + let scale = -(self.atomic_resolution + - self.quantum_conversion_exponent + - QUOTE_QUANTUMS_ATOMIC_RESOLUTION); + let factor = BigDecimal::new(One::one(), scale.into()); + let raw_subticks = price.into().0 * factor; + let subticks_per_tick = BigDecimal::from(self.subticks_per_tick); + let quantums = quantize(&raw_subticks, &subticks_per_tick); + quantums.max(subticks_per_tick) + } + + /// Convert decimal into quantums. + pub fn quantize_quantity(&self, quantity: impl Into) -> BigDecimal { + let factor = BigDecimal::new(One::one(), self.atomic_resolution.into()); + let raw_quantums = quantity.into().0 * factor; + let step_base_quantums = BigDecimal::from(self.step_base_quantums); + let quantums = quantize(&raw_quantums, &step_base_quantums); + quantums.max(step_base_quantums) + } + + /// Convert subticks into decimal. + pub fn dequantize_subticks(&self, subticks: impl Into) -> BigDecimal { + const QUOTE_QUANTUMS_ATOMIC_RESOLUTION: i32 = -6; + let scale = -(self.atomic_resolution + - self.quantum_conversion_exponent + - QUOTE_QUANTUMS_ATOMIC_RESOLUTION); + let factor = BigDecimal::new(One::one(), scale.into()); + subticks.into() / factor + } + + /// Convert quantums into decimal. + pub fn dequantize_quantums(&self, quantums: impl Into) -> BigDecimal { + let factor = BigDecimal::new(One::one(), self.atomic_resolution.into()); + quantums.into() / factor + } + + /// Get orderbook pair id. + pub fn clob_pair_id(&self) -> &ClobPairId { + &self.clob_pair_id + } +} + +/// A `round`-line function that quantize a `value` to the `fraction`. 
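A worked example of the price quantization above, using BTC-USD-like parameters (atomic_resolution -10, quantum_conversion_exponent -9, subticks_per_tick 100,000, matching the unit tests at the end of this file); only the arithmetic is reproduced here:

use bigdecimal::{BigDecimal, One};
use std::str::FromStr;

fn main() {
    // scale = -(atomic_resolution - quantum_conversion_exponent - (-6)) = -5, so factor = 10^5
    let factor = BigDecimal::new(One::one(), -5);
    let subticks_per_tick = BigDecimal::from(100_000u64);

    // A price of 50,000 becomes 5,000,000,000 raw subticks, which is then rounded
    // to the nearest multiple of subticks_per_tick (already exact in this case).
    let raw = BigDecimal::from_str("50000").unwrap() * factor;
    let subticks =
        ((&raw / &subticks_per_tick).round(0) * &subticks_per_tick).max(subticks_per_tick.clone());
    assert_eq!(subticks, BigDecimal::from_str("5000000000").unwrap());
}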
+fn quantize(value: &BigDecimal, fraction: &BigDecimal) -> BigDecimal { + (value / fraction).round(0) * fraction +} + +impl From for OrderMarketParams { + fn from(market: PerpetualMarket) -> Self { + Self { + atomic_resolution: market.atomic_resolution, + clob_pair_id: market.clob_pair_id, + oracle_price: market.oracle_price, + quantum_conversion_exponent: market.quantum_conversion_exponent, + step_base_quantums: market.step_base_quantums, + subticks_per_tick: market.subticks_per_tick, + } + } +} + +/// [`Order`] builder. +/// +/// Note that the price input to the `OrderBuilder` is in the "common" units of the perpetual/currency, not the quantized/atomic value. +/// +/// Two main classes of orders in dYdX from persistence perspective are +/// [short-term and long-term (stateful) orders](https://docs.dydx.exchange/api_integration-trading/short_term_vs_stateful). +/// +/// For different types of orders +/// see also [Stop-Limit Versus Stop-Loss](https://dydx.exchange/crypto-learning/stop-limit-versus-stop-loss) and +/// [Perpetual order types on dYdX Chain](https://help.dydx.trade/en/articles/166981-perpetual-order-types-on-dydx-chain). +#[derive(Clone, Debug)] +pub struct OrderBuilder { + market_params: OrderMarketParams, + #[allow(dead_code)] // TODO remove after completion + subaccount_id: SubaccountId, + flags: OrderFlags, + side: Option, + ty: Option, + size: Option, + price: Option, + time_in_force: Option, + reduce_only: Option, + until: Option, + post_only: Option, + execution: Option, + trigger_price: Option, + slippage: BigDecimal, +} + +impl OrderBuilder { + /// Create a new [`Order`] builder. + pub fn new(market_for: impl Into, subaccount: Subaccount) -> Self { + Self { + market_params: market_for.into(), + subaccount_id: subaccount.into(), + flags: OrderFlags::ShortTerm, + side: Some(OrderSide::Buy), + ty: Some(OrderType::Market), + size: None, + price: None, + time_in_force: None, + reduce_only: None, + until: None, + post_only: None, + execution: None, + trigger_price: None, + slippage: BigDecimal::new(5.into(), 2), + } + } + + /// Set as Market order. + /// + /// An instruction to immediately buy or sell an asset at the best available price when the order is placed. + pub fn market(mut self, side: impl Into, size: impl Into) -> Self { + self.ty = Some(OrderType::Market); + self.side = Some(side.into()); + self.size = Some(size.into()); + self + } + + /// Set as Limit order. + /// + /// With a limit order, a trader specifies the price at which they’re willing to buy or sell an asset. + /// Unlike market orders, limit orders don’t go into effect until the market price hits a trader’s “limit price.” + pub fn limit( + mut self, + side: impl Into, + price: impl Into, + size: impl Into, + ) -> Self { + self.ty = Some(OrderType::Limit); + self.price = Some(price.into()); + self.side = Some(side.into()); + self.size = Some(size.into()); + self + } + + /// Set as Stop Limit order + /// + /// Stop-limit orders use a stop `trigger_price` and a limit `price` to give investors greater control over their trades. + /// When setting up a stop-limit order, traders set a `trigger_price` when their order enters the market + /// and a limit `price` when they want the order to execute. 
+ pub fn stop_limit( + mut self, + side: impl Into, + price: impl Into, + trigger_price: impl Into, + size: impl Into, + ) -> Self { + self.ty = Some(OrderType::StopLimit); + self.price = Some(price.into()); + self.trigger_price = Some(trigger_price.into()); + self.side = Some(side.into()); + self.size = Some(size.into()); + self.conditional() + } + + /// Set as Stop Market order. + /// + /// When using a stop order, the trader sets a `trigger_price` to trigger a buy or sell order on their exchange. + /// The moment that condition is met, it triggers a market order executed at the current market price. + /// This means that, unlike limit orders, the execution price of a stop order may be different from the price set by the trader. + pub fn stop_market( + mut self, + side: impl Into, + trigger_price: impl Into, + size: impl Into, + ) -> Self { + self.ty = Some(OrderType::StopMarket); + self.trigger_price = Some(trigger_price.into()); + self.side = Some(side.into()); + self.size = Some(size.into()); + self.conditional() + } + + /// Set as Take Profit Limit order. + /// + /// The order enters in force if the price reaches `trigger_price` and is executed at `price` after that. + pub fn take_profit_limit( + mut self, + side: impl Into, + price: impl Into, + trigger_price: impl Into, + size: impl Into, + ) -> Self { + self.ty = Some(OrderType::TakeProfit); + self.price = Some(price.into()); + self.trigger_price = Some(trigger_price.into()); + self.side = Some(side.into()); + self.size = Some(size.into()); + self.conditional() + } + + /// Set as Take Profit Market order. + /// + /// The order enters in force if the price reaches `trigger_price` and converst to an ordinary market order, + /// i.e. it is executed at the best available market price. + pub fn take_profit_market( + mut self, + side: impl Into, + trigger_price: impl Into, + size: impl Into, + ) -> Self { + self.ty = Some(OrderType::TakeProfitMarket); + self.trigger_price = Some(trigger_price.into()); + self.side = Some(side.into()); + self.size = Some(size.into()); + self.conditional() + } + + /// Set order as a long-term. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/place_order_long_term.rs). + pub fn long_term(mut self) -> Self { + self.flags = OrderFlags::LongTerm; + self + } + + /// Set order as a short-term. + /// + /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/place_order_short_term.rs). + pub fn short_term(mut self) -> Self { + self.flags = OrderFlags::ShortTerm; + self + } + + /// Set order as a conditional, triggered using `trigger_price`. + pub fn conditional(mut self) -> Self { + self.flags = OrderFlags::Conditional; + self + } + + /* Single setters */ + /// Set the limit price for Limit orders (and related types), + /// or the up-to allowed price for Market orders (and related types). + pub fn price(mut self, price: impl Into) -> Self { + self.price = Some(price.into()); + self + } + + /// [Position size](https://dydx.exchange/crypto-learning/glossary?#total-order-size). + pub fn size(mut self, size: impl Into) -> Self { + self.size = Some(size.into()); + self + } + + /// Set [time execution options](https://docs.dydx.exchange/api_integration-trading/order_types#time-in-force). + /// + /// Basically, it places of the order in the range between being + /// a Taker order and a Maker order. + /// + /// `IOC` (Taker) <---> `Unspecified` (Taker/Maker) <---> `Post` (Maker). 
+ /// + /// See also [Market Makers vs Market Takers](https://dydx.exchange/crypto-learning/market-makers-vs-market-takers). + pub fn time_in_force(mut self, tif: impl Into) -> Self { + self.time_in_force = Some(tif.into()); + self + } + + /// Set an order as [reduce-only](https://docs.dydx.exchange/api_integration-trading/order_types#reduce-only-order-ro). + pub fn reduce_only(mut self, reduce: impl Into) -> Self { + self.reduce_only = Some(reduce.into()); + self + } + + /// Set order's expiration. + pub fn until(mut self, gtof: impl Into) -> Self { + self.until = Some(gtof.into()); + self + } + + /// Time execution pattern. + /// + /// See also [`OrderBuilder::time_in_force`]. + pub fn execution(mut self, execution: impl Into) -> Self { + self.execution = Some(execution.into()); + self + } + + /// Allowed slippage (%) for long term and conditional Market orders. + /// + /// Allowed price slippage is calculated based on the provided PerpetualMarket oracle price, + /// or, a user-provided `price()` taking precedence. + /// + /// See also [What is Slippage in Crypto?](https://dydx.exchange/crypto-learning/what-is-slippage-in-crypto). + pub fn allowed_slippage(mut self, slippage_percent: impl Into) -> Self { + self.slippage = slippage_percent.into() / 100.0; + self + } + + /// Update the generator's market. + /// + /// Note that at the moment dYdX [doesn't support](https://dydx.exchange/faq) spot trading. + pub fn update_market(&mut self, market_for: impl Into) { + self.market_params = market_for.into(); + } + + /// Update the generator's market oracle price. + pub fn update_market_price(&mut self, price: impl Into) { + self.market_params.oracle_price = Some(price.into()); + } + + /* Builder */ + /// Build an [`Order`] and a corresponding [`OrderId`]. + /// + /// `client_id` [impacts](https://docs.dydx.exchange/api_integration-clients/validator_client#replacing-an-order) an order id. + /// So it is important to provide its uniqueness as otherwise some orders may overwrite others. 
+ pub fn build(self, client_id: impl Into) -> Result<(OrderId, Order), Error> { + let side = self + .side + .ok_or_else(|| err!("Missing Order side (Buy/Sell)"))?; + let size = self + .size + .as_ref() + .ok_or_else(|| err!("Missing Order size"))?; + let ty = self.ty.as_ref().ok_or_else(|| err!("Missing Order type"))?; + let post_only = self.post_only.as_ref().unwrap_or(&false); + let execution = self.execution.as_ref().unwrap_or(&OrderExecution::Default); + let time_in_force = ty.time_in_force( + &self.time_in_force.unwrap_or(OrderTimeInForce::Unspecified), + *post_only, + execution, + )?; + let reduce_only = *self.reduce_only.as_ref().unwrap_or(&false); + let until = self + .until + .as_ref() + .ok_or_else(|| err!("Missing Order until (good-til-oneof)"))?; + let quantums = self + .market_params + .quantize_quantity(size.clone()) + .to_u64() + .ok_or_else(|| err!("Failed converting BigDecimal size into u64"))?; + let conditional_order_trigger_subticks = match ty { + OrderType::StopLimit + | OrderType::StopMarket + | OrderType::TakeProfit + | OrderType::TakeProfitMarket => self + .market_params + .quantize_price( + self.trigger_price + .clone() + .ok_or_else(|| err!("Missing Order trigger price"))?, + ) + .to_u64() + .ok_or_else(|| err!("Failed converting BigDecimal trigger-price into u64"))?, + _ => 0, + }; + + let clob_pair_id = self.market_params.clob_pair_id().0; + + let order_id = OrderId { + subaccount_id: Some(self.subaccount_id.clone()), + client_id: client_id.into().0, + order_flags: self.flags.clone() as u32, + clob_pair_id, + }; + + let order = Order { + order_id: Some(order_id.clone()), + side: side.into(), + quantums, + subticks: self.calculate_subticks()?, + time_in_force: time_in_force.into(), + reduce_only, + client_metadata: DEFAULT_RUST_CLIENT_METADATA, + condition_type: ty.condition_type()?.into(), + conditional_order_trigger_subticks, + good_til_oneof: Some(until.clone().try_into()?), + }; + + Ok((order_id, order)) + } + + /* Helpers */ + fn calculate_subticks(&self) -> Result { + let ty = self.ty.as_ref().ok_or_else(|| err!("Missing Order type"))?; + let price = match ty { + OrderType::Market | OrderType::StopMarket | OrderType::TakeProfitMarket => { + // Use user-provided slippage price + if let Some(price) = self.price.clone() { + price + // Calculate slippage price based on oracle price + } else if let Some(oracle_price) = self.market_params.oracle_price.clone() { + let side = self + .side + .as_ref() + .ok_or_else(|| err!("Missing Order side"))?; + let one = ::one(); + match side { + OrderSide::Buy => oracle_price * (one + &self.slippage), + OrderSide::Sell => oracle_price * (one - &self.slippage), + _ => return Err(err!("Order side {side:?} not supported")), + } + } else { + return Err(err!("Failed to calculate Market order slippage price")); + } + } + _ => self + .price + .clone() + .ok_or_else(|| err!("Missing Order price"))?, + }; + + self.market_params + .quantize_price(price) + .to_u64() + .ok_or_else(|| err!("Failed converting BigDecimal price into u64")) + } +} + +impl OrderType { + /// Validate time execution options. + /// + /// See also [`OrderBuilder::time_in_force`]. 
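A minimal sketch of the builder end to end, for a short-term market order that expires by block height; the size is a placeholder, the market is assumed to come from the Indexer, and the current height is fetched with `get_latest_block_height` as `close_position` does above:

use anyhow::Result;
use dydx_v4_rust::indexer::{ClientId, PerpetualMarket};
use dydx_v4_rust::node::{
    Account, NodeClient, OrderBuilder, OrderSide, SHORT_TERM_ORDER_MAXIMUM_LIFETIME,
};

async fn short_term_market_sell(
    node: &mut NodeClient,
    account: &mut Account,
    market: PerpetualMarket,
) -> Result<()> {
    let subaccount = account.subaccount(0)?;
    let height = node.get_latest_block_height().await?;

    // Short-term orders expire by block height, at most 20 blocks ahead.
    let (_order_id, order) = OrderBuilder::new(market, subaccount)
        .market(OrderSide::Sell, 1) // size in "common" units
        .short_term()
        .until(height.ahead(SHORT_TERM_ORDER_MAXIMUM_LIFETIME))
        .build(ClientId::random())?;

    node.place_order(account, order).await?;
    Ok(())
}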
+ pub fn time_in_force( + &self, + time_in_force: &OrderTimeInForce, + post_only: bool, + execution: &OrderExecution, + ) -> Result { + match self { + OrderType::Market => Ok(OrderTimeInForce::Ioc), + OrderType::Limit => { + if post_only { + Ok(OrderTimeInForce::PostOnly) + } else { + Ok(*time_in_force) + } + } + OrderType::StopLimit | OrderType::TakeProfit => match execution { + OrderExecution::Default => Ok(OrderTimeInForce::Unspecified), + OrderExecution::PostOnly => Ok(OrderTimeInForce::PostOnly), + OrderExecution::Fok => Ok(OrderTimeInForce::FillOrKill), + OrderExecution::Ioc => Ok(OrderTimeInForce::Ioc), + }, + OrderType::StopMarket | OrderType::TakeProfitMarket => match execution { + OrderExecution::Default | OrderExecution::PostOnly => Err(err!( + "Execution value {execution:?} not supported for order type {self:?}" + )), + OrderExecution::Fok => Ok(OrderTimeInForce::FillOrKill), + OrderExecution::Ioc => Ok(OrderTimeInForce::Ioc), + }, + _ => Err(err!( + "Invalid combination of order type, time in force, and execution" + )), + } + } + + /// Get [the condition type](https://docs.dydx.exchange/api_integration-trading/order_types#condition-types) for the order. + pub fn condition_type(&self) -> Result { + match self { + OrderType::Limit | OrderType::Market => Ok(ConditionType::Unspecified), + OrderType::StopLimit | OrderType::StopMarket => Ok(ConditionType::StopLoss), + OrderType::TakeProfit | OrderType::TakeProfitMarket => Ok(ConditionType::TakeProfit), + _ => Err(err!("Order type unsupported for condition type")), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::indexer::{ClobPairId, PerpetualMarketStatus, PerpetualMarketType, Ticker}; + use std::str::FromStr; + + fn sample_market_params() -> OrderMarketParams { + PerpetualMarket { + ticker: Ticker::from("BTC-USD"), + + atomic_resolution: -10, + clob_pair_id: ClobPairId(0), + market_type: PerpetualMarketType::Cross, + quantum_conversion_exponent: -9, + step_base_quantums: 1_000_000, + subticks_per_tick: 100_000, + + base_open_interest: Default::default(), + initial_margin_fraction: Default::default(), + maintenance_margin_fraction: Default::default(), + next_funding_rate: Default::default(), + open_interest: Default::default(), + open_interest_lower_cap: None, + open_interest_upper_cap: None, + oracle_price: Default::default(), + price_change_24h: Default::default(), + status: PerpetualMarketStatus::Active, + step_size: Default::default(), + tick_size: Default::default(), + trades_24h: 0, + volume_24h: Quantity(0.into()), + } + .into() + } + + fn bigdecimal(val: &str) -> BigDecimal { + BigDecimal::from_str(val).expect("Failed converting str into BigDecimal") + } + + #[test] + fn market_size_to_quantums() { + let market = sample_market_params(); + let size = bigdecimal("0.01"); + let quantums = market.quantize_quantity(size); + let expected = bigdecimal("100_000_000"); + assert_eq!(quantums, expected); + } + + #[test] + fn market_price_to_subticks() { + let market = sample_market_params(); + let price = bigdecimal("50_000"); + let subticks = market.quantize_price(price); + let expected = bigdecimal("5_000_000_000"); + assert_eq!(subticks, expected); + } + + #[test] + fn market_quantums_to_size() { + let market = sample_market_params(); + let quantums = bigdecimal("100_000_000"); + let size = market.dequantize_quantums(quantums); + let expected = bigdecimal("0.01"); + assert_eq!(size, expected); + } + + #[test] + fn market_subticks_to_price() { + let market = sample_market_params(); + let subticks = 
bigdecimal("5_000_000_000"); + let price = market.dequantize_subticks(subticks); + let expected = bigdecimal("50_000"); + assert_eq!(price, expected); + } +} diff --git a/v4-client-rs/client/src/node/sequencer.rs b/v4-client-rs/client/src/node/sequencer.rs new file mode 100644 index 00000000..2887419a --- /dev/null +++ b/v4-client-rs/client/src/node/sequencer.rs @@ -0,0 +1,130 @@ +use anyhow::{anyhow as err, Error, Result}; +use async_trait::async_trait; +#[allow(unused_imports)] +use chrono::{DateTime, Utc}; + +use super::Address; +use std::collections::HashMap; +use tonic::{transport::Channel, Request}; +use tower::timeout::Timeout; +use v4_proto_rs::cosmos_sdk_proto::{ + cosmos::auth::v1beta1::{ + query_client::QueryClient as QueryGrpcClient, BaseAccount, QueryAccountRequest, + }, + traits::Message, +}; + +/// Transaction sequence number validation value used to enable protection against replay attacks. +#[derive(Clone, Debug)] +pub enum Nonce { + /// A sequence number is incremental, associated with the number of transactions (short-term + /// orders excluded) issued by an [`Account`](super::wallet::Account). + Sequence(u64), + /// A valid timestamp nonce folows the rules: + /// 1. now - 30s ≤ timestamp ≤ now + 30s; + /// 2. timestamp is strictly larger than any of the largest 20 timestamp nonces previously submitted in the account’s lifetime; + /// 3. timestamp has never been used before. + Timestamp(u64), +} + +impl Nonce { + /// Create a new timestamp `Nonce` using the current timestamp + pub fn now() -> Self { + Self::Timestamp(Utc::now().timestamp_millis() as u64) + } +} + +/// A trait to produce [`Nonce`]s for [`Account`](super::Account). +#[async_trait] +pub trait Sequencer: Send + 'static { + /// Returns the next nonce. + async fn next_nonce(&mut self, address: &Address) -> Result; +} + +/// A simple incremental sequencer. +/// An internal counter is increased in every `next_nonce()` call. +#[allow(dead_code)] // TODO remove after completion +#[derive(Clone, Debug)] +pub struct IncrementalSequencer { + counters: HashMap, +} + +impl IncrementalSequencer { + /// Add relevant `Address`es and respective starting counter values + #[allow(dead_code)] // TODO remove after completion + pub fn new(addresses: &[(Address, u64)]) -> Self { + Self { + counters: addresses.iter().cloned().collect(), + } + } + + /// Adds an `Address` with a starting counter value to the sequencer + #[allow(dead_code)] // TODO remove after completion + pub fn add_address(&mut self, address: Address, start_at: u64) -> Option { + self.counters.insert(address, start_at) + } +} + +#[async_trait] +impl Sequencer for IncrementalSequencer { + async fn next_nonce(&mut self, address: &Address) -> Result { + let counter = self + .counters + .get_mut(address) + .ok_or_else(|| err!("Address {address} not found in sequencer"))?; + *counter += 1; + Ok(Nonce::Sequence(*counter - 1)) + } +} + +/// A sequencer which fetches the next sequence number from the network. +#[allow(dead_code)] // TODO remove after completion +#[derive(Clone, Debug)] +pub struct QueryingSequencer { + querier: QueryGrpcClient>, +} + +impl QueryingSequencer { + /// Creates a new `QueryingSequencer` using a gRPC [`Channel`]. 
+ #[allow(dead_code)] // TODO remove after completion + pub fn new(channel: Timeout) -> Self { + Self { + querier: QueryGrpcClient::new(channel), + } + } +} + +#[async_trait] +impl Sequencer for QueryingSequencer { + async fn next_nonce(&mut self, address: &Address) -> Result { + let response = self + .querier + .account(Request::new(QueryAccountRequest { + address: address.to_string(), + })) + .await? + .into_inner(); + let sequence = BaseAccount::decode( + &response + .account + .ok_or_else(|| err!("Query account request failure, account should exist."))? + .value[..], + ) + .map(|res| res.sequence) + .map_err(|e| err!("Query account request decode failure: {e}"))?; + + Ok(Nonce::Sequence(sequence)) + } +} + +/// A sequencer which uses a current timestamp as a sequence number. +#[allow(dead_code)] // TODO remove after completion +#[derive(Clone, Debug)] +pub struct TimestamperSequencer; + +#[async_trait] +impl Sequencer for TimestamperSequencer { + async fn next_nonce(&mut self, _: &Address) -> Result { + Ok(Nonce::now()) + } +} diff --git a/v4-client-rs/client/src/node/types.rs b/v4-client-rs/client/src/node/types.rs new file mode 100644 index 00000000..8267081b --- /dev/null +++ b/v4-client-rs/client/src/node/types.rs @@ -0,0 +1,27 @@ +use cosmrs::tendermint::{chain::Id, error::Error}; +use serde::Deserialize; +use strum::{AsRefStr, Display}; + +/// [Chain ID](https://docs.dydx.exchange/infrastructure_providers-network/network_constants#chain-id) +/// serves as a unique chain identificator to prevent replay attacks. +/// +/// See also [Cosmos ecosystem](https://cosmos.directory/). +#[derive(Debug, Eq, PartialEq, Clone, Display, AsRefStr, Deserialize)] +pub enum ChainId { + /// Testnet. + #[strum(serialize = "dydx-testnet-4")] + #[serde(rename = "dydx-testnet-4")] + Testnet4, + /// Mainnet. + #[strum(serialize = "dydx-mainnet-1")] + #[serde(rename = "dydx-mainnet-1")] + Mainnet1, +} + +impl TryFrom for Id { + type Error = Error; + + fn try_from(chain_id: ChainId) -> Result { + chain_id.as_ref().parse() + } +} diff --git a/v4-client-rs/client/src/node/utils.rs b/v4-client-rs/client/src/node/utils.rs new file mode 100644 index 00000000..e2df75b0 --- /dev/null +++ b/v4-client-rs/client/src/node/utils.rs @@ -0,0 +1,96 @@ +use anyhow::{anyhow as err, Error}; +use bigdecimal::num_bigint::{BigInt, Sign}; + +/// An extension trait for [`BigInt`]. +pub trait BigIntExt { + /// Initialize a heap-allocated big integer from a bytes slice. + fn from_serializable_int(bytes: &[u8]) -> Result; +} + +impl BigIntExt for BigInt { + fn from_serializable_int(bytes: &[u8]) -> Result { + if bytes.is_empty() { + return Ok(BigInt::from(0)); + } + + let sign = match bytes[0] { + 2 => Sign::Plus, + 3 => Sign::Minus, + _ => return Err(err!("Invalid sign byte, must be 2 or 3.")), + }; + + Ok(BigInt::from_bytes_be(sign, &bytes[1..])) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::str::FromStr; + + #[test] + fn node_utils_to_bigint() -> Result<(), Error> { + assert_eq!( + BigInt::from_str("0")?, + BigInt::from_serializable_int(&[0x02])? + ); + assert_eq!( + BigInt::from_str("-0")?, + BigInt::from_serializable_int(&[0x02])? + ); + assert_eq!( + BigInt::from_str("1")?, + BigInt::from_serializable_int(&[0x02, 0x01])? + ); + assert_eq!( + BigInt::from_str("-1")?, + BigInt::from_serializable_int(&[0x03, 0x01])? + ); + assert_eq!( + BigInt::from_str("255")?, + BigInt::from_serializable_int(&[0x02, 0xFF])? + ); + assert_eq!( + BigInt::from_str("-255")?, + BigInt::from_serializable_int(&[0x03, 0xFF])? 
+ ); + assert_eq!( + BigInt::from_str("256")?, + BigInt::from_serializable_int(&[0x02, 0x01, 0x00])? + ); + assert_eq!( + BigInt::from_str("-256")?, + BigInt::from_serializable_int(&[0x03, 0x01, 0x00])? + ); + assert_eq!( + BigInt::from_str("123456789")?, + BigInt::from_serializable_int(&[0x02, 0x07, 0x5b, 0xcd, 0x15])? + ); + assert_eq!( + BigInt::from_str("-123456789")?, + BigInt::from_serializable_int(&[0x03, 0x07, 0x5b, 0xcd, 0x15])? + ); + assert_eq!( + BigInt::from_str("123456789123456789")?, + BigInt::from_serializable_int(&[0x02, 0x01, 0xb6, 0x9b, 0x4b, 0xac, 0xd0, 0x5f, 0x15])? + ); + assert_eq!( + BigInt::from_str("-123456789123456789")?, + BigInt::from_serializable_int(&[0x03, 0x01, 0xb6, 0x9b, 0x4b, 0xac, 0xd0, 0x5f, 0x15])? + ); + assert_eq!( + BigInt::from_str("123456789123456789123456789")?, + BigInt::from_serializable_int(&[ + 0x02, 0x66, 0x1e, 0xfd, 0xf2, 0xe3, 0xb1, 0x9f, 0x7c, 0x04, 0x5f, 0x15 + ])? + ); + assert_eq!( + BigInt::from_str("-123456789123456789123456789")?, + BigInt::from_serializable_int(&[ + 0x03, 0x66, 0x1e, 0xfd, 0xf2, 0xe3, 0xb1, 0x9f, 0x7c, 0x04, 0x5f, 0x15 + ])? + ); + + Ok(()) + } +} diff --git a/v4-client-rs/client/src/node/wallet.rs b/v4-client-rs/client/src/node/wallet.rs new file mode 100644 index 00000000..9ea4ab06 --- /dev/null +++ b/v4-client-rs/client/src/node/wallet.rs @@ -0,0 +1,180 @@ +use super::client::NodeClient; +use super::sequencer::Nonce; +use crate::indexer::{Address, Subaccount}; +use anyhow::{anyhow as err, Error}; +use bip32::{DerivationPath, Language, Mnemonic, Seed}; +use cosmrs::{ + crypto::{secp256k1::SigningKey, PublicKey}, + tx, AccountId, +}; +use std::str::FromStr; + +/// account prefix https://docs.cosmos.network/main/learn/beginner/accounts +const BECH32_PREFIX_DYDX: &str = "dydx"; + +/// Hierarchical Deterministic (HD) [wallet](https://dydx.exchange/crypto-learning/glossary?#wallet) +/// which allows to have multiple addresses and signing keys from one master seed. +/// +/// [BIP-44](https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki) introduced a wallet standard to derive multiple accounts +/// for different chains from a single seed (which allows to recover the whole tree of keys). +/// This `Wallet` hardcodes Cosmos ATOM token so it can derive multiple addresses from their corresponding indices. +/// +/// See also [Mastering Bitcoin](https://github.com/bitcoinbook/bitcoinbook/blob/develop/ch05_wallets.adoc). +pub struct Wallet { + seed: Seed, +} + +impl Wallet { + /// Derive a seed from a 24-words English mnemonic phrase. + pub fn from_mnemonic(mnemonic: &str) -> Result { + let seed = Mnemonic::new(mnemonic, Language::English)?.to_seed(""); + Ok(Self { seed }) + } + + /// Derive a dYdX account with updated account and sequence numbers. + pub async fn account(&self, index: u32, client: &mut NodeClient) -> Result { + let mut account = self.account_offline(index)?; + (account.account_number, account.sequence_number) = + client.query_address(account.address()).await?; + Ok(account) + } + + /// Derive a dYdX account with zero'ed account and sequence numbers. + pub fn account_offline(&self, index: u32) -> Result { + self.derive_account(index, BECH32_PREFIX_DYDX) + } + + #[cfg(feature = "noble")] + /// Noble-specific `Wallet` operations. 
+ pub fn noble(&self) -> noble::WalletOps<'_> { + noble::WalletOps::new(self) + } + + fn derive_account(&self, index: u32, prefix: &str) -> Result { + // https://github.com/satoshilabs/slips/blob/master/slip-0044.md + let derivation_str = format!("m/44'/118'/0'/0/{index}"); + let derivation_path = DerivationPath::from_str(&derivation_str)?; + let private_key = SigningKey::derive_from_path(&self.seed, &derivation_path)?; + let public_key = private_key.public_key(); + let account_id = public_key.account_id(prefix).map_err(Error::msg)?; + let address = account_id.to_string().parse()?; + Ok(Account { + index, + account_id, + address, + key: private_key, + account_number: 0, + sequence_number: 0, + next_nonce: None, + }) + } +} + +/// Represents a derived account. +/// +/// See also [`Wallet`]. +pub struct Account { + index: u32, + #[allow(dead_code)] // TODO remove after completion + account_id: AccountId, + // The `String` representation of the `AccountId` + address: Address, + key: SigningKey, + // Online attributes + account_number: u64, + sequence_number: u64, + next_nonce: Option, +} + +impl Account { + /// An address of the account. + pub fn address(&self) -> &Address { + &self.address + } + + /// A public key associated with the account. + pub fn public_key(&self) -> PublicKey { + self.key.public_key() + } + + /// An index of the derived account. + pub fn index(&self) -> &u32 { + &self.index + } + + /// A subaccount from a corresponding index. + pub fn subaccount(&self, number: u32) -> Result { + Ok(Subaccount::new(self.address.clone(), number.try_into()?)) + } + + /// Sign [`SignDoc`](tx::SignDoc) with a corresponding private key. + pub fn sign(&self, doc: tx::SignDoc) -> Result { + doc.sign(&self.key) + .map_err(|e| err!("Failure to sign doc: {e}")) + } + + /// The account number. + pub fn account_number(&self) -> u64 { + self.account_number + } + + /// The account sequence number. + pub fn sequence_number(&self) -> u64 { + self.sequence_number + } + + /// Set a new sequence number. + pub fn set_sequence_number(&mut self, sequence_number: u64) { + self.sequence_number = sequence_number; + } + + /// Gets the [`Nonce`] to be used in the next transaction. + pub fn next_nonce(&self) -> Option<&Nonce> { + self.next_nonce.as_ref() + } + + /// Set the [`Nonce`] to be used in the next transaction. + pub fn set_next_nonce(&mut self, nonce: Nonce) { + if let Nonce::Sequence(number) = nonce { + self.sequence_number = number + } + self.next_nonce = Some(nonce); + } +} + +#[cfg(feature = "noble")] +mod noble { + use super::*; + use crate::noble::NobleClient; + + const BECH32_PREFIX_NOBLE: &str = "noble"; + + /// Noble-specific wallet operations + pub struct WalletOps<'w> { + wallet: &'w Wallet, + } + + impl<'w> WalletOps<'w> { + /// Create a new Noble-specific wallet operations dispatcher. + pub fn new(wallet: &'w Wallet) -> Self { + Self { wallet } + } + + /// Derive a Noble account with updated account and sequence numbers. + pub async fn account( + &self, + index: u32, + client: &mut NobleClient, + ) -> Result { + let mut account = self.account_offline(index)?; + (account.account_number, account.sequence_number) = + client.query_address(account.address()).await?; + Ok(account) + } + + /// Derive a Noble account with zero'ed account and sequence numbers. 
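A minimal sketch of deriving accounts from the wallet above; the mnemonic is a placeholder:

use anyhow::Result;
use dydx_v4_rust::node::{NodeClient, Wallet};

async fn derive_account(node: &mut NodeClient) -> Result<()> {
    let wallet = Wallet::from_mnemonic("<24-word English mnemonic>")?;

    // Offline derivation: keys and address only, account/sequence numbers stay zero.
    let offline = wallet.account_offline(0)?;
    println!("dydx address: {}", offline.address());

    // Online derivation also fetches account_number / sequence_number from the node.
    let account = wallet.account(0, node).await?;
    let _subaccount = account.subaccount(0)?;
    println!("sequence number: {}", account.sequence_number());
    Ok(())
}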
+ pub fn account_offline(&self, index: u32) -> Result { + self.wallet.derive_account(index, BECH32_PREFIX_NOBLE) + } + } +} diff --git a/v4-client-rs/client/src/telemetry.rs b/v4-client-rs/client/src/telemetry.rs new file mode 100644 index 00000000..243f70e3 --- /dev/null +++ b/v4-client-rs/client/src/telemetry.rs @@ -0,0 +1,72 @@ +use std::time::Instant; + +// Metrics +/// Counter for orders opened with [`place_order`](crate::node::NodeClient::place_order). +pub const TELEMETRY_ORDERS_PLACED: &str = "orders.placed"; +/// Counter for orders cancelled with [`cancel_order`](crate::node::NodeClient::cancel_order). +pub const TELEMETRY_ORDERS_CANCELLED: &str = "orders.cancelled"; +/// Histogram for [`place_order`](crate::node::NodeClient::place_order) duration in milliseconds. +pub const TELEMETRY_PLACE_ORDER_DURATION: &str = "place_order.duration"; +/// Histogram for [`cancel_order`](crate::node::NodeClient::cancel_order) duration in milliseconds. +pub const TELEMETRY_CANCEL_ORDER_DURATION: &str = "cancel_order.duration"; +/// Histogram for [`batch_cancel_orders`](crate::node::NodeClient::batch_cancel_orders) duration in milliseconds. +pub const TELEMETRY_BATCH_CANCEL_ORDER_DURATION: &str = "batch_cancel_orders.duration"; +/// Histogram for [`query_transaction`](crate::node::NodeClient::query_transaction) duration in milliseconds +pub const TELEMETRY_QUERY_TX_DURATION: &str = "query_transaction.duration"; +/// Counter for reconnection attempts for Indexer Websocket feed +pub const TELEMETRY_WS_RECONNECTS: &str = "ws.reconnects"; +/// Counter for messages received by Indexer Websocket feed. +pub const TELEMETRY_WS_RECEIVED: &str = "ws.received"; +/// Counter for messages sent by Indexer Websocket feed. +pub const TELEMETRY_WS_SENT: &str = "ws.sent"; +/// Histogram for sending duration in milliseconds, Indexer Websocket feed messages. +pub const TELEMETRY_WS_SENT_DURATION: &str = "ws.sent.duration"; +// Descriptions +/// Description for [`TELEMETRY_ORDERS_PLACED`]. +pub const TELEMETRY_DESC_ORDERS_PLACED: &str = "Orders opened with `place_order`"; +/// Description for [`TELEMETRY_ORDERS_CANCELLED`]. +pub const TELEMETRY_DESC_ORDERS_CANCELLED: &str = "Orders cancelled with `cancel_order`"; +/// Description for [`TELEMETRY_PLACE_ORDER_DURATION`]. +pub const TELEMETRY_DESC_PLACE_ORDER_DURATION: &str = "`place_order` duration in milliseconds"; +/// Description for [`TELEMETRY_CANCEL_ORDER_DURATION`]. +pub const TELEMETRY_DESC_CANCEL_ORDER_DURATION: &str = "`cancel_order` duration in milliseconds"; +/// Description for [`TELEMETRY_BATCH_CANCEL_ORDER_DURATION`]. +pub const TELEMETRY_DESC_BATCH_CANCEL_ORDER_DURATION: &str = + "`batch_cancel_orders` duration in milliseconds"; +/// Description for [`TELEMETRY_QUERY_TX_DURATION`]. +pub const TELEMETRY_DESC_QUERY_TX_DURATION: &str = "`query_transaction` duration in milliseconds"; +/// Description for [`TELEMETRY_WS_RECONNECTS`]. +pub const TELEMETRY_DESC_WS_RECONNECTS: &str = "Reconnection attempts for Indexer Websocket feed"; +/// Description for [`TELEMETRY_WS_RECEIVED`]. +pub const TELEMETRY_DESC_WS_RECEIVED: &str = "Messages received by Indexer Websocket feed"; +/// Description for [`TELEMETRY_WS_SENT`]. +pub const TELEMETRY_DESC_WS_SENT: &str = "Messages sent by Indexer Websocket feed"; +/// Description for [`TELEMETRY_WS_SENT_DURATION`]. +pub const TELEMETRY_DESC_WS_SENT_DURATION: &str = + "Indexer Websocket feed messages, sending duration in milliseconds"; +// Labels +/// Label for address. 
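Assuming the `telemetry` module is public when the "telemetry" feature is enabled, these names can be used with whatever `metrics` recorder the application installs; a sketch mirroring what `place_order` records (recorder setup is out of scope here):

use dydx_v4_rust::telemetry::{TELEMETRY_LABEL_ADDRESS, TELEMETRY_ORDERS_PLACED};

fn record_placed_order(address: &str) {
    // Same counter update NodeClient::place_order performs after a successful broadcast.
    metrics::counter!(
        TELEMETRY_ORDERS_PLACED,
        &[(TELEMETRY_LABEL_ADDRESS, address.to_string())]
    )
    .increment(1);
}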
+pub const TELEMETRY_LABEL_ADDRESS: &str = "address"; + +pub(crate) struct LatencyMetric { + name: &'static str, + start: Instant, +} + +impl LatencyMetric { + pub(crate) fn new(name: &'static str) -> Self { + let start = Instant::now(); + Self { name, start } + } +} + +impl Drop for LatencyMetric { + fn drop(&mut self) { + // TODO replace with https://doc.rust-lang.org/stable/std/time/struct.Duration.html#method.as_millis_f64 + // when stable + let duration = self.start.elapsed(); + let latency = (duration.as_secs() as f64) * (1_000_f64) + + (duration.subsec_nanos() as f64) / (1_000_000_f64); + metrics::histogram!(self.name).record(latency); + } +} diff --git a/v4-client-rs/client/tests/env.rs b/v4-client-rs/client/tests/env.rs new file mode 100644 index 00000000..70f0bc24 --- /dev/null +++ b/v4-client-rs/client/tests/env.rs @@ -0,0 +1,118 @@ +use anyhow::{anyhow as err, Error, Result}; +use chrono::{TimeDelta, Utc}; +#[cfg(feature = "faucet")] +use dydx_v4_rust::faucet::FaucetClient; +#[cfg(feature = "noble")] +use dydx_v4_rust::noble::NobleClient; +use dydx_v4_rust::{ + config::ClientConfig, + indexer::{ClientId, Height, IndexerClient, PerpetualMarket, Ticker}, + node::{Account, Address, NodeClient, OrderBuilder, OrderId, OrderSide, Subaccount, Wallet}, +}; + +const TEST_MNEMONIC: &str = "mirror actor skill push coach wait confirm orchard lunch mobile athlete gossip awake miracle matter bus reopen team ladder lazy list timber render wait"; + +pub enum TestEnv {} + +#[allow(dead_code)] +impl TestEnv { + pub async fn testnet() -> Result { + TestnetEnv::bootstrap().await + } + + pub async fn mainnet() -> Result { + MainnetEnv::bootstrap().await + } +} + +#[allow(dead_code)] +pub struct MainnetEnv { + pub indexer: IndexerClient, + pub ticker: Ticker, +} + +impl MainnetEnv { + async fn bootstrap() -> Result { + let path = "tests/mainnet.toml"; + let config = ClientConfig::from_file(path).await?; + let indexer = IndexerClient::new(config.indexer); + let ticker = Ticker::from("ETH-USD"); + Ok(Self { indexer, ticker }) + } +} + +#[allow(dead_code)] +pub struct TestnetEnv { + pub node: NodeClient, + pub indexer: IndexerClient, + #[cfg(feature = "faucet")] + pub faucet: FaucetClient, + #[cfg(feature = "noble")] + pub noble: NobleClient, + pub wallet: Wallet, + pub account: Account, + pub address: Address, + pub subaccount: Subaccount, + pub ticker: Ticker, +} + +#[allow(dead_code)] +impl TestnetEnv { + async fn bootstrap() -> Result { + let path = "tests/testnet.toml"; + let config = ClientConfig::from_file(path).await?; + let mut node = NodeClient::connect(config.node).await?; + let indexer = IndexerClient::new(config.indexer); + #[cfg(feature = "faucet")] + let faucet = FaucetClient::new(config.faucet.ok_or_else(|| { + err!("Configuration file must contain a [faucet] configuration for testing") + })?); + #[cfg(feature = "noble")] + let noble = NobleClient::connect(config.noble.ok_or_else(|| { + err!("Configuration file must contain a [noble] configuration for testing") + })?) 
+ .await?; + let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?; + let account = wallet.account(0, &mut node).await?; + let ticker = Ticker::from("ETH-USD"); + let address = account.address().clone(); + let subaccount = account.subaccount(0)?; + Ok(Self { + node, + indexer, + #[cfg(feature = "faucet")] + faucet, + #[cfg(feature = "noble")] + noble, + wallet, + account, + address, + subaccount, + ticker, + }) + } + + pub async fn get_market(&self) -> Result { + self.indexer + .markets() + .get_perpetual_market(&self.ticker) + .await + } + + pub async fn get_height(&self) -> Result { + Ok(self.indexer.utility().get_height().await?.height) + } + + pub async fn spawn_order(&mut self) -> Result { + let market = self.get_market().await?; + let subaccount = self.account.subaccount(0)?; + let (id, order) = OrderBuilder::new(market, subaccount) + .limit(OrderSide::Buy, 1, 1) + .until(Utc::now() + TimeDelta::seconds(60)) + .long_term() + .build(ClientId::random())?; + let tx_res = self.node.place_order(&mut self.account, order).await; + self.node.query_transaction_result(tx_res).await?; + Ok(id) + } +} diff --git a/v4-client-rs/client/tests/mainnet.toml b/v4-client-rs/client/tests/mainnet.toml new file mode 100644 index 00000000..542c462d --- /dev/null +++ b/v4-client-rs/client/tests/mainnet.toml @@ -0,0 +1,9 @@ +[node] +# You can select other gRPC endpoints from [the list](https://docs.dydx.exchange/infrastructure_providers-network/resources#full-node-endpoints). +endpoint = "https://dydx-ops-grpc.kingnodes.com:443" +chain_id = "dydx-mainnet-1" +fee_denom = "ibc/8E27BA2D5493AF5636760E354E46004562C46AB7EC0CC4C1CA14E9E20E2545B5" + +[indexer] +http.endpoint = "https://indexer.dydx.trade/v4" +ws.endpoint = "wss://indexer.dydx.trade/v4/ws" diff --git a/v4-client-rs/client/tests/test_faucet.rs b/v4-client-rs/client/tests/test_faucet.rs new file mode 100644 index 00000000..28f0c49f --- /dev/null +++ b/v4-client-rs/client/tests/test_faucet.rs @@ -0,0 +1,40 @@ +mod env; + +#[cfg(feature = "faucet")] +mod faucet_tests { + use super::env; + use env::TestEnv; + + use anyhow::Error; + use dydx_v4_rust::indexer::Usdc; + + const FILL_AMOUNT: u64 = 1_000_000; + + #[tokio::test] + #[ignore] + async fn test_faucet_fill() -> Result<(), Error> { + let env = TestEnv::testnet().await?; + let faucet = env.faucet; + let subaccount = env.account.subaccount(0)?; + + println!( + "before equity: {:?}", + env.indexer + .accounts() + .get_subaccount(&subaccount) + .await? + .equity + ); + faucet.fill(&subaccount, &Usdc(FILL_AMOUNT.into())).await?; + println!( + "after equity: {:?}", + env.indexer + .accounts() + .get_subaccount(&subaccount) + .await? 
+ .equity + ); + + Ok(()) + } +} diff --git a/v4-client-rs/client/tests/test_indexer_rest.rs b/v4-client-rs/client/tests/test_indexer_rest.rs new file mode 100644 index 00000000..fcb4af92 --- /dev/null +++ b/v4-client-rs/client/tests/test_indexer_rest.rs @@ -0,0 +1,304 @@ +mod env; +use env::TestEnv; + +use anyhow::{anyhow as err, Result}; +use bigdecimal::BigDecimal; +use dydx_v4_rust::indexer::*; +use std::str::FromStr; + +#[tokio::test] +async fn test_indexer_markets_list_perpetual_markets() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer.markets().list_perpetual_markets(None).await?; + + let opts = ListPerpetualMarketsOpts { + ticker: Some(env.ticker), + ..Default::default() + }; + env.indexer + .markets() + .list_perpetual_markets(Some(opts)) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_markets_get_perpetual_market() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .markets() + .get_perpetual_market(&env.ticker) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_markets_get_perpetual_market_orderbook() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .markets() + .get_perpetual_market_orderbook(&env.ticker) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_markets_get_trades() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer.markets().get_trades(&env.ticker, None).await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_markets_get_candles() -> Result<()> { + let env = TestEnv::testnet().await?; + let res = CandleResolution::M1; + env.indexer + .markets() + .get_candles(&env.ticker, res, None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_markets_get_historical_funding() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .markets() + .get_historical_funding(&env.ticker, None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_markets_get_sparklines() -> Result<()> { + let env = TestEnv::testnet().await?; + let period = SparklineTimePeriod::OneDay; + env.indexer.markets().get_sparklines(period).await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_utility_get_time() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer.utility().get_time().await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_utility_get_height() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer.utility().get_height().await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_utility_get_screen() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer.utility().get_screen(&env.address).await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_get_subaccounts() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer.accounts().get_subaccounts(&env.address).await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_get_subaccount() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .accounts() + .get_subaccount(&env.subaccount) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_get_parent_subaccount() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .accounts() + .get_parent_subaccount(&env.subaccount.parent()) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_list_positions() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .accounts() + .list_positions(&env.subaccount, None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn 
test_indexer_account_list_parent_positions() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .accounts() + .list_parent_positions(&env.subaccount.parent(), None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_get_asset_positions() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .accounts() + .get_asset_positions(&env.subaccount) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_get_parent_asset_positions() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .accounts() + .get_parent_asset_positions(&env.subaccount.parent()) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_get_transfers() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .accounts() + .get_transfers(&env.subaccount, None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_get_parent_transfers() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .accounts() + .get_parent_transfers(&env.subaccount.parent(), None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_list_orders() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .accounts() + .list_orders(&env.subaccount, None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_list_parent_orders() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .accounts() + .list_parent_orders(&env.subaccount.parent(), None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_get_order() -> Result<()> { + let env = TestEnv::testnet().await?; + let orders = env + .indexer + .accounts() + .list_orders(&env.subaccount, None) + .await?; + let order = orders + .first() + .ok_or_else(|| err!("at least one order is required for testing"))?; + env.indexer.accounts().get_order(&order.id).await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_get_fills() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .accounts() + .get_fills(&env.subaccount, None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_get_parent_fills() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .accounts() + .get_parent_fills(&env.subaccount.parent(), None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_get_historical_pnl() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .accounts() + .get_historical_pnl(&env.subaccount, None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_get_parent_historical_pnl() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .accounts() + .get_parent_historical_pnl(&env.subaccount.parent(), None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_get_rewards() -> Result<()> { + let env = TestEnv::testnet().await?; + env.indexer + .accounts() + .get_rewards(&env.address, None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_indexer_account_get_rewards_aggregated() -> Result<()> { + let env = TestEnv::testnet().await?; + let period = TradingRewardAggregationPeriod::Daily; + env.indexer + .accounts() + .get_rewards_aggregated(&env.address, period, None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_perpetual_market_quantization() -> Result<()> { + let env = TestEnv::testnet().await?; + let markets = env.indexer.markets().list_perpetual_markets(None).await?; + let params = markets + .get(&env.ticker) + 
.ok_or_else(|| err!("The ticker {} has not found!", env.ticker))? + .order_params(); + + let price = BigDecimal::from_str("4321.1234")?; + let quantized = params.quantize_price(price); + let expected = BigDecimal::from_str("4321100000")?; + assert_eq!(quantized, expected); + + let size = BigDecimal::from_str("4321.1234")?; + let quantized = params.quantize_quantity(size); + let expected = BigDecimal::from_str("4321123000000")?; + assert_eq!(quantized, expected); + Ok(()) +} diff --git a/v4-client-rs/client/tests/test_indexer_sock.rs b/v4-client-rs/client/tests/test_indexer_sock.rs new file mode 100644 index 00000000..b6dedf24 --- /dev/null +++ b/v4-client-rs/client/tests/test_indexer_sock.rs @@ -0,0 +1,635 @@ +mod env; +use env::TestEnv; + +use anyhow::{anyhow as err, Error}; +use dydx_v4_rust::indexer::*; +use tokio::time::{sleep, Duration, Instant}; + +#[tokio::test] +async fn test_indexer_sock_trades() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed = env.indexer.feed().trades(&env.ticker, false).await?; + + match feed.recv().await { + Some(TradesMessage::Initial(_)) => {} + other => { + return Err(err!("Not the Trades event is received: {other:?}")); + } + } + + Ok(()) +} + +#[tokio::test] +async fn test_indexer_sock_trades_with_updates() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed = env.indexer.feed().trades(&env.ticker, false).await?; + + match feed.recv().await { + Some(TradesMessage::Initial(_)) => {} + other => { + return Err(err!("Not the Trades event is received: {other:?}")); + } + } + + match feed.recv().await { + Some(TradesMessage::Update(_)) => {} + other => { + return Err(err!("Not the Trades update is received: {other:?}")); + } + } + + Ok(()) +} + +#[tokio::test] +async fn test_indexer_sock_trades_with_batched_updates() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed = env.indexer.feed().trades(&env.ticker, true).await?; + + match feed.recv().await { + Some(TradesMessage::Initial(_)) => {} + other => { + return Err(err!("Not the Trades event is received: {other:?}")); + } + } + + match feed.recv().await { + Some(TradesMessage::Update(_)) => {} + other => { + return Err(err!("Not the Trades update is received: {other:?}")); + } + } + + Ok(()) +} + +#[tokio::test] +async fn test_indexer_sock_orders() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed = env.indexer.feed().orders(&env.ticker, false).await?; + + match feed.recv().await { + Some(OrdersMessage::Initial(_)) => {} + other => { + return Err(err!("Not the Orders event is received: {other:?}")); + } + } + + Ok(()) +} + +#[tokio::test] +async fn test_indexer_sock_orders_with_updates() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed = env.indexer.feed().orders(&env.ticker, false).await?; + + match feed.recv().await { + Some(OrdersMessage::Initial(_)) => {} + other => { + return Err(err!("Not the Orders event is received: {other:?}")); + } + } + + match feed.recv().await { + Some(OrdersMessage::Update(_)) => {} + other => { + return Err(err!("Not the Orders update is received: {other:?}")); + } + } + + Ok(()) +} + +#[tokio::test] +async fn test_indexer_sock_orders_with_batched_updates() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed = env.indexer.feed().orders(&env.ticker, true).await?; + + match feed.recv().await { + Some(OrdersMessage::Initial(_)) => {} + other => { + return Err(err!("Not the Orders event is received: 
{other:?}")); + } + } + + match feed.recv().await { + Some(OrdersMessage::Update(_)) => {} + other => { + return Err(err!("Not the Orders update is received: {other:?}")); + } + } + + Ok(()) +} + +#[tokio::test] +async fn test_indexer_sock_markets() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed = env.indexer.feed().markets(false).await?; + + match feed.recv().await { + Some(MarketsMessage::Initial(_)) => {} + other => { + return Err(err!("Not the Markets event is received: {other:?}")); + } + } + + Ok(()) +} + +#[tokio::test] +async fn test_indexer_sock_markets_with_updates() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed = env.indexer.feed().markets(false).await?; + + match feed.recv().await { + Some(MarketsMessage::Initial(_)) => {} + other => { + return Err(err!("Not the Markets event is received: {other:?}")); + } + } + + match feed.recv().await { + Some(MarketsMessage::Update(_)) => {} + other => { + return Err(err!("Not the Markets update is received: {other:?}")); + } + } + + Ok(()) +} + +#[tokio::test] +async fn test_indexer_sock_markets_with_batched_updates() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed = env.indexer.feed().markets(true).await?; + + match feed.recv().await { + Some(MarketsMessage::Initial(_)) => {} + other => { + return Err(err!("Not the Markets event is received: {other:?}")); + } + } + + match feed.recv().await { + Some(MarketsMessage::Update(_)) => {} + other => { + return Err(err!("Not the Markets update is received: {other:?}")); + } + } + + Ok(()) +} + +#[tokio::test] +async fn test_indexer_sock_blockheight() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed = env.indexer.feed().block_height(false).await?; + + match feed.recv().await { + Some(BlockHeightMessage::Initial(_)) => {} + other => { + return Err(err!( + "Not the BlockHeight initial event is received: {other:?}" + )); + } + } + + Ok(()) +} + +#[tokio::test] +async fn test_indexer_sock_blockheight_with_updates() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed = env.indexer.feed().block_height(false).await?; + + match feed.recv().await { + Some(BlockHeightMessage::Initial(_)) => {} + other => { + return Err(err!( + "Not the BlockHeight initial event is received: {other:?}" + )); + } + } + + match feed.recv().await { + Some(BlockHeightMessage::Update(_)) => {} + other => { + return Err(err!("Not the BlockHeight update is received: {other:?}")); + } + } + + Ok(()) +} + +#[tokio::test] +async fn test_indexer_sock_blockheight_with_batched_updates() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed = env.indexer.feed().block_height(true).await?; + + match feed.recv().await { + Some(BlockHeightMessage::Initial(_)) => {} + other => { + return Err(err!( + "Not the BlockHeight initial event is received: {other:?}" + )); + } + } + + match feed.recv().await { + Some(BlockHeightMessage::Update(_)) => {} + other => { + return Err(err!("Not the BlockHeight update is received: {other:?}")); + } + } + + Ok(()) +} + +// Candles +#[tokio::test] +async fn test_indexer_sock_candles() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed = env + .indexer + .feed() + .candles(&env.ticker, CandleResolution::M1, false) + .await?; + + match feed.recv().await { + Some(CandlesMessage::Initial(_)) => {} + other => { + return Err(err!("Not the Candles event is received: {other:?}")); + } + } + + Ok(()) +} + 
+#[tokio::test] +async fn test_indexer_sock_candles_with_updates() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed = env + .indexer + .feed() + .candles(&env.ticker, CandleResolution::M1, false) + .await?; + + match feed.recv().await { + Some(CandlesMessage::Initial(_)) => {} + other => { + return Err(err!("Not the Candles event is received: {other:?}")); + } + } + + match feed.recv().await { + Some(CandlesMessage::Update(_)) => {} + other => { + return Err(err!("Not the Candles update is received: {other:?}")); + } + } + + Ok(()) +} + +#[tokio::test] +async fn test_indexer_sock_candles_with_batched_updates() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed = env + .indexer + .feed() + .candles(&env.ticker, CandleResolution::M1, true) + .await?; + + match feed.recv().await { + Some(CandlesMessage::Initial(_)) => {} + other => { + return Err(err!("Not the Candles event is received: {other:?}")); + } + } + + match feed.recv().await { + Some(CandlesMessage::Update(m)) => { + println!("{:?}", m); + } + other => { + return Err(err!("Not the Candles update is received: {other:?}")); + } + } + + Ok(()) +} + +// Subaccounts +#[tokio::test] +async fn test_indexer_sock_subaccounts() -> Result<(), Error> { + let mut env = TestEnv::testnet().await?; + let mut feed = env + .indexer + .feed() + .subaccounts(env.subaccount, false) + .await?; + + match feed.recv().await { + Some(SubaccountsMessage::Initial(_)) => {} + other => { + return Err(err!( + "Not the Subaccounts initial event is received: {other:?}" + )); + } + } + + Ok(()) +} + +#[tokio::test] +#[ignore] +async fn test_indexer_sock_subaccounts_with_updates() -> Result<(), Error> { + let mut env = TestEnv::testnet().await?; + let mut feed = env + .indexer + .feed() + .subaccounts(env.subaccount, false) + .await?; + + match feed.recv().await { + Some(SubaccountsMessage::Initial(_)) => {} + other => { + return Err(err!( + "Not the Subaccounts initial event is received: {other:?}" + )); + } + } + + match feed.recv().await { + Some(SubaccountsMessage::Update(_)) => {} + other => { + return Err(err!("Not the Subaccounts update is received: {other:?}")); + } + } + + Ok(()) +} + +#[tokio::test] +#[ignore] +async fn test_indexer_sock_subaccounts_with_batched_updates() -> Result<(), Error> { + let mut env = TestEnv::testnet().await?; + let mut feed = env.indexer.feed().subaccounts(env.subaccount, true).await?; + + match feed.recv().await { + Some(SubaccountsMessage::Initial(_)) => {} + other => { + return Err(err!( + "Not the Subaccounts initial event is received: {other:?}" + )); + } + } + + match feed.recv().await { + Some(SubaccountsMessage::Update(_)) => {} + other => { + return Err(err!("Not the Subaccounts update is received: {other:?}")); + } + } + + Ok(()) +} + +// Parent subaccounts +#[tokio::test] +async fn test_indexer_sock_parentsubaccounts() -> Result<(), Error> { + let mut env = TestEnv::testnet().await?; + let mut feed = env + .indexer + .feed() + .parent_subaccounts(env.subaccount.parent(), false) + .await?; + + match feed.recv().await { + Some(ParentSubaccountsMessage::Initial(_)) => {} + other => { + return Err(err!( + "Not the ParentSubaccounts initial event is received: {other:?}" + )); + } + } + + Ok(()) +} + +#[tokio::test] +#[ignore] +async fn test_indexer_sock_parentsubaccounts_with_updates() -> Result<(), Error> { + let mut env = TestEnv::testnet().await?; + let mut feed = env + .indexer + .feed() + .parent_subaccounts(env.subaccount.parent(), false) + .await?; + + 
match feed.recv().await { + Some(ParentSubaccountsMessage::Initial(_)) => {} + other => { + return Err(err!( + "Not the ParentSubaccounts initial event is received: {other:?}" + )); + } + } + + match feed.recv().await { + Some(ParentSubaccountsMessage::Update(_)) => {} + other => { + return Err(err!( + "Not the ParentSubaccounts update is received: {other:?}" + )); + } + } + + Ok(()) +} + +#[tokio::test] +#[ignore] +async fn test_indexer_sock_parentsubaccounts_with_batched_updates() -> Result<(), Error> { + let mut env = TestEnv::testnet().await?; + let mut feed = env + .indexer + .feed() + .parent_subaccounts(env.subaccount.parent(), true) + .await?; + + match feed.recv().await { + Some(ParentSubaccountsMessage::Initial(_)) => {} + other => { + return Err(err!( + "Not the ParentSubaccounts initial event is received: {other:?}" + )); + } + } + + match feed.recv().await { + Some(ParentSubaccountsMessage::Update(_)) => {} + other => { + return Err(err!( + "Not the ParentSubaccounts update is received: {other:?}" + )); + } + } + + Ok(()) +} + +// Misc +struct Feeder<M> { + feed: Feed<M>, + n_init: usize, + n_upd: usize, +} + +impl<M> Feeder<M> { + fn new(feed: Feed<M>) -> Self { + Self { + feed, + n_init: 0, + n_upd: 0, + } + } +} + +#[tokio::test] +async fn test_indexer_sock_handle_several_feeds() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + + let mut trades = Feeder::new(env.indexer.feed().trades(&env.ticker, false).await?); + let mut orders = Feeder::new(env.indexer.feed().orders(&env.ticker, false).await?); + let mut markets = Feeder::new(env.indexer.feed().markets(false).await?); + let mut candles = Feeder::new( + env.indexer + .feed() + .candles(&env.ticker, CandleResolution::M1, false) + .await?, + ); + + let start = Instant::now(); + let duration = Duration::from_secs(20); + while start.elapsed() < duration { + tokio::select!
{ + Some(msg) = trades.feed.recv() => { + match msg { + TradesMessage::Initial(_) => { trades.n_init += 1; }, + TradesMessage::Update(_) => { trades.n_upd += 1; }, + } + } + Some(msg) = orders.feed.recv() => { + match msg { + OrdersMessage::Initial(_) => { orders.n_init += 1; }, + OrdersMessage::Update(_) => { orders.n_upd += 1; }, + } + } + Some(msg) = markets.feed.recv() => { + match msg { + MarketsMessage::Initial(_) => { markets.n_init += 1; }, + MarketsMessage::Update(_) => { markets.n_upd += 1; }, + } + } + Some(msg) = candles.feed.recv() => { + match msg { + CandlesMessage::Initial(_) => { candles.n_init += 1; }, + CandlesMessage::Update(_) => { candles.n_upd += 1; }, + } + } + _ = sleep(Duration::from_millis(200)) => { + continue; + } + } + } + + assert!(trades.n_init == 1); + assert!(orders.n_init == 1); + assert!(markets.n_init == 1); + assert!(candles.n_init == 1); + assert!(trades.n_upd + orders.n_upd + markets.n_upd + candles.n_upd > 0); + + Ok(()) +} + +#[tokio::test] +async fn test_indexer_sock_resub_protection() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + let mut feed0 = env + .indexer + .feed() + .candles(&env.ticker, CandleResolution::M1, false) + .await?; + + let feed1 = env + .indexer + .feed() + .candles(&env.ticker, CandleResolution::M1, false) + .await; + match feed1 { + Err(FeedError::Resubscription) => {} + _ => return Err(err!("Expected Resubscription error")), + } + + match feed0.recv().await { + Some(CandlesMessage::Initial(_)) => {} + other => { + return Err(err!("Not the Candles event is received: {other:?}")); + } + } + + Ok(()) +} + +#[tokio::test] +async fn test_indexer_sock_rapid_requests() -> Result<(), Error> { + let mut env = TestEnv::mainnet().await?; + + for _ in 0..5 { + env.indexer.feed().trades(&env.ticker, false).await?; + env.indexer.feed().orders(&env.ticker, false).await?; + env.indexer.feed().markets(false).await?; + env.indexer + .feed() + .candles(&env.ticker, CandleResolution::M1, false) + .await?; + } + let mut trades = Feeder::new(env.indexer.feed().trades(&env.ticker, false).await?); + let mut orders = Feeder::new(env.indexer.feed().orders(&env.ticker, false).await?); + let mut markets = Feeder::new(env.indexer.feed().markets(false).await?); + let mut candles = Feeder::new( + env.indexer + .feed() + .candles(&env.ticker, CandleResolution::M1, false) + .await?, + ); + + let start = Instant::now(); + let duration = Duration::from_secs(10); + while start.elapsed() < duration { + tokio::select! 
{ + Some(TradesMessage::Update(_)) = trades.feed.recv() => { + trades.n_upd += 1; + } + Some(OrdersMessage::Update(_)) = orders.feed.recv() => { + orders.n_upd += 1; + } + Some(MarketsMessage::Update(_)) = markets.feed.recv() => { + markets.n_upd += 1; + } + Some(CandlesMessage::Update(_)) = candles.feed.recv() => { + candles.n_upd += 1; + } + _ = sleep(Duration::from_millis(200)) => { + continue; + } + } + } + + assert!(trades.n_upd + orders.n_upd + markets.n_upd + candles.n_upd > 0); + + Ok(()) +} diff --git a/v4-client-rs/client/tests/test_noble.rs b/v4-client-rs/client/tests/test_noble.rs new file mode 100644 index 00000000..735645dd --- /dev/null +++ b/v4-client-rs/client/tests/test_noble.rs @@ -0,0 +1,72 @@ +mod env; + +#[cfg(feature = "noble")] +mod noble_tests { + use super::env; + use env::TestEnv; + + use anyhow::Error; + use dydx_v4_rust::indexer::Denom; + use dydx_v4_rust::noble::NobleUsdc; + use serial_test::serial; + + #[tokio::test] + #[serial] + async fn test_noble_get_balance() -> Result<(), Error> { + let env = TestEnv::testnet().await?; + let mut noble = env.noble; + + let account = env.wallet.noble().account_offline(0)?; + let denom = Denom::NobleUsdc; + + let balance = noble + .get_account_balance(account.address().clone(), &denom) + .await?; + + assert_eq!(balance.denom, Denom::NobleUsdc.as_ref()); + + Ok(()) + } + + #[tokio::test] + #[serial] + async fn test_noble_get_balances() -> Result<(), Error> { + let env = TestEnv::testnet().await?; + let mut noble = env.noble; + + let account = env.wallet.noble().account_offline(0)?; + + noble + .get_account_balances(account.address().clone()) + .await?; + + Ok(()) + } + + #[tokio::test] + #[serial] + #[ignore] + async fn test_noble_send_token() -> Result<(), Error> { + let env = TestEnv::testnet().await?; + let mut noble = env.noble; + + let mut noble_account = env.wallet.noble().account(0, &mut noble).await?; + let dydx_account = env.wallet.account_offline(0)?; + + let sender = noble_account.address().clone(); + let recipient = dydx_account.address().clone(); + let source_channel = "channel-33".to_string(); + + noble + .send_token_ibc( + &mut noble_account, + sender, + recipient, + NobleUsdc::from(1000), + source_channel, + ) + .await?; + + Ok(()) + } +} diff --git a/v4-client-rs/client/tests/test_node.rs b/v4-client-rs/client/tests/test_node.rs new file mode 100644 index 00000000..95782d5c --- /dev/null +++ b/v4-client-rs/client/tests/test_node.rs @@ -0,0 +1,355 @@ +mod env; +use env::TestEnv; + +use anyhow::{anyhow as err, Error}; +use bigdecimal::{num_traits::cast::ToPrimitive, BigDecimal, One}; +use chrono::{TimeDelta, Utc}; +use dydx_v4_rust::{ + indexer::{OrderExecution, Token}, + node::*, +}; +use rand::{thread_rng, Rng}; +use serial_test::serial; +use std::str::FromStr; +use tokio::time::{sleep, Duration}; +use v4_proto_rs::dydxprotocol::{ + clob::{ + order::{self, ConditionType, Side, TimeInForce}, + Order, OrderBatch, OrderId, + }, + subaccounts::SubaccountId, +}; + +const ETH_USD_PAIR_ID: u32 = 1; // information on market ids can be fetched from the indexer API + +#[tokio::test] +async fn test_node_order_generator() -> Result<(), Error> { + let env = TestEnv::testnet().await?; + let market = env.get_market().await?; + let height = env.get_height().await?; + let account = env.account; + + // Test values + let price = BigDecimal::from_str("4000.0")?; + let subticks = 4_000_000_000_u64; + let quantity = BigDecimal::from_str("0.1")?; + let quantums = 100_000_000_u64; + let client_id = 123456; + + let until_height = 
height.ahead(SHORT_TERM_ORDER_MAXIMUM_LIFETIME); + let now = Utc::now(); + let until_time = now + TimeDelta::seconds(60); + + let oracle_price = market + .oracle_price + .clone() + .expect("Market oracle price required for testing"); + let allowed_slippage = BigDecimal::from_str("1.5")?; // % + let one = <BigDecimal as One>::one(); + let slippaged_price = oracle_price * (one - allowed_slippage.clone() / BigDecimal::from(100)); + let slippaged_subticks = market + .order_params() + .quantize_price(slippaged_price) + .to_u64() + .ok_or_else(|| err!("Failed converting slippage subticks to u64"))?; + + let generator = OrderBuilder::new(market, account.subaccount(0)?); + + // Short-term market order + let order_ms = generator + .clone() + .market(OrderSide::Sell, quantity.clone()) + .allowed_slippage(allowed_slippage.clone()) + .until(until_height.clone()) + .build(client_id)?; + + let order_ms_r = Order { + order_id: Some(OrderId { + subaccount_id: Some(SubaccountId { + owner: account.address().to_string(), + number: 0, + }), + client_id, + order_flags: 0_u32, + clob_pair_id: 1_u32, + }), + side: Side::Sell.into(), + quantums, + subticks: slippaged_subticks, + time_in_force: TimeInForce::Ioc.into(), + reduce_only: false, + client_metadata: DEFAULT_RUST_CLIENT_METADATA, + condition_type: ConditionType::Unspecified.into(), + conditional_order_trigger_subticks: 0u64, + good_til_oneof: Some(order::GoodTilOneof::GoodTilBlock(until_height.0)), + }; + + // Conditional stop market order + let order_mc = generator + .clone() + .stop_market(OrderSide::Sell, price.clone(), quantity.clone()) + .until(until_time) + // Optional, defaults to 5% + .allowed_slippage(allowed_slippage) + .execution(OrderExecution::Ioc) + .build(client_id)?; + + let order_mc_r = Order { + order_id: Some(OrderId { + subaccount_id: Some(SubaccountId { + owner: account.address().to_string(), + number: 0, + }), + client_id, + order_flags: 32_u32, + clob_pair_id: 1_u32, + }), + side: Side::Sell.into(), + quantums, + subticks: slippaged_subticks, + time_in_force: TimeInForce::Ioc.into(), + reduce_only: false, + client_metadata: DEFAULT_RUST_CLIENT_METADATA, + condition_type: ConditionType::StopLoss.into(), + conditional_order_trigger_subticks: subticks, + good_til_oneof: Some(order::GoodTilOneof::GoodTilBlockTime( + until_time.timestamp().try_into().unwrap(), + )), + }; + + // Long-term limit order + let order_ll = generator + .clone() + .limit(OrderSide::Buy, price, quantity) + .long_term() + .until(until_time) + .build(client_id)?; + + let order_ll_r = Order { + order_id: Some(OrderId { + subaccount_id: Some(SubaccountId { + owner: account.address().to_string(), + number: 0, + }), + client_id, + order_flags: 64_u32, + clob_pair_id: 1_u32, + }), + side: Side::Buy.into(), + quantums, + subticks, + time_in_force: TimeInForce::Unspecified.into(), + reduce_only: false, + client_metadata: DEFAULT_RUST_CLIENT_METADATA, + condition_type: ConditionType::Unspecified.into(), + conditional_order_trigger_subticks: 0u64, + good_til_oneof: Some(order::GoodTilOneof::GoodTilBlockTime( + until_time.timestamp().try_into().unwrap(), + )), + }; + + assert_eq!(order_ms.1, order_ms_r); + assert_eq!(order_mc.1, order_mc_r); + assert_eq!(order_ll.1, order_ll_r); + + Ok(()) +} + +#[tokio::test] +#[serial] +async fn test_node_place_order() -> Result<(), Error> { + let env = TestEnv::testnet().await?; + let market = env.get_market().await?; + let mut node = env.node; + let mut account = env.account; + let subaccount = account.subaccount(0)?; + + let (_id, order) = 
OrderBuilder::new(market, subaccount) + .limit(OrderSide::Buy, 1, 1) + .long_term() + .until(Utc::now() + TimeDelta::seconds(60)) + .build(thread_rng().gen_range(0..100_000_000))?; + + let tx_res = node.place_order(&mut account, order).await; + + node.query_transaction_result(tx_res).await?; + + Ok(()) +} + +#[tokio::test] +#[serial] +async fn test_node_place_order_market_short_term() -> Result<(), Error> { + let env = TestEnv::testnet().await?; + let market = env.get_market().await?; + let height = env.get_height().await?; + let mut node = env.node; + let mut account = env.account; + + let (_id, order) = OrderBuilder::new(market, account.subaccount(0)?) + .market(OrderSide::Buy, BigDecimal::from_str("0.001")?) + .price(10) // Low slippage price to not execute + .until(height.ahead(10)) + .build(thread_rng().gen_range(0..100_000_000))?; + + node.place_order(&mut account, order).await?; + + Ok(()) +} + +#[tokio::test] +#[serial] +#[ignore] +async fn test_node_cancel_order() -> Result<(), Error> { + let env = TestEnv::testnet().await?; + let market = env.get_market().await?; + let mut node = env.node; + let mut account = env.account; + let subaccount = account.subaccount(0)?; + + let (id, order) = OrderBuilder::new(market, subaccount) + .limit(OrderSide::Buy, 1, 1) + .until(Utc::now() + TimeDelta::seconds(60)) + .long_term() + .build(thread_rng().gen_range(0..100_000_000))?; + let order_tx_hash = node.place_order(&mut account, order).await?; + node.query_transaction(&order_tx_hash).await?; + + sleep(Duration::from_secs(2)).await; + + // Following requests will fail if account does not have funds + let until = OrderGoodUntil::Time(Utc::now() + TimeDelta::seconds(60)); + let tx_res = node.cancel_order(&mut account, id, until).await; + node.query_transaction_result(tx_res).await?; + + Ok(()) +} + +#[tokio::test] +#[serial] +async fn test_node_deposit() -> Result<(), Error> { + let env = TestEnv::testnet().await?; + let mut node = env.node; + let mut account = env.account; + + let sender = account.address().clone(); + let recipient = account.subaccount(0)?; + + let tx_res = node.deposit(&mut account, sender, recipient, 1).await; + node.query_transaction_result(tx_res).await?; + + Ok(()) +} + +#[tokio::test] +#[serial] +async fn test_node_withdraw() -> Result<(), Error> { + let env = TestEnv::testnet().await?; + let mut node = env.node; + let mut account = env.account; + + let sender = account.subaccount(0)?; + let recipient = account.address().clone(); + + let tx_res = node.withdraw(&mut account, sender, recipient, 1).await; + + node.query_transaction_result(tx_res).await?; + + Ok(()) +} + +#[tokio::test] +#[serial] +async fn test_node_transfer() -> Result<(), Error> { + let env = TestEnv::testnet().await?; + let mut node = env.node; + let mut account = env.account; + + let sender = account.subaccount(0)?; + let recipient = account.subaccount(1)?; + + let tx_res = node.transfer(&mut account, sender, recipient, 1).await; + + node.query_transaction_result(tx_res).await?; + + Ok(()) +} + +#[tokio::test] +#[serial] +async fn test_node_send_token() -> Result<(), Error> { + let env = TestEnv::testnet().await?; + let mut node = env.node; + let mut account = env.account; + + let sender = account.address().clone(); + let recipient = account.address().clone(); + + let tx_res = node + .send_token(&mut account, sender, recipient, Token::DydxTnt(1000.into())) + .await; + + node.query_transaction_result(tx_res).await?; + + Ok(()) +} + +#[tokio::test] +#[serial] +#[ignore] +async fn 
test_node_batch_cancel_orders() -> Result<(), Error> { + let mut env = TestEnv::testnet().await?; + + let order_id0 = env.spawn_order().await?; + sleep(Duration::from_secs(2)).await; + let order_id1 = env.spawn_order().await?; + sleep(Duration::from_secs(2)).await; + + let mut node = env.node; + let mut account = env.account; + + let subaccount = account.subaccount(0)?; + + let batch = OrderBatch { + clob_pair_id: ETH_USD_PAIR_ID, + client_ids: vec![order_id0.client_id, order_id1.client_id], + }; + let cancels = vec![batch]; + let good_til = node + .get_latest_block_height() + .await? + .ahead(SHORT_TERM_ORDER_MAXIMUM_LIFETIME); + + let tx_res = node + .batch_cancel_orders(&mut account, subaccount, cancels, good_til) + .await; + node.query_transaction_result(tx_res).await?; + + Ok(()) +} + +#[tokio::test] +#[serial] +async fn test_node_close_position() -> Result<(), Error> { + let env = TestEnv::testnet().await?; + let mut node = env.node; + let mut account = env.account; + + let subaccount = account.subaccount(0)?; + let market = env + .indexer + .markets() + .get_perpetual_market(&env.ticker) + .await?; + + node.close_position( + &mut account, + subaccount, + market, + None, + thread_rng().gen_range(0..100_000_000), + ) + .await?; + + Ok(()) +} diff --git a/v4-client-rs/client/tests/test_node_methods.rs b/v4-client-rs/client/tests/test_node_methods.rs new file mode 100644 index 00000000..8b961a8a --- /dev/null +++ b/v4-client-rs/client/tests/test_node_methods.rs @@ -0,0 +1,228 @@ +mod env; +use env::TestEnv; + +use anyhow::Result; +use dydx_v4_rust::indexer::Denom; + +#[tokio::test] +async fn test_node_get_account_balances() -> Result<()> { + let env = TestEnv::testnet().await?; + let mut node = env.node; + let address = env.account.address(); + + let balances = node.get_account_balances(address).await?; + assert!(!balances.is_empty()); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_account_balance() -> Result<()> { + let env = TestEnv::testnet().await?; + let mut node = env.node; + let address = env.account.address(); + let denom = Denom::Usdc; + + let balance = node.get_account_balance(address, &denom).await?; + assert_eq!(balance.denom, denom.as_ref()); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_account() -> Result<()> { + let env = TestEnv::testnet().await?; + let mut node = env.node; + let address = env.account.address(); + + let response = node.get_account(address).await?; + assert_eq!(response.address, env.account.address().as_ref()); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_node_info() -> Result<()> { + let mut node = TestEnv::testnet().await?.node; + + let info = node.get_node_info().await?; + assert!(info.default_node_info.is_some() || info.application_version.is_some()); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_latest_block() -> Result<()> { + let mut node = TestEnv::testnet().await?.node; + + let height = node.get_latest_block_height().await?.0; + assert!(height > 18_476_624); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_user_stats() -> Result<()> { + let env = TestEnv::testnet().await?; + let mut node = env.node; + let address = env.account.address(); + + let _stats = node.get_user_stats(address).await?; + Ok(()) +} + +#[tokio::test] +async fn test_node_get_all_validators() -> Result<()> { + let mut node = TestEnv::testnet().await?.node; + + let validators = node.get_all_validators(None).await?; + assert!(validators.len() > 2); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_subaccounts() -> Result<()> { + let mut 
node = TestEnv::testnet().await?.node; + + let subaccounts = node.get_subaccounts().await?; + assert!(!subaccounts.is_empty()); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_subaccount() -> Result<()> { + let env = TestEnv::testnet().await?; + let mut node = env.node; + let subaccount = env.account.subaccount(0)?; + + let subaccount_info = node.get_subaccount(&subaccount).await?; + assert!(subaccount_info.asset_positions.len() + subaccount_info.perpetual_positions.len() > 0); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_clob_pair() -> Result<()> { + let mut node = TestEnv::testnet().await?.node; + let pair_id = 0; + + let pair = node.get_clob_pair(pair_id).await?; + assert!(pair.id == pair_id); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_clob_pairs() -> Result<()> { + let mut node = TestEnv::testnet().await?.node; + + let pairs = node.get_clob_pairs(None).await?; + assert!(!pairs.is_empty()); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_price() -> Result<()> { + let mut node = TestEnv::testnet().await?.node; + let market_id = 0; + + let market_price = node.get_price(market_id).await?; + assert!(market_price.id == market_id); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_prices() -> Result<()> { + let mut node = TestEnv::testnet().await?.node; + + let prices = node.get_prices(None).await?; + assert!(!prices.is_empty()); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_perpetual() -> Result<()> { + let mut node = TestEnv::testnet().await?.node; + let perpetual_id = 0; + + let perpetual = node.get_perpetual(perpetual_id).await?; + let params = perpetual.params.unwrap(); + assert!(params.id == perpetual_id); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_perpetuals() -> Result<()> { + let mut node = TestEnv::testnet().await?.node; + + let perpetuals = node.get_perpetuals(None).await?; + assert!(!perpetuals.is_empty()); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_equity_tier_limit_config() -> Result<()> { + let mut node = TestEnv::testnet().await?.node; + + let config = node.get_equity_tier_limit_config().await?; + assert!( + config.stateful_order_equity_tiers.len() + config.short_term_order_equity_tiers.len() > 0 + ); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_delegator_delegations() -> Result<()> { + let env = TestEnv::testnet().await?; + let mut node = env.node; + let address = env.account.address(); + + let _response = node + .get_delegator_delegations(address.clone(), None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_node_get_delegator_unbonding_delegations() -> Result<()> { + let env = TestEnv::testnet().await?; + let mut node = env.node; + let address = env.account.address(); + + let _delegations = node + .get_delegator_unbonding_delegations(address.clone(), None) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_node_get_delayed_complete_bridge_messages() -> Result<()> { + let env = TestEnv::testnet().await?; + let mut node = env.node; + let address = env.account.address(); + + let _messages = node + .get_delayed_complete_bridge_messages(address.clone()) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_node_get_fee_tiers() -> Result<()> { + let mut node = TestEnv::testnet().await?.node; + + let fee_tiers = node.get_fee_tiers().await?; + assert!(!fee_tiers.is_empty()); + Ok(()) +} + +#[tokio::test] +async fn test_node_get_user_fee_tier() -> Result<()> { + let env = TestEnv::testnet().await?; + let mut node = env.node; + let address = env.account.address(); + + let _tier = 
node.get_user_fee_tier(address.clone()).await?; + Ok(()) +} + +#[tokio::test] +async fn test_node_get_rewards_params() -> Result<()> { + let mut node = TestEnv::testnet().await?.node; + + let _params = node.get_rewards_params().await?; + Ok(()) +} diff --git a/v4-client-rs/client/tests/test_node_sequencer.rs b/v4-client-rs/client/tests/test_node_sequencer.rs new file mode 100644 index 00000000..8016c511 --- /dev/null +++ b/v4-client-rs/client/tests/test_node_sequencer.rs @@ -0,0 +1,66 @@ +mod env; +use env::TestEnv; + +use anyhow::Error; +use dydx_v4_rust::node::sequencer::*; +use serial_test::serial; + +#[tokio::test] +#[serial] +async fn test_node_sequencer_query() -> Result<(), Error> { + let mut env = TestEnv::testnet().await?; + + let seqnum_before = env.account.sequence_number(); + + // account.sequence_number() holds the next sequence number to be used. + // We spawn two orders because wallet.account() already returns an account with an + // up-to-date sequence number: placing the first order does not change it, and only + // the second order increments it. + env.spawn_order().await?; + env.spawn_order().await?; + + let seqnum_after = env.account.sequence_number(); + + assert_eq!(seqnum_after, seqnum_before + 1); + + Ok(()) +} + +#[tokio::test] +#[serial] +async fn test_node_sequencer_incremental() -> Result<(), Error> { + let mut env = TestEnv::testnet().await?; + let address = env.account.address().clone(); + let sequencer = IncrementalSequencer::new(&[(address, env.account.sequence_number())]); + env.node.with_sequencer(sequencer); + + let seqnum_before = env.account.sequence_number(); + + env.spawn_order().await?; + env.spawn_order().await?; + + let seqnum_after = env.account.sequence_number(); + + assert_eq!(seqnum_after, seqnum_before + 1); + + Ok(()) +} + +#[tokio::test] +#[serial] +async fn test_node_sequencer_timestamp() -> Result<(), Error> { + let mut env = TestEnv::testnet().await?; + env.node.with_sequencer(TimestamperSequencer); + + let seqnum_before = env.account.sequence_number(); + + env.spawn_order().await?; + env.spawn_order().await?; + env.spawn_order().await?; + + let seqnum_after = env.account.sequence_number(); + + assert!(seqnum_after == seqnum_before); + + Ok(()) +} diff --git a/v4-client-rs/client/tests/testnet.toml b/v4-client-rs/client/tests/testnet.toml new file mode 100644 index 00000000..f99783af --- /dev/null +++ b/v4-client-rs/client/tests/testnet.toml @@ -0,0 +1,17 @@ +[node] +# You can select other gRPC endpoints from [the list](https://docs.dydx.exchange/infrastructure_providers-network/resources#full-node-endpoints). 
+endpoint = "https://test-dydx-grpc.kingnodes.com" +chain_id = "dydx-testnet-4" +fee_denom = "ibc/8E27BA2D5493AF5636760E354E46004562C46AB7EC0CC4C1CA14E9E20E2545B5" + +[indexer] +http.endpoint = "https://indexer.v4testnet.dydx.exchange" +ws.endpoint = "wss://indexer.v4testnet.dydx.exchange/v4/ws" + +[faucet] # optional +endpoint = "https://faucet.v4testnet.dydx.exchange" + +[noble] # optional +endpoint = "http://noble-testnet-grpc.polkachu.com:21590" +chain_id = "grand-1" +fee_denom = "uusdc" diff --git a/v4-client-rs/deny.toml b/v4-client-rs/deny.toml new file mode 100644 index 00000000..0632b7ba --- /dev/null +++ b/v4-client-rs/deny.toml @@ -0,0 +1,60 @@ +[graph] +targets = [ + { triple = "x86_64-unknown-linux-gnu" }, + { triple = "aarch64-unknown-linux-gnu" }, + { triple = "x86_64-unknown-linux-musl" }, + { triple = "aarch64-apple-darwin" }, + { triple = "x86_64-apple-darwin" }, + { triple = "x86_64-pc-windows-msvc" }, +] +all-features = false +no-default-features = false + +[output] +feature-depth = 1 + +[advisories] +db-path = "~/.cargo/advisory-db" +db-urls = ["https://github.com/rustsec/advisory-db"] + +[licenses] +allow = [ + "MIT", + "Apache-2.0", + "BSD-3-Clause", + "ISC", + "Unicode-DFS-2016", + "OpenSSL", +] +confidence-threshold = 0.8 +[[licenses.exceptions]] +allow = ["AGPL-3.0"] +name = "v4-proto-rs" +version = "*" + +[[licenses.clarify]] +name = "ring" +expression = "MIT AND ISC AND OpenSSL" +license-files = [ + { path = "LICENSE", hash = 0xbd0eed23 } +] + +[licenses.private] +ignore = false +registries = [] + +[bans] +multiple-versions = "warn" +wildcards = "allow" +highlight = "all" + +[sources] +unknown-registry = "warn" +unknown-git = "warn" +allow-registry = ["https://github.com/rust-lang/crates.io-index"] +allow-git = ["https://github.com/therustmonk/v4-chain.git"] + +[sources.allow-org] +github = [] +gitlab = [] +bitbucket = [] \ No newline at end of file