diff --git a/.github/workflows/commitlint.yml b/.github/workflows/commitlint.yml
index a4025bc1..0c6f8d40 100644
--- a/.github/workflows/commitlint.yml
+++ b/.github/workflows/commitlint.yml
@@ -1,9 +1,10 @@
-name: "[v4-client-js] Conventional PR Title"
+name: "Conventional PR Title"
on:
pull_request:
types: ['opened', 'edited', 'reopened', 'synchronize']
paths:
- 'v4-client-js/**'
+ - 'v4-client-rs/**'
jobs:
conventional-pr-title:
runs-on: ubuntu-latest
diff --git a/.github/workflows/rs-build-fmt-clippy-audit-test.yml b/.github/workflows/rs-build-fmt-clippy-audit-test.yml
new file mode 100644
index 00000000..3d5fe006
--- /dev/null
+++ b/.github/workflows/rs-build-fmt-clippy-audit-test.yml
@@ -0,0 +1,49 @@
+name: "[v4-client-rs] Build, Fmt, Clippy, Audit, & Test"
+
+on:
+ pull_request:
+ paths:
+ - 'v4-client-rs/**'
+ push:
+ paths:
+ - 'v4-client-rs/**'
+ branches:
+ - main
+ - "release/*"
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ defaults:
+ run:
+ working-directory: ./v4-client-rs
+
+ steps:
+ - name: Checkout source code
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+
+ - name: Install protoc # for dev dependencies only
+ run: sudo apt install -y protobuf-compiler
+
+ - name: Build
+ run: cargo build
+
+ - name: Check formatting
+ run: cargo fmt -- --check
+
+ - name: Linter
+ shell: bash
+ run: cargo clippy -- -D warnings
+
+ - name: Install audit
+ shell: bash
+ run: cargo install cargo-deny
+
+ - name: Security audit, licenses
+ shell: bash
+ run: cargo deny check licenses advisories sources
+
+ - name: Test
+ run: cargo test
diff --git a/README.md b/README.md
index 48ab0631..a2680c02 100644
--- a/README.md
+++ b/README.md
@@ -22,6 +22,12 @@ Python client for dYdX Chain. Developed and maintained by the Nethermind team.
- [Saul M.](https://github.com/samtin0x)
- [Piotr P.](https://github.com/piwonskp)
+## v4-client-rs
+Rust client for dYdX Chain. Developed and maintained by the Nethermind team.
+- [Emanuel V.](https://github.com/v0-e)
+- [Denis K.](https://github.com/therustmonk)
+- [Maksim R.](https://github.com/maksimryndin)
+
## v4-client-cpp (Third Party Client)
To pull the latest C++ client, run `git submodule update --init --recursive`
diff --git a/v4-client-rs/.gitignore b/v4-client-rs/.gitignore
new file mode 100644
index 00000000..a9d37c56
--- /dev/null
+++ b/v4-client-rs/.gitignore
@@ -0,0 +1,2 @@
+target
+Cargo.lock
diff --git a/v4-client-rs/Cargo.toml b/v4-client-rs/Cargo.toml
new file mode 100644
index 00000000..b4235010
--- /dev/null
+++ b/v4-client-rs/Cargo.toml
@@ -0,0 +1,20 @@
+[workspace]
+resolver = "2"
+members = [
+ "client",
+]
+
+[workspace.package]
+version = "0.1.0"
+edition = "2021"
+license = "AGPL-3.0"
+
+[workspace.dependencies]
+anyhow = "1"
+async-trait = "0.1"
+bigdecimal = { version = "0.4", features = ["serde"] }
+derive_more = { version = "1", features = ["full"] }
+log = "0.4"
+thiserror = "1"
+tokio = { version = "1.39", features = ["full"] }
+v4-proto-rs = { git = "https://github.com/therustmonk/v4-chain", rev = "a6265bbf4cd9812382a89d32c9304c08551f7bae" }
diff --git a/v4-client-rs/LICENSE b/v4-client-rs/LICENSE
new file mode 100644
index 00000000..22304f63
--- /dev/null
+++ b/v4-client-rs/LICENSE
@@ -0,0 +1,802 @@
+Copyright (C) 2023 dYdX Trading Inc.
+
+Subject to your compliance with applicable law and the v4 Terms of Use, available at dydx.exchange/legal, you are granted the right to use the Program or Licensed Work (defined below) under the terms of the GNU Affero General Public License as set forth below; provided, however, that if you violate any such applicable law in your use of the Program or Licensed Work, all of your rights and licenses to use (including any rights to reproduce, distribute, install or modify) the Program or Licensed Work will automatically and immediately terminate.
+
+
+The “Program” or “Licensed Work” shall mean any of the following: dydxprotocol/cosmos-sdk, dydxprotocol/cometbft, dydxprotocol/v4-chain, dydxprotocol/v4-clients, dydxprotocol/v4-web, dydxprotocol/v4-abacus, dydxprotocol/v4-localization, dydxprotocol/v4-documentation, and any dYdX or dYdX Trading Inc. repository reflecting a copy of, or link to, this license.
+
+
+The GNU Affero General Public License
+Version 3, 19 November 2007
+
+
+Copyright (C) 2007 Free Software Foundation, Inc.
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+
+ Preamble
+
+
+
+ The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+our General Public Licenses are intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.
+
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+
+ Developers that use our General Public Licenses protect your rights
+with two steps: (1) assert copyright on the software, and (2) offer
+you this License which gives you legal permission to copy, distribute
+and/or modify the software.
+
+
+ A secondary benefit of defending all users' freedom is that
+improvements made in alternate versions of the program, if they
+receive widespread use, become available for other developers to
+incorporate. Many developers of free software are heartened and
+encouraged by the resulting cooperation. However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.
+
+
+ The GNU Affero General Public License is designed specifically to
+ensure that, in such cases, the modified source code becomes available
+to the community. It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server. Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.
+
+
+ An older license, called the Affero General Public License and
+published by Affero, was designed to accomplish similar goals. This is
+a different license, not a version of the Affero GPL, but Affero has
+released a new version of the Affero GPL which permits relicensing under
+this license.
+
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+
+
+ TERMS AND CONDITIONS
+
+
+ 0. Definitions.
+
+
+ "This License" refers to version 3 of the GNU Affero General Public License.
+
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+
+
+ 1. Source Code.
+
+
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+
+ 2. Basic Permissions.
+
+
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; Section 10
+makes it unnecessary.
+
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+
+
+ 4. Conveying Verbatim Copies.
+
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with Section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+
+ 5. Conveying Modified Source Versions.
+
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of Section 4, provided that you also meet all of these conditions:
+
+
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under Section
+ 7. This requirement modifies the requirement in Section 4 to
+ "keep intact all notices".
+
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable Section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+
+
+ 6. Conveying Non-Source Forms.
+
+
+
+ You may convey a covered work in object code form under the terms
+of Sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with Subsection 6b.
+
+
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under Subsection 6d.
+
+
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+
+
+ 7. Additional Terms.
+
+
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of Sections 15 and 16 of this License; or
+
+
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of Section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+
+
+ 8. Termination.
+
+
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of Section 11).
+
+
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under Section 10.
+
+
+
+ 9. Acceptance Not Required for Having Copies.
+
+
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+
+
+ 11. Patents.
+
+
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+
+
+ 12. No Surrender of Others' Freedom.
+
+
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+
+
+ 13. Remote Network Interaction; Use with the GNU General Public License.
+
+
+
+ Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users
+interacting with it remotely through a computer network (if your version
+supports such interaction) an opportunity to receive the Corresponding
+Source of your version by providing access to the Corresponding Source
+from a network server at no charge, through some standard or customary
+means of facilitating copying of software. This Corresponding Source
+shall include the Corresponding Source for any work covered by version 3
+of the GNU General Public License that is incorporated pursuant to the
+following paragraph.
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the work with which it is combined will remain governed by version
+3 of the GNU General Public License.
+
+
+
+ 14. Revised Versions of this License.
+
+
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU Affero General Public License from time to time. Such new versions
+will be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU Affero General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU Affero General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU Affero General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+
+
+ 15. Disclaimer of Warranty.
+
+
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+
+
+ 16. Limitation of Liability.
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+ 17. Interpretation of Sections 15 and 16.
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+
+For more information about this software, see https://dydx.exchange.
+ Copyright (C) 2023 dYdX Trading Inc.
diff --git a/v4-client-rs/README.md b/v4-client-rs/README.md
new file mode 100644
index 00000000..84bcc694
--- /dev/null
+++ b/v4-client-rs/README.md
@@ -0,0 +1,64 @@
+# Rust client for dYdX v4
+
+The crate implements interaction with the dYdX API.
+
+The following features are implemented (a minimal usage sketch follows the list):
+- `NodeClient`, `IndexerClient` + WebSockets, `FaucetClient`, `NobleClient`
+- Fully asynchronous implementation
+- Telemetry
+- Convenient builder for constructing requests
+- Automatic WS connection support
+
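+A minimal usage sketch, assembled from the bundled examples and using the repository's testnet configuration (the mnemonic is a placeholder you must replace):
+
+```rust
+use anyhow::Result;
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::IndexerClient;
+use dydx_v4_rust::node::{NodeClient, Wallet};
+
+#[tokio::main]
+async fn main() -> Result<()> {
+    // Load the testnet configuration shipped with the repository.
+    let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+    let mut node = NodeClient::connect(config.node).await?;
+    let indexer = IndexerClient::new(config.indexer);
+    // Placeholder mnemonic; supply your own wallet mnemonic here.
+    let wallet = Wallet::from_mnemonic("<your mnemonic>")?;
+    let account = wallet.account(0, &mut node).await?;
+    let subaccount = account.subaccount(0)?;
+    // Query the subaccount through the Indexer REST API.
+    let response = indexer.accounts().get_subaccount(&subaccount).await?;
+    println!("{response:?}");
+    Ok(())
+}
+```
+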
+## Install
+
+To add the crate to your project, use the command:
+
+```sh
+cargo add dydx-v4-rust
+```
+
+## Development
+
+The workspace consists of a single crate:
+* `client` - provides connection management with dYdX, common types, and utilities
+
+### Prerequisites
+
+* [Rust](https://www.rust-lang.org/tools/install)
+* [cargo deny](https://github.com/EmbarkStudios/cargo-deny)
+* [protoc](https://grpc.io/docs/protoc-installation/) for dev dependencies (`metrics-exporter-tcp`)
+
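+On Ubuntu/Debian, `cargo deny` and `protoc` can be installed with the same commands the CI workflow uses:
+
+```sh
+cargo install cargo-deny
+sudo apt install -y protobuf-compiler
+```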
+
+### Examples
+
+To run an example, use the `cargo` command as follows:
+
+```sh
+cargo run --example bot_basic_adder
+```
+
+You can find the full set of examples in the [examples](client/examples) folder.
+
+### Code quality assurance
+
+Before publishing, make sure to run the following (and fix all warnings and errors):
+
+```sh
+cargo fmt
+cargo clippy
+cargo deny check licenses advisories sources
+```
+
+### Documentation
+
+To generate the documentation, use the command:
+
+```sh
+cargo doc -p dydx-v4-rust
+```
+
+## Acknowledgements
+
+Built by Nethermind: [@v0-e](https://github.com/v0-e), [@therustmonk](https://github.com/therustmonk), [@maksimryndin](https://github.com/maksimryndin)
+
+For more details about the grant, see the [grant page](https://www.dydxgrants.com/grants/rust-trading-client).
diff --git a/v4-client-rs/client/Cargo.toml b/v4-client-rs/client/Cargo.toml
new file mode 100644
index 00000000..e6ffa235
--- /dev/null
+++ b/v4-client-rs/client/Cargo.toml
@@ -0,0 +1,59 @@
+[package]
+name = "dydx-v4-rust"
+version.workspace = true
+edition.workspace = true
+license.workspace = true
+
+description = "dYdX v4 asynchronous client."
+homepage = "https://github.com/dydxprotocol/v4-clients/v4-client-rs"
+repository = "https://github.com/dydxprotocol/v4-clients/v4-client-rs"
+readme = "README.md"
+
+# https://crates.io/categories
+categories = ["api-bindings", "asynchronous", "finance"]
+keywords = ["trading", "dex"]
+
+[features]
+default = ["faucet", "noble", "telemetry"]
+faucet = []
+noble = [
+ "dep:ibc-proto"
+]
+telemetry = [
+ "dep:metrics",
+]
+
+[dependencies]
+anyhow.workspace = true
+async-trait.workspace = true
+bigdecimal.workspace = true
+bip32 = { version = "0.5", default-features = false, features = ["bip39", "alloc", "secp256k1"] }
+cosmrs = "0.16"
+chrono = { version = "0.4", features = ["serde"] }
+derive_more.workspace = true
+futures-util = "0.3"
+governor = "0.6"
+ibc-proto = { version = "0.46", optional = true }
+log.workspace = true
+rand = "0.8"
+reqwest = { version = "0.12", features = ["json"] }
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"
+serde_with = "3.9"
+strum = { version = "0.26", features = ["derive"] }
+thiserror.workspace = true
+tonic = { version = "0.11", features = ["tls", "tls-roots", "transport", "channel"] }
+tokio.workspace = true
+tokio-tungstenite = { version = "0.23", features = ["native-tls"] }
+toml = "0.8"
+tower = "0.4"
+v4-proto-rs.workspace = true
+
+# Telemetry
+metrics = { version = "0.23", optional = true }
+
+[dev-dependencies]
+metrics-exporter-tcp = "0.10.0"
+serial_test = "3.1.1"
+tracing = "0.1"
+tracing-subscriber = "0.3"
diff --git a/v4-client-rs/client/examples/account_endpoint.rs b/v4-client-rs/client/examples/account_endpoint.rs
new file mode 100644
index 00000000..1c4d427d
--- /dev/null
+++ b/v4-client-rs/client/examples/account_endpoint.rs
@@ -0,0 +1,205 @@
+mod support;
+
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::{
+ GetAggregationsOpts, GetFillsOpts, GetHistoricalPnlOpts, GetTradingRewardsOpts,
+ GetTransfersOpts, IndexerClient, ListOrdersOpts, ListPositionsOpts, MarketType, OrderSide,
+ PerpetualPositionStatus, Ticker, TradingRewardAggregationPeriod,
+};
+use dydx_v4_rust::node::Wallet;
+use support::constants::TEST_MNEMONIC;
+
+pub struct Rester {
+ indexer: IndexerClient,
+ wallet: Wallet,
+}
+
+impl Rester {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let indexer = IndexerClient::new(config.indexer);
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self { indexer, wallet })
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ let rester = Rester::connect().await?;
+ let account = rester.wallet.account_offline(0)?;
+ let indexer = rester.indexer;
+
+ // Test values
+ let address = account.address();
+ let subaccount = account.subaccount(0)?;
+ let parent_subaccount = subaccount.parent();
+
+ let subaccounts = indexer.accounts().get_subaccounts(address).await?;
+ tracing::info!("Subaccounts response: {:?}", subaccounts);
+
+ let subaccount_resp = indexer.accounts().get_subaccount(&subaccount).await?;
+ tracing::info!("Subaccount response: {:?}", subaccount_resp);
+
+ let asset_positions = indexer.accounts().get_asset_positions(&subaccount).await?;
+ tracing::info!("Asset positions response: {:?}", asset_positions);
+
+ let pos_opts = ListPositionsOpts {
+ status: PerpetualPositionStatus::Closed.into(),
+ limit: Some(3),
+ ..Default::default()
+ };
+ let positions = indexer
+ .accounts()
+ .list_positions(&subaccount, Some(pos_opts))
+ .await?;
+ tracing::info!("Perpetual positions response: {:?}", positions);
+
+ let trf_opts = GetTransfersOpts {
+ limit: Some(3),
+ ..Default::default()
+ };
+ let transfers = indexer
+ .accounts()
+ .get_transfers(&subaccount, Some(trf_opts))
+ .await?;
+ tracing::info!("Transfers response: {:?}", transfers);
+
+ let ord_opts = ListOrdersOpts {
+ ticker: Some(Ticker::from("ETH-USD")),
+ limit: Some(3),
+ side: OrderSide::Buy.into(),
+ ..Default::default()
+ };
+ let orders = indexer
+ .accounts()
+ .list_orders(&subaccount, Some(ord_opts))
+ .await?;
+ tracing::info!("Orders response: {:?}", orders);
+
+ let fill_opts = GetFillsOpts {
+ limit: Some(3),
+ market: Some(Ticker::from("ETH-USD")),
+ market_type: Some(MarketType::Perpetual),
+ ..Default::default()
+ };
+ let fills = indexer
+ .accounts()
+ .get_fills(&subaccount, Some(fill_opts))
+ .await?;
+ tracing::info!("Fills response: {:?}", fills);
+
+ let pnl_opts = GetHistoricalPnlOpts {
+ limit: Some(3),
+ ..Default::default()
+ };
+ let pnls = indexer
+ .accounts()
+ .get_historical_pnl(&subaccount, Some(pnl_opts))
+ .await?;
+ tracing::info!("Historical PnLs response: {:?}", pnls);
+
+ let rwds_opts = GetTradingRewardsOpts {
+ limit: Some(3),
+ ..Default::default()
+ };
+ let rewards = indexer
+ .accounts()
+ .get_rewards(account.address(), Some(rwds_opts))
+ .await?;
+ tracing::info!("Trading rewards response: {:?}", rewards);
+
+ let aggr_opts = GetAggregationsOpts {
+ limit: Some(3),
+ ..Default::default()
+ };
+ let aggregated = indexer
+ .accounts()
+ .get_rewards_aggregated(
+ address,
+ TradingRewardAggregationPeriod::Daily,
+ Some(aggr_opts),
+ )
+ .await?;
+ tracing::info!("Trading rewards aggregated response: {:?}", aggregated);
+
+ // Parent subaccount
+ let subaccount_resp = indexer
+ .accounts()
+ .get_parent_subaccount(&parent_subaccount)
+ .await?;
+ tracing::info!(
+ "Subaccount response (parent subaccount): {:?}",
+ subaccount_resp
+ );
+
+ let asset_positions = indexer
+ .accounts()
+ .get_parent_asset_positions(&parent_subaccount)
+ .await?;
+ tracing::info!(
+ "Asset positions response (parent subaccount): {:?}",
+ asset_positions
+ );
+
+ let pos_opts = ListPositionsOpts {
+ status: PerpetualPositionStatus::Closed.into(),
+ limit: Some(3),
+ ..Default::default()
+ };
+ let positions = indexer
+ .accounts()
+ .list_parent_positions(&parent_subaccount, Some(pos_opts))
+ .await?;
+ tracing::info!(
+ "Perpetual positions response (parent subaccount): {:?}",
+ positions
+ );
+
+ let trf_opts = GetTransfersOpts {
+ limit: Some(3),
+ ..Default::default()
+ };
+ let transfers = indexer
+ .accounts()
+ .get_parent_transfers(&parent_subaccount, Some(trf_opts))
+ .await?;
+ tracing::info!("Transfers response (parent subaccount): {:?}", transfers);
+
+ let ord_opts = ListOrdersOpts {
+ ticker: Some(Ticker::from("ETH-USD")),
+ limit: Some(3),
+ side: OrderSide::Buy.into(),
+ ..Default::default()
+ };
+ let orders = indexer
+ .accounts()
+ .list_parent_orders(&parent_subaccount, Some(ord_opts))
+ .await?;
+ tracing::info!("Orders response (parent subaccount): {:?}", orders);
+
+ let fill_opts = GetFillsOpts {
+ limit: Some(3),
+ market: Some(Ticker::from("ETH-USD")),
+ market_type: Some(MarketType::Perpetual),
+ ..Default::default()
+ };
+ let fills = indexer
+ .accounts()
+ .get_parent_fills(&parent_subaccount, Some(fill_opts))
+ .await?;
+ tracing::info!("Fills response (parent subaccount): {:?}", fills);
+
+ let pnl_opts = GetHistoricalPnlOpts {
+ limit: Some(3),
+ ..Default::default()
+ };
+ let pnls = indexer
+ .accounts()
+ .get_parent_historical_pnl(&parent_subaccount, Some(pnl_opts))
+ .await?;
+ tracing::info!("Historical PnLs response (parent subaccount): {:?}", pnls);
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/batch_cancel_orders.rs b/v4-client-rs/client/examples/batch_cancel_orders.rs
new file mode 100644
index 00000000..4655f1d1
--- /dev/null
+++ b/v4-client-rs/client/examples/batch_cancel_orders.rs
@@ -0,0 +1,97 @@
+mod support;
+use anyhow::{Error, Result};
+use bigdecimal::BigDecimal;
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::{ClientId, IndexerClient};
+use dydx_v4_rust::node::{
+ NodeClient, OrderBuilder, OrderSide, Wallet, SHORT_TERM_ORDER_MAXIMUM_LIFETIME,
+};
+use rand::thread_rng;
+use std::str::FromStr;
+use support::constants::TEST_MNEMONIC;
+use tokio::time::{sleep, Duration};
+use v4_proto_rs::dydxprotocol::clob::{order::TimeInForce, OrderBatch};
+
+const N_ORDERS: usize = 6;
+
+const ETH_USD_TICKER: &str = "ETH-USD";
+
+pub struct OrderPlacer {
+ client: NodeClient,
+ indexer: IndexerClient,
+ wallet: Wallet,
+}
+
+impl OrderPlacer {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let client = NodeClient::connect(config.node).await?;
+ let indexer = IndexerClient::new(config.indexer);
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self {
+ client,
+ indexer,
+ wallet,
+ })
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ #[cfg(feature = "telemetry")]
+ support::telemetry::metrics_dashboard().await?;
+ let mut placer = OrderPlacer::connect().await?;
+ let mut account = placer.wallet.account(0, &mut placer.client).await?;
+
+ let subaccount = account.subaccount(0)?;
+
+ let market = placer
+ .indexer
+ .markets()
+ .get_perpetual_market(&ETH_USD_TICKER.into())
+ .await?;
+
+ let builder = OrderBuilder::new(market.clone(), subaccount.clone())
+ .market(OrderSide::Buy, BigDecimal::from_str("0.001")?)
+ .price(100)
+ .reduce_only(false)
+ .time_in_force(TimeInForce::Unspecified);
+
+ let mut client_ids = Vec::new();
+ // Push some orders
+ for _id in 0..N_ORDERS {
+ // Short term orders have a maximum validity of 20 blocks
+ let height = placer.client.get_latest_block_height().await?;
+ let order_builder = builder.clone().until(height.ahead(10));
+
+ let (order_id, order) =
+ order_builder.build(ClientId::random_with_rng(&mut thread_rng()))?;
+ let client_id = order_id.client_id;
+ client_ids.push(client_id);
+ let tx_hash = placer.client.place_order(&mut account, order).await?;
+ tracing::info!("Broadcast order ({client_id}) transaction hash: {tx_hash:?}");
+ sleep(Duration::from_secs(2)).await;
+ }
+
+ // Batch cancel
+ let batch = OrderBatch {
+ clob_pair_id: market.clob_pair_id.0,
+ client_ids,
+ };
+ let til_height = placer
+ .client
+ .get_latest_block_height()
+ .await?
+ .ahead(SHORT_TERM_ORDER_MAXIMUM_LIFETIME);
+ let tx_hash = placer
+ .client
+ .batch_cancel_orders(&mut account, subaccount, vec![batch], til_height)
+ .await?;
+ tracing::info!(
+ "Broadcast cancel orders batch transaction hash: {:?}",
+ tx_hash
+ );
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/bot_basic_adder.rs b/v4-client-rs/client/examples/bot_basic_adder.rs
new file mode 100644
index 00000000..4a09f95f
--- /dev/null
+++ b/v4-client-rs/client/examples/bot_basic_adder.rs
@@ -0,0 +1,271 @@
+mod support;
+
+use anyhow::{anyhow as err, Error, Result};
+use bigdecimal::{BigDecimal, One, Signed};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::{
+ AnyId, Feed, IndexerClient, ListPerpetualMarketsOpts, PerpetualMarket, Price, Quantity,
+ SubaccountsMessage, Ticker, TradesMessage,
+};
+use dydx_v4_rust::node::{Account, NodeClient, OrderBuilder, OrderId, OrderSide, Wallet};
+use std::str::FromStr;
+use support::constants::TEST_MNEMONIC;
+use support::order_book::LiveOrderBook;
+use tokio::select;
+
+pub struct Parameters {
+ ticker: Ticker,
+ depth: BigDecimal,
+ allowed_deviation: BigDecimal,
+ max_position: Quantity,
+}
+
+pub struct Variables {
+ position: Quantity,
+ state: State,
+}
+
+enum State {
+ Resting { price: Price, oid: OrderId },
+ InFlightOrder,
+ Cancelled,
+}
+
+pub struct BasicAdder {
+ client: NodeClient,
+ #[allow(dead_code)] // TODO remove after completion
+ indexer: IndexerClient,
+ #[allow(dead_code)] // TODO remove after completion
+ wallet: Wallet,
+ account: Account,
+ #[allow(dead_code)] // TODO remove after completion
+ market: PerpetualMarket,
+ generator: OrderBuilder,
+ trades_feed: Feed<TradesMessage>,
+ subaccounts_feed: Feed<SubaccountsMessage>,
+ order_book: LiveOrderBook,
+ parameters: Parameters,
+ variables: Variables,
+}
+
+impl BasicAdder {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let mut client = NodeClient::connect(config.node).await?;
+ let mut indexer = IndexerClient::new(config.indexer);
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ let account = wallet.account(0, &mut client).await?;
+ let subaccount = account.subaccount(0)?;
+
+ let ticker = Ticker::from("ETH-USD");
+ let market = indexer
+ .markets()
+ .list_perpetual_markets(Some(ListPerpetualMarketsOpts {
+ ticker: Some(ticker.clone()),
+ limit: None,
+ }))
+ .await?
+ .remove(&ticker)
+ .ok_or_else(|| err!("{ticker} not found in markets query response"))?;
+ let generator = OrderBuilder::new(market.clone(), subaccount.clone());
+
+ let trades_feed = indexer.feed().trades(&ticker, false).await?;
+ let orders_feed = indexer.feed().orders(&ticker, false).await?;
+ let subaccounts_feed = indexer.feed().subaccounts(subaccount, false).await?;
+ let order_book = LiveOrderBook::new(orders_feed);
+ let depth: BigDecimal = BigDecimal::from_str("0.001")?;
+ let allowed_deviation: BigDecimal = BigDecimal::from_str("0.2")?;
+ let max_position: Quantity = "1.0".parse()?;
+ let parameters = Parameters {
+ ticker,
+ depth,
+ allowed_deviation,
+ max_position,
+ };
+ let variables = Variables {
+ position: 0.into(),
+ state: State::Cancelled,
+ };
+ Ok(Self {
+ client,
+ indexer,
+ wallet,
+ account,
+ market,
+ generator,
+ trades_feed,
+ subaccounts_feed,
+ order_book,
+ parameters,
+ variables,
+ })
+ }
+
+ async fn entrypoint(mut self) {
+ loop {
+ if let Err(err) = self.step().await {
+ tracing::error!("Bot update failed: {err}");
+ }
+ }
+ }
+
+ async fn step(&mut self) -> Result<()> {
+ select! {
+ msg = self.trades_feed.recv() => {
+ if let Some(msg) = msg {
+ self.handle_trades_message(msg).await?;
+ }
+ }
+ msg = self.subaccounts_feed.recv() => {
+ if let Some(msg) = msg {
+ self.handle_subaccounts_message(msg).await?;
+ }
+ }
+ _ = self.order_book.changed() => {
+ self.handle_order_book().await?;
+ }
+ }
+ Ok(())
+ }
+
+ async fn handle_trades_message(&mut self, msg: TradesMessage) -> Result<()> {
+ match msg {
+ TradesMessage::Initial(_upd) => {}
+ TradesMessage::Update(_upd) => {}
+ }
+ Ok(())
+ }
+
+ async fn handle_subaccounts_message(&mut self, msg: SubaccountsMessage) -> Result<()> {
+ match msg {
+ SubaccountsMessage::Initial(upd) => {
+ let positions = upd.contents.subaccount.open_perpetual_positions;
+ if let Some(position) = positions.get(&self.parameters.ticker) {
+ self.variables.position = position.size.clone();
+ tracing::info!("Position: {}", self.variables.position);
+ }
+ }
+ SubaccountsMessage::Update(upd) => {
+ if let Some(ref positions) = upd
+ .contents
+ .first()
+ .ok_or_else(|| err!("Subaccount message does not have data!"))?
+ .perpetual_positions
+ {
+ let size = positions
+ .iter()
+ .find(|p| (p.market == self.parameters.ticker))
+ .map(|p| p.size.clone());
+ if let Some(size) = size {
+ self.variables.position = size;
+ tracing::info!("Position: {}", self.variables.position);
+ }
+ }
+ }
+ }
+ Ok(())
+ }
+
+ async fn handle_order_book(&mut self) -> Result<()> {
+ let spread = self
+ .order_book
+ .borrow()
+ .spread()
+ .map(|spread| (spread.bid.price.clone(), spread.ask.price.clone()));
+
+ if let Some((bid, ask)) = spread {
+ let side = if self.variables.position.is_negative() {
+ OrderSide::Buy
+ } else {
+ OrderSide::Sell
+ };
+
+ let one = <BigDecimal as One>::one();
+ let (book_price, ideal_price) = match side {
+ OrderSide::Buy => (&bid, bid.clone() * (one + &self.parameters.depth)),
+ OrderSide::Sell => (&ask, ask.clone() * (one - &self.parameters.depth)),
+ other => panic!("Unhandled side {other:?}!"),
+ };
+ let ideal_distance = &book_price.0 * &self.parameters.depth;
+
+ match &self.variables.state {
+ State::Resting { price, oid } => {
+ let distance = (ideal_price.clone() - price.clone()).abs();
+ if distance > &self.parameters.allowed_deviation * ideal_distance {
+ tracing::info!(
+ "Cancelling order due to deviation: ID:{} side:{:?} ideal_price:{} price:{}",
+ oid.client_id, side, ideal_price, price
+ );
+ self.cancel_order(oid.clone()).await?;
+ self.variables.state = State::Cancelled;
+ }
+ }
+ State::InFlightOrder => {
+ tracing::info!("Not placing an order because in flight");
+ }
+ State::Cancelled => {
+ let size = &self.parameters.max_position.0 - self.variables.position.abs();
+ if &size * &ideal_price.0 < BigDecimal::from_str("3.0")? {
+ tracing::info!("Not placing an order because at position limit: size:{size} ideal_price:{ideal_price}");
+ return Ok(());
+ }
+ self.variables.state = State::InFlightOrder;
+ if let Ok(oid) = self
+ .place_limit_order(side, ideal_price.clone(), size)
+ .await
+ {
+ self.variables.state = State::Resting {
+ price: ideal_price,
+ oid,
+ };
+ } else {
+ self.variables.state = State::Cancelled;
+ }
+ }
+ }
+ }
+ Ok(())
+ }
+
+ async fn place_limit_order(
+ &mut self,
+ side: OrderSide,
+ price: Price,
+ size: BigDecimal,
+ ) -> Result<OrderId> {
+ let current_block = self.client.get_latest_block_height().await?;
+ let (id, order) = self
+ .generator
+ .clone()
+ .limit(side, price, size)
+ .until(current_block.ahead(10))
+ .build(AnyId)?;
+ let hash = self.client.place_order(&mut self.account, order).await?;
+ tracing::info!("Placing {side:?} order: {hash} (ID: {})", id.client_id);
+
+ Ok(id)
+ }
+
+ async fn cancel_order(&mut self, id: OrderId) -> Result<()> {
+ let current_block = self.client.get_latest_block_height().await?;
+ let until = current_block.ahead(10);
+ let c_id = id.client_id;
+ let hash = self
+ .client
+ .cancel_order(&mut self.account, id, until)
+ .await?;
+ tracing::info!("Cancelling order: {hash} (ID: {c_id})");
+
+ Ok(())
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ #[cfg(feature = "telemetry")]
+ support::telemetry::metrics_dashboard().await?;
+ let adder = BasicAdder::connect().await?;
+ adder.entrypoint().await;
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/bot_trend_follower.rs b/v4-client-rs/client/examples/bot_trend_follower.rs
new file mode 100644
index 00000000..9a1772bc
--- /dev/null
+++ b/v4-client-rs/client/examples/bot_trend_follower.rs
@@ -0,0 +1,388 @@
+mod support;
+
+use anyhow::{anyhow as err, Error, Result};
+use chrono::{TimeDelta, Utc};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::{
+ CandleResolution, ClientId, Feed, GetCandlesOpts, IndexerClient, ListPerpetualMarketsOpts,
+ PerpetualMarket, Price, Quantity, Subaccount, SubaccountsMessage, Ticker, TradesMessage,
+};
+use dydx_v4_rust::node::{
+ Account, NodeClient, OrderBuilder, OrderId, OrderSide, Wallet,
+ SHORT_TERM_ORDER_MAXIMUM_LIFETIME,
+};
+use std::fmt;
+use support::constants::TEST_MNEMONIC;
+use support::order_book::LiveOrderBook;
+use tokio::{
+ select,
+ sync::mpsc,
+ time::{sleep, Duration},
+};
+
+pub struct Parameters {
+ ticker: Ticker,
+ position_size: Quantity,
+ shorter_span: TimeDelta,
+ longer_span: TimeDelta,
+}
+
+pub struct Variables {
+ position: Quantity,
+ shorter_channel: Channel,
+ longer_channel: Channel,
+ state: State,
+}
+
+enum State {
+ Waiting,
+ InTrend(OrderSide),
+}
+
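+/// Price channel used by the strategy: the highest high and lowest low of recent candles over a
+/// lookback span (see `calculate_channel` below).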
+pub struct Channel {
+ high: Price,
+ low: Price,
+}
+
+impl fmt::Display for Channel {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "[{}, {}]", self.low, self.high)?;
+ Ok(())
+ }
+}
+
+pub struct TrendFollower {
+ client: NodeClient,
+ indexer: IndexerClient,
+ #[allow(dead_code)] // TODO remove after completion
+ wallet: Wallet,
+ account: Account,
+ subaccount: Subaccount,
+ market: PerpetualMarket,
+ generator: OrderBuilder,
+ trades_feed: Feed<TradesMessage>,
+ subaccounts_feed: Feed<SubaccountsMessage>,
+ order_book: LiveOrderBook,
+ channel_rx: mpsc::UnboundedReceiver<Channel>,
+ parameters: Parameters,
+ variables: Variables,
+}
+
+impl TrendFollower {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let mut client = NodeClient::connect(config.node).await?;
+ let mut indexer = IndexerClient::new(config.indexer.clone());
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ let mut account = wallet.account(0, &mut client).await?;
+ let subaccount = account.subaccount(0)?;
+
+ let ticker = Ticker::from("ETH-USD");
+ let market = indexer
+ .markets()
+ .list_perpetual_markets(Some(ListPerpetualMarketsOpts {
+ ticker: Some(ticker.clone()),
+ limit: None,
+ }))
+ .await?
+ .remove(&ticker)
+ .ok_or_else(|| err!("{ticker} not found in markets query response"))?;
+ let generator = OrderBuilder::new(market.clone(), subaccount.clone());
+
+ // Close position
+ client
+ .close_position(
+ &mut account,
+ subaccount.clone(),
+ market.clone(),
+ None,
+ ClientId::random(),
+ )
+ .await?;
+
+ let trades_feed = indexer.feed().trades(&ticker, false).await?;
+ let orders_feed = indexer.feed().orders(&ticker, false).await?;
+ let subaccounts_feed = indexer
+ .feed()
+ .subaccounts(subaccount.clone(), false)
+ .await?;
+ let order_book = LiveOrderBook::new(orders_feed);
+ let position_size: Quantity = "0.001".parse()?;
+ let shorter_span = TimeDelta::minutes(10);
+ let longer_span = TimeDelta::minutes(30);
+
+ let shorter_channel = calculate_channel(&indexer, &ticker, shorter_span).await?;
+ let longer_channel = calculate_channel(&indexer, &ticker, longer_span).await?;
+
+ tracing::info!("Watching channel: {longer_channel}");
+
+ let (tx, channel_rx) = mpsc::unbounded_channel();
+ tokio::spawn(Self::channel_fetcher(
+ tx,
+ IndexerClient::new(config.indexer),
+ ticker.clone(),
+ shorter_span,
+ ));
+
+ let parameters = Parameters {
+ ticker,
+ position_size,
+ shorter_span,
+ longer_span,
+ };
+ let variables = Variables {
+ position: 0.into(),
+ shorter_channel,
+ longer_channel,
+ state: State::Waiting,
+ };
+ Ok(Self {
+ client,
+ indexer,
+ wallet,
+ account,
+ subaccount,
+ market,
+ generator,
+ trades_feed,
+ subaccounts_feed,
+ order_book,
+ channel_rx,
+ parameters,
+ variables,
+ })
+ }
+
+ async fn entrypoint(mut self) {
+ loop {
+ if let Err(err) = self.step().await {
+ tracing::error!("Bot update failed: {err}");
+ }
+ }
+ }
+
+ async fn step(&mut self) -> Result<()> {
+ select! {
+ msg = self.trades_feed.recv() => {
+ if let Some(msg) = msg {
+ self.handle_trades_message(msg).await?;
+ }
+ }
+ msg = self.subaccounts_feed.recv() => {
+ if let Some(msg) = msg {
+ self.handle_subaccounts_message(msg).await?;
+ }
+ }
+ channel = self.channel_rx.recv() => {
+ if let Some(channel) = channel {
+ self.variables.shorter_channel = channel;
+ }
+ }
+ _ = self.order_book.changed() => {
+ self.handle_order_book().await?;
+ }
+ }
+ Ok(())
+ }
+
+ async fn handle_trades_message(&mut self, msg: TradesMessage) -> Result<()> {
+ match msg {
+ TradesMessage::Initial(_upd) => {}
+ TradesMessage::Update(_upd) => {}
+ }
+ Ok(())
+ }
+
+ async fn handle_subaccounts_message(&mut self, msg: SubaccountsMessage) -> Result<()> {
+ match msg {
+ SubaccountsMessage::Initial(upd) => {
+ let positions = upd.contents.subaccount.open_perpetual_positions;
+ if let Some(position) = positions.get(&self.parameters.ticker) {
+ self.variables.position = position.size.clone();
+ tracing::info!("Position: {}", self.variables.position);
+ }
+ }
+ SubaccountsMessage::Update(upd) => {
+ if let Some(ref positions) = upd
+ .contents
+ .first()
+ .ok_or_else(|| err!("Subaccount message does not have data!"))?
+ .perpetual_positions
+ {
+ let size = positions
+ .iter()
+ .find(|p| (p.market == self.parameters.ticker))
+ .map(|p| p.size.clone());
+ if let Some(size) = size {
+ self.variables.position = size;
+ tracing::info!("Position: {}", self.variables.position);
+ }
+ }
+ }
+ }
+ Ok(())
+ }
+
+ async fn handle_order_book(&mut self) -> Result<()> {
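+ // Strategy summary (inferred from the code below): while Waiting, a mid-price break above the
+ // longer channel opens a long and a break below it opens a short; while InTrend, a break of
+ // the shorter channel against the position closes it and the bot returns to Waiting.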
+ let spread = self
+ .order_book
+ .borrow()
+ .spread()
+ .map(|spread| (spread.bid.price.clone(), spread.ask.price.clone()));
+
+ if let Some((bid, ask)) = spread {
+ let price = Price((bid.0 + ask.0) / 2);
+ match self.variables.state {
+ State::Waiting => {
+ if price > self.variables.longer_channel.high {
+ tracing::info!("Channel broken at {price}. Placing buy order.");
+ self.place_limit_order(OrderSide::Buy, price).await?;
+ self.variables.state = State::InTrend(OrderSide::Buy);
+ self.variables.shorter_channel =
+ self.get_channel(self.parameters.shorter_span).await?;
+ tracing::info!("In-trend channel: {}", self.variables.shorter_channel);
+ } else if price < self.variables.longer_channel.low {
+ tracing::info!("Channel broken at {price}. Placing sell order.");
+ self.place_limit_order(OrderSide::Sell, price).await?;
+ self.variables.state = State::InTrend(OrderSide::Sell);
+ self.variables.shorter_channel =
+ self.get_channel(self.parameters.shorter_span).await?;
+ tracing::info!("In-trend channel: {}", self.variables.shorter_channel);
+ }
+ }
+ State::InTrend(side) => {
+ let break_price = match side {
+ OrderSide::Buy => {
+ if price < self.variables.shorter_channel.low {
+ Some(price)
+ } else {
+ None
+ }
+ }
+ OrderSide::Sell => {
+ if price > self.variables.shorter_channel.high {
+ Some(price)
+ } else {
+ None
+ }
+ }
+ _ => None,
+ };
+ if let Some(price) = break_price {
+ tracing::info!(
+ "Leaving trend at {price}, channel: {}. Closing position.",
+ self.variables.shorter_channel
+ );
+ self.close_position().await?;
+ self.variables.state = State::Waiting;
+ self.variables.longer_channel =
+ self.get_channel(self.parameters.longer_span).await?;
+ tracing::info!("Watching channel {}.", self.variables.longer_channel);
+ }
+ }
+ }
+ }
+ Ok(())
+ }
+
+ async fn place_limit_order(&mut self, side: OrderSide, price: Price) -> Result<OrderId> {
+ let current_block = self.client.get_latest_block_height().await?;
+ let (id, order) = self
+ .generator
+ .clone()
+ .limit(side, price, self.parameters.position_size.clone())
+ .until(current_block.ahead(SHORT_TERM_ORDER_MAXIMUM_LIFETIME))
+ .build(ClientId::random())?;
+ let hash = self.client.place_order(&mut self.account, order).await?;
+ tracing::info!("Placing {side:?} order: {hash} (ID: {})", id.client_id);
+
+ Ok(id)
+ }
+
+ async fn _cancel_order(&mut self, id: OrderId) -> Result<()> {
+ let current_block = self.client.get_latest_block_height().await?;
+ let until = current_block.ahead(10);
+ let c_id = id.client_id;
+ let hash = self
+ .client
+ .cancel_order(&mut self.account, id, until)
+ .await?;
+ tracing::info!("Cancelling order: {hash} (ID: {c_id})");
+
+ Ok(())
+ }
+
+ async fn close_position(&mut self) -> Result<()> {
+ self.client
+ .close_position(
+ &mut self.account,
+ self.subaccount.clone(),
+ self.market.clone(),
+ None,
+ ClientId::random(),
+ )
+ .await
+ .map(|_| ())
+ .map_err(|e| err!("Failed closing position: {e}"))
+ }
+
+ async fn get_channel(&self, span: TimeDelta) -> Result<Channel> {
+ calculate_channel(&self.indexer, &self.parameters.ticker, span).await
+ }
+
+ async fn channel_fetcher(
+ tx: mpsc::UnboundedSender<Channel>,
+ indexer: IndexerClient,
+ ticker: Ticker,
+ span: TimeDelta,
+ ) -> Result<()> {
+ loop {
+ sleep(Duration::from_secs(30)).await;
+ let result = calculate_channel(&indexer, &ticker, span).await?;
+ tx.send(result)?;
+ }
+ }
+}
+
+async fn calculate_channel(
+ indexer: &IndexerClient,
+ ticker: &Ticker,
+ span: TimeDelta,
+) -> Result<Channel> {
+ let now = Utc::now();
+ let opts = GetCandlesOpts {
+ from_iso: Some(now - span),
+ to_iso: Some(now),
+ limit: None,
+ };
+ let candles = indexer
+ .markets()
+ .get_candles(ticker, CandleResolution::M1, Some(opts))
+ .await?;
+ if candles.is_empty() {
+ return Err(err!("Candles response is empty"));
+ }
+ let high = candles
+ .iter()
+ .max_by_key(|c| c.high.clone())
+ .unwrap()
+ .high
+ .clone();
+ let low = candles
+ .iter()
+ .min_by_key(|c| c.low.clone())
+ .unwrap()
+ .low
+ .clone();
+ Ok(Channel { low, high })
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ #[cfg(feature = "telemetry")]
+ support::telemetry::metrics_dashboard().await?;
+ let follower = TrendFollower::connect().await?;
+ follower.entrypoint().await;
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/cancel_order.rs b/v4-client-rs/client/examples/cancel_order.rs
new file mode 100644
index 00000000..318775bf
--- /dev/null
+++ b/v4-client-rs/client/examples/cancel_order.rs
@@ -0,0 +1,71 @@
+mod support;
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::{AnyId, IndexerClient, Ticker};
+use dydx_v4_rust::node::{NodeClient, OrderBuilder, OrderSide, Wallet};
+use support::constants::TEST_MNEMONIC;
+use tokio::time::{sleep, Duration};
+use v4_proto_rs::dydxprotocol::clob::order::TimeInForce;
+
+const ETH_USD_TICKER: &str = "ETH-USD";
+
+pub struct OrderPlacer {
+ client: NodeClient,
+ indexer: IndexerClient,
+ wallet: Wallet,
+}
+
+impl OrderPlacer {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let client = NodeClient::connect(config.node).await?;
+ let indexer = IndexerClient::new(config.indexer);
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self {
+ client,
+ indexer,
+ wallet,
+ })
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ #[cfg(feature = "telemetry")]
+ support::telemetry::metrics_dashboard().await?;
+ let mut placer = OrderPlacer::connect().await?;
+ let mut account = placer.wallet.account(0, &mut placer.client).await?;
+ let subaccount = account.subaccount(0)?;
+
+ let ticker = Ticker(ETH_USD_TICKER.into());
+ let market = placer
+ .indexer
+ .markets()
+ .get_perpetual_market(&ticker)
+ .await?;
+
+ let current_block_height = placer.client.get_latest_block_height().await?;
+ let good_until = current_block_height.ahead(10);
+
+ let (order_id, order) = OrderBuilder::new(market, subaccount)
+ .limit(OrderSide::Buy, 100, 3)
+ .reduce_only(false)
+ .time_in_force(TimeInForce::Unspecified)
+ .until(good_until.clone())
+ .build(AnyId)?;
+
+ let place_tx_hash = placer.client.place_order(&mut account, order).await?;
+ tracing::info!("Place order transaction hash: {:?}", place_tx_hash);
+
+ sleep(Duration::from_secs(5)).await;
+
+ // Cancel order
+ let cancel_tx_hash = placer
+ .client
+ .cancel_order(&mut account, order_id, good_until)
+ .await?;
+ tracing::info!("Cancel order transaction hash: {:?}", cancel_tx_hash);
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/close_all_positions.rs b/v4-client-rs/client/examples/close_all_positions.rs
new file mode 100644
index 00000000..a537a47e
--- /dev/null
+++ b/v4-client-rs/client/examples/close_all_positions.rs
@@ -0,0 +1,90 @@
+mod support;
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::{
+ ClientId, IndexerClient, ListPositionsOpts,
+ PerpetualPositionResponseObject as PerpetualPosition, PerpetualPositionStatus, Subaccount,
+};
+use dydx_v4_rust::node::{NodeClient, Wallet};
+use support::constants::TEST_MNEMONIC;
+
+pub struct OrderPlacer {
+ client: NodeClient,
+ indexer: IndexerClient,
+ wallet: Wallet,
+}
+
+impl OrderPlacer {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let client = NodeClient::connect(config.node).await?;
+ let indexer = IndexerClient::new(config.indexer);
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self {
+ client,
+ indexer,
+ wallet,
+ })
+ }
+}
+
+async fn get_open_positions(
+ indexer: &IndexerClient,
+ subaccount: &Subaccount,
+) -> Result<Vec<PerpetualPosition>> {
+ indexer
+ .accounts()
+ .list_positions(
+ subaccount,
+ Some(ListPositionsOpts {
+ status: Some(PerpetualPositionStatus::Open),
+ ..Default::default()
+ }),
+ )
+ .await
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ #[cfg(feature = "telemetry")]
+ support::telemetry::metrics_dashboard().await?;
+ let mut placer = OrderPlacer::connect().await?;
+ let mut account = placer.wallet.account(0, &mut placer.client).await?;
+
+ let subaccount = account.subaccount(0)?;
+
+ let open_positions = get_open_positions(&placer.indexer, &subaccount).await?;
+ tracing::info!("Number of open positions: {}", open_positions.len());
+
+ for position in open_positions {
+ let market = placer
+ .indexer
+ .markets()
+ .get_perpetual_market(&position.market)
+ .await?;
+ let ticker = market.ticker.clone();
+
+ // Fully close the position, if open, matching best current market prices
+ let tx_hash = placer
+ .client
+ .close_position(
+ &mut account,
+ subaccount.clone(),
+ market,
+ None,
+ ClientId::random(),
+ )
+ .await?;
+ tracing::info!("{ticker} position close transaction hash: {:?}", tx_hash);
+ }
+
+ tracing::info!(
+ "Number of open positions: {}",
+ get_open_positions(&placer.indexer, &subaccount)
+ .await?
+ .len()
+ );
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/close_position.rs b/v4-client-rs/client/examples/close_position.rs
new file mode 100644
index 00000000..dc4058f6
--- /dev/null
+++ b/v4-client-rs/client/examples/close_position.rs
@@ -0,0 +1,121 @@
+mod support;
+use anyhow::{Error, Result};
+use bigdecimal::BigDecimal;
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::{
+ ClientId, IndexerClient, ListPositionsOpts,
+ PerpetualPositionResponseObject as PerpetualPosition, PerpetualPositionStatus, Subaccount,
+ Ticker,
+};
+use dydx_v4_rust::node::{NodeClient, Wallet};
+use std::str::FromStr;
+use support::constants::TEST_MNEMONIC;
+use tokio::time::{sleep, Duration};
+
+const ETH_USD_TICKER: &str = "ETH-USD";
+
+pub struct OrderPlacer {
+ client: NodeClient,
+ indexer: IndexerClient,
+ wallet: Wallet,
+}
+
+impl OrderPlacer {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let client = NodeClient::connect(config.node).await?;
+ let indexer = IndexerClient::new(config.indexer);
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self {
+ client,
+ indexer,
+ wallet,
+ })
+ }
+}
+
+async fn get_open_position(
+ indexer: &IndexerClient,
+ subaccount: &Subaccount,
+ ticker: &Ticker,
+) -> Option<PerpetualPosition> {
+ indexer
+ .accounts()
+ .list_positions(
+ subaccount,
+ Some(ListPositionsOpts {
+ status: Some(PerpetualPositionStatus::Open),
+ ..Default::default()
+ }),
+ )
+ .await
+ .ok()
+ .and_then(|positions| positions.into_iter().find(|pos| pos.market == *ticker))
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ #[cfg(feature = "telemetry")]
+ support::telemetry::metrics_dashboard().await?;
+ let mut placer = OrderPlacer::connect().await?;
+ let mut account = placer.wallet.account(0, &mut placer.client).await?;
+
+ let subaccount = account.subaccount(0)?;
+ let ticker = Ticker(ETH_USD_TICKER.into());
+
+ let market = placer
+ .indexer
+ .markets()
+ .get_perpetual_market(&ETH_USD_TICKER.into())
+ .await?;
+
+ println!(
+ "Current open position: {:?}",
+ get_open_position(&placer.indexer, &subaccount, &ticker).await
+ );
+
+ // Reduce position by an amount, if open, matching best current market prices
+ let reduce_by = BigDecimal::from_str("0.0001")?;
+ let tx_hash = placer
+ .client
+ .close_position(
+ &mut account,
+ subaccount.clone(),
+ market.clone(),
+ Some(reduce_by),
+ ClientId::random(),
+ )
+ .await?;
+ tracing::info!(
+ "Partial position close broadcast transaction hash: {:?}",
+ tx_hash
+ );
+
+ sleep(Duration::from_secs(3)).await;
+
+ // Fully close the position, if open, matching best current market prices
+ let tx_hash = placer
+ .client
+ .close_position(
+ &mut account,
+ subaccount.clone(),
+ market,
+ None,
+ ClientId::random(),
+ )
+ .await?;
+ tracing::info!(
+ "Fully position close broadcast transaction hash: {:?}",
+ tx_hash
+ );
+
+ sleep(Duration::from_secs(3)).await;
+
+ println!(
+ "Current open position: {:?}",
+ get_open_position(&placer.indexer, &subaccount, &ticker).await
+ );
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/deposit.rs b/v4-client-rs/client/examples/deposit.rs
new file mode 100644
index 00000000..86481d56
--- /dev/null
+++ b/v4-client-rs/client/examples/deposit.rs
@@ -0,0 +1,39 @@
+mod support;
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::node::{NodeClient, Wallet};
+use support::constants::TEST_MNEMONIC;
+
+pub struct Transferor {
+ client: NodeClient,
+ wallet: Wallet,
+}
+
+impl Transferor {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let client = NodeClient::connect(config.node).await?;
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self { client, wallet })
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ #[cfg(feature = "telemetry")]
+ support::telemetry::metrics_dashboard().await?;
+ let mut transferor = Transferor::connect().await?;
+ let mut account = transferor.wallet.account(0, &mut transferor.client).await?;
+
+ let sender = account.address().clone();
+ let recipient = account.subaccount(0)?;
+
+ let tx_hash = transferor
+ .client
+ .deposit(&mut account, sender, recipient, 1)
+ .await?;
+ tracing::info!("Deposit transaction hash: {:?}", tx_hash);
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/faucet_endpoint.rs b/v4-client-rs/client/examples/faucet_endpoint.rs
new file mode 100644
index 00000000..4aa18b5a
--- /dev/null
+++ b/v4-client-rs/client/examples/faucet_endpoint.rs
@@ -0,0 +1,54 @@
+mod support;
+use anyhow::Result;
+
+#[cfg(feature = "faucet")]
+mod faucet_endpoint_example {
+ use super::support::constants::TEST_MNEMONIC;
+ use anyhow::{anyhow as err, Error, Result};
+ use dydx_v4_rust::config::ClientConfig;
+ use dydx_v4_rust::faucet::FaucetClient;
+ use dydx_v4_rust::indexer::Usdc;
+ use dydx_v4_rust::node::Wallet;
+ pub struct FaucetRequester {
+ faucet: FaucetClient,
+ wallet: Wallet,
+ }
+
+ impl FaucetRequester {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let faucet = FaucetClient::new(
+ config
+ .faucet
+ .ok_or_else(|| err!("Config file must contain a [faucet] config!"))?,
+ );
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self { faucet, wallet })
+ }
+ }
+
+ pub async fn run() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ let requester = FaucetRequester::connect().await?;
+ let subaccount = requester.wallet.account_offline(0)?.subaccount(0)?;
+
+ requester
+ .faucet
+ .fill(&subaccount, &Usdc::from(1000))
+ .await?;
+
+ Ok(())
+ }
+}
+
+#[cfg(feature = "faucet")]
+#[tokio::main]
+async fn main() -> Result<()> {
+ faucet_endpoint_example::run().await?;
+ Ok(())
+}
+
+#[cfg(not(feature = "faucet"))]
+fn main() {
+ eprintln!("Feature 'faucet' must be enabled to run this example!")
+}
diff --git a/v4-client-rs/client/examples/live_price.rs b/v4-client-rs/client/examples/live_price.rs
new file mode 100644
index 00000000..9611e637
--- /dev/null
+++ b/v4-client-rs/client/examples/live_price.rs
@@ -0,0 +1,83 @@
+mod support;
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::{Feed, IndexerClient, MarketsMessage, Ticker};
+use dydx_v4_rust::node::{OrderBuilder, Wallet};
+use support::constants::TEST_MNEMONIC;
+
+pub struct Feeder {
+ ticker: Ticker,
+ markets_feed: Feed<MarketsMessage>,
+ ordergen: OrderBuilder,
+}
+
+impl Feeder {
+ pub async fn connect() -> Result<Self> {
+ tracing_subscriber::fmt()
+ .with_max_level(tracing::Level::DEBUG)
+ .try_init()
+ .map_err(Error::msg)?;
+ #[cfg(feature = "telemetry")]
+ support::telemetry::metrics_dashboard().await?;
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let mut indexer = IndexerClient::new(config.indexer);
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ let ticker = "ETH-USD".into();
+
+ let account = wallet.account_offline(0)?;
+ let subaccount = account.subaccount(0)?;
+ let market = indexer.markets().get_perpetual_market(&ticker).await?;
+ let ordergen = OrderBuilder::new(market, subaccount);
+ let markets_feed = indexer.feed().markets(false).await?;
+
+ Ok(Self {
+ ticker,
+ markets_feed,
+ ordergen,
+ })
+ }
+
+ async fn entrypoint(mut self) {
+ loop {
+ self.step().await;
+ }
+ }
+
+ async fn step(&mut self) {
+ if let Some(msg) = self.markets_feed.recv().await {
+ self.handle_markets_msg(msg).await;
+ }
+ }
+
+ async fn handle_markets_msg(&mut self, msg: MarketsMessage) {
+ let price_opt = match msg {
+ MarketsMessage::Initial(mut init) => init
+ .contents
+ .markets
+ .remove(&self.ticker)
+ .and_then(|market| market.oracle_price),
+ MarketsMessage::Update(mut upd) => upd
+ .contents
+ .first_mut()
+ .and_then(|contents| {
+ contents
+ .oracle_prices
+ .as_mut()
+ .and_then(|prices| prices.remove(&self.ticker))
+ })
+ .map(|opm| opm.oracle_price),
+ };
+ if let Some(price) = price_opt {
+ tracing::info!("Oracle price updated: {price:?}");
+ // Since `OrderBuilder` uses the oracle price for slippage protection in market orders,
+ // it is recommended to update it whenever the same instance is reused for different orders.
+ self.ordergen.update_market_price(price);
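+ // Hypothetical follow-up (illustration only, not part of this example): with the refreshed
+ // oracle price, a market order built from this generator would use the new price as its
+ // slippage bound, e.g.
+ // `self.ordergen.clone().market(OrderSide::Buy, size).build(ClientId::random())`.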
+ }
+ }
+}
+#[tokio::main]
+async fn main() -> Result<()> {
+ let feeder = Feeder::connect().await?;
+ feeder.entrypoint().await;
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/market_endpoint.rs b/v4-client-rs/client/examples/market_endpoint.rs
new file mode 100644
index 00000000..907db033
--- /dev/null
+++ b/v4-client-rs/client/examples/market_endpoint.rs
@@ -0,0 +1,100 @@
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::{
+ CandleResolution, GetCandlesOpts, GetHistoricalFundingOpts, GetTradesOpts, IndexerClient,
+ ListPerpetualMarketsOpts, SparklineTimePeriod, Ticker,
+};
+
+const ETH_USD_TICKER: &str = "ETH-USD";
+
+pub struct Rester {
+ indexer: IndexerClient,
+}
+
+impl Rester {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let indexer = IndexerClient::new(config.indexer);
+ Ok(Self { indexer })
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ let rester = Rester::connect().await?;
+ let indexer = rester.indexer;
+
+ // Test value
+ let ticker = Ticker::from(ETH_USD_TICKER);
+
+ let markets_options = ListPerpetualMarketsOpts {
+ limit: Some(5),
+ ..Default::default()
+ };
+ let markets = indexer
+ .markets()
+ .list_perpetual_markets(Some(markets_options))
+ .await?;
+ tracing::info!("Markets response: {:?}", markets);
+
+ let markets_options = ListPerpetualMarketsOpts {
+ ticker: Some(ticker.clone()),
+ ..Default::default()
+ };
+ let market = indexer
+ .markets()
+ .list_perpetual_markets(Some(markets_options))
+ .await?;
+ tracing::info!("Market ({ETH_USD_TICKER}) response: {:?}", market);
+
+ let sparklines = indexer
+ .markets()
+ .get_sparklines(SparklineTimePeriod::SevenDays)
+ .await?;
+ tracing::info!(
+ "Sparklines ({ETH_USD_TICKER}) response: {:?}",
+ sparklines.get(&ticker)
+ );
+
+ let trades_opts = GetTradesOpts {
+ limit: Some(5),
+ ..Default::default()
+ };
+ let trades = indexer
+ .markets()
+ .get_trades(&ticker, Some(trades_opts))
+ .await?;
+ tracing::info!("Trades ({ETH_USD_TICKER}) response: {:?}", trades);
+
+ let orderbook = indexer
+ .markets()
+ .get_perpetual_market_orderbook(&ticker)
+ .await?;
+ tracing::info!("Orderbook ({ETH_USD_TICKER}) response: {:?}", orderbook);
+
+ let candles_opts = GetCandlesOpts {
+ limit: Some(3),
+ ..Default::default()
+ };
+ let candles = indexer
+ .markets()
+ .get_candles(&ticker, CandleResolution::M1, Some(candles_opts))
+ .await?;
+ tracing::info!("Candles ({ETH_USD_TICKER}) response: {:?}", candles);
+
+ let fund_opts = GetHistoricalFundingOpts {
+ limit: Some(3),
+ ..Default::default()
+ };
+ let funding = indexer
+ .markets()
+ .get_historical_funding(&ticker, Some(fund_opts))
+ .await?;
+ tracing::info!(
+ "Historical funding ({ETH_USD_TICKER}) response: {:?}",
+ funding
+ );
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/noble_transfer.rs b/v4-client-rs/client/examples/noble_transfer.rs
new file mode 100644
index 00000000..ad9edc12
--- /dev/null
+++ b/v4-client-rs/client/examples/noble_transfer.rs
@@ -0,0 +1,116 @@
+use anyhow::Result;
+
+#[cfg(feature = "noble")]
+mod noble_transfer_example {
+ use super::*;
+ use anyhow::{anyhow as err, Error};
+ use dydx_v4_rust::config::ClientConfig;
+ use dydx_v4_rust::indexer::Token;
+ use dydx_v4_rust::noble::{NobleClient, NobleUsdc};
+ use dydx_v4_rust::node::{NodeClient, Wallet};
+ use tokio::time::{sleep, Duration};
+
+ const TEST_MNEMONIC: &str = "mirror actor skill push coach wait confirm orchard lunch mobile athlete gossip awake miracle matter bus reopen team ladder lazy list timber render wait";
+ const DYDX_SOURCE_CHANNEL: &str = "channel-0";
+ const NOBLE_SOURCE_CHANNEL: &str = "channel-33";
+
+ pub struct Bridger {
+ wallet: Wallet,
+ noble: NobleClient,
+ node: NodeClient,
+ }
+
+ impl Bridger {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let noble = NobleClient::connect(
+ config
+ .noble
+ .ok_or_else(|| err!("Config file must contain a [noble] config!"))?,
+ )
+ .await?;
+ let node = NodeClient::connect(config.node).await?;
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self {
+ noble,
+ wallet,
+ node,
+ })
+ }
+ }
+
+ #[tokio::main]
+ pub async fn run() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ let mut bridger = Bridger::connect().await?;
+
+ let mut account_dydx = bridger.wallet.account_offline(0)?;
+ let mut account_noble = bridger.wallet.noble().account_offline(0)?;
+
+ let address_dydx = account_dydx.address().clone();
+ let address_noble = account_noble.address().clone();
+
+ tracing::info!(
+ "Before transfer balance: {:?}",
+ bridger
+ .noble
+ .get_account_balances(address_noble.clone())
+ .await?
+ );
+ let tx_hash = bridger
+ .node
+ .send_token_ibc(
+ &mut account_dydx,
+ address_dydx.clone(),
+ address_noble.clone(),
+ Token::Usdc(1.into()),
+ DYDX_SOURCE_CHANNEL.into(),
+ )
+ .await?;
+ tracing::info!("dYdX -> Noble Tx hash: {tx_hash}");
+
+ sleep(Duration::from_secs(30)).await;
+
+ tracing::info!(
+ "After transfer balance: {:?}",
+ bridger
+ .noble
+ .get_account_balances(address_noble.clone())
+ .await?
+ );
+
+ let tx_hash = bridger
+ .noble
+ .send_token_ibc(
+ &mut account_noble,
+ address_noble.clone(),
+ address_dydx,
+ NobleUsdc::from(1),
+ NOBLE_SOURCE_CHANNEL.into(),
+ )
+ .await?;
+ tracing::info!("Noble -> dYdX Tx hash: {tx_hash}");
+
+ sleep(Duration::from_secs(30)).await;
+
+ tracing::info!(
+ "Undo transfer balance: {:?}",
+ bridger
+ .noble
+ .get_account_balances(address_noble.clone())
+ .await?
+ );
+
+ Ok(())
+ }
+}
+
+#[cfg(feature = "noble")]
+fn main() -> Result<()> {
+ noble_transfer_example::run()
+}
+
+#[cfg(not(feature = "noble"))]
+fn main() {
+ eprintln!("Feature 'noble' must be enabled to run this example!")
+}
diff --git a/v4-client-rs/client/examples/place_order_long_term.rs b/v4-client-rs/client/examples/place_order_long_term.rs
new file mode 100644
index 00000000..1c18b3b8
--- /dev/null
+++ b/v4-client-rs/client/examples/place_order_long_term.rs
@@ -0,0 +1,65 @@
+mod support;
+use anyhow::{Error, Result};
+use bigdecimal::BigDecimal;
+use chrono::{TimeDelta, Utc};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::{ClientId, IndexerClient, Ticker};
+use dydx_v4_rust::node::{NodeClient, OrderBuilder, OrderSide, Wallet};
+use support::constants::TEST_MNEMONIC;
+use v4_proto_rs::dydxprotocol::clob::order::TimeInForce;
+
+const ETH_USD_TICKER: &str = "ETH-USD";
+
+pub struct OrderPlacer {
+ client: NodeClient,
+ indexer: IndexerClient,
+ wallet: Wallet,
+}
+
+impl OrderPlacer {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let client = NodeClient::connect(config.node).await?;
+ let indexer = IndexerClient::new(config.indexer);
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self {
+ client,
+ indexer,
+ wallet,
+ })
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ #[cfg(feature = "telemetry")]
+ support::telemetry::metrics_dashboard().await?;
+ let mut placer = OrderPlacer::connect().await?;
+ let mut account = placer.wallet.account(0, &mut placer.client).await?;
+
+ // Test values
+ let subaccount = account.subaccount(0)?;
+ let client_id = ClientId::random();
+ let ticker = Ticker(ETH_USD_TICKER.into());
+ let market = placer
+ .indexer
+ .markets()
+ .get_perpetual_market(&ticker)
+ .await?;
+
+ let now = Utc::now();
+ let time_in_force_seconds = now + TimeDelta::seconds(60);
+
+ let (_id, order) = OrderBuilder::new(market, subaccount)
+ .limit(OrderSide::Buy, 123, BigDecimal::new(2.into(), 2))
+ .time_in_force(TimeInForce::Unspecified)
+ .until(time_in_force_seconds)
+ .long_term()
+ .build(client_id)?;
+
+ let tx_hash = placer.client.place_order(&mut account, order).await?;
+ tracing::info!("Broadcast transaction hash: {:?}", tx_hash);
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/place_order_short_term.rs b/v4-client-rs/client/examples/place_order_short_term.rs
new file mode 100644
index 00000000..f0e81f97
--- /dev/null
+++ b/v4-client-rs/client/examples/place_order_short_term.rs
@@ -0,0 +1,63 @@
+mod support;
+use anyhow::{Error, Result};
+use bigdecimal::BigDecimal;
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::IndexerClient;
+use dydx_v4_rust::node::{NodeClient, OrderBuilder, OrderSide, Wallet};
+use std::str::FromStr;
+use support::constants::TEST_MNEMONIC;
+use v4_proto_rs::dydxprotocol::clob::order::TimeInForce;
+
+const ETH_USD_TICKER: &str = "ETH-USD";
+
+pub struct OrderPlacer {
+ client: NodeClient,
+ indexer: IndexerClient,
+ wallet: Wallet,
+}
+
+impl OrderPlacer {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let client = NodeClient::connect(config.node).await?;
+ let indexer = IndexerClient::new(config.indexer);
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self {
+ client,
+ indexer,
+ wallet,
+ })
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ #[cfg(feature = "telemetry")]
+ support::telemetry::metrics_dashboard().await?;
+ let mut placer = OrderPlacer::connect().await?;
+ let mut account = placer.wallet.account(0, &mut placer.client).await?;
+ let subaccount = account.subaccount(0)?;
+
+ let market = placer
+ .indexer
+ .markets()
+ .get_perpetual_market(&ETH_USD_TICKER.into())
+ .await?;
+
+ let current_block_height = placer.client.get_latest_block_height().await?;
+
+ let size = BigDecimal::from_str("0.02")?;
+ let (_id, order) = OrderBuilder::new(market, subaccount)
+ .market(OrderSide::Buy, size)
+ .reduce_only(false)
+ .price(100) // market-order slippage protection price
+ .time_in_force(TimeInForce::Unspecified)
+ .until(current_block_height.ahead(10))
+ .build(123456)?;
+
+ let tx_hash = placer.client.place_order(&mut account, order).await?;
+ tracing::info!("Broadcast transaction hash: {:?}", tx_hash);
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/send_token.rs b/v4-client-rs/client/examples/send_token.rs
new file mode 100644
index 00000000..d6b0cc90
--- /dev/null
+++ b/v4-client-rs/client/examples/send_token.rs
@@ -0,0 +1,40 @@
+mod support;
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::Token;
+use dydx_v4_rust::node::{NodeClient, Wallet};
+use support::constants::TEST_MNEMONIC;
+
+pub struct Transferor {
+ client: NodeClient,
+ wallet: Wallet,
+}
+
+impl Transferor {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let client = NodeClient::connect(config.node).await?;
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self { client, wallet })
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ let mut transferor = Transferor::connect().await?;
+
+ let mut account0 = transferor.wallet.account(0, &mut transferor.client).await?;
+ let sender = account0.address().clone();
+
+ let account1 = transferor.wallet.account(1, &mut transferor.client).await?;
+ let recipient = account1.address().clone();
+
+ let tx_hash = transferor
+ .client
+ .send_token(&mut account0, sender, recipient, Token::DydxTnt(1.into()))
+ .await?;
+ tracing::info!("Send token transaction hash: {:?}", tx_hash);
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/sequencer.rs b/v4-client-rs/client/examples/sequencer.rs
new file mode 100644
index 00000000..54c9a105
--- /dev/null
+++ b/v4-client-rs/client/examples/sequencer.rs
@@ -0,0 +1,162 @@
+mod support;
+use anyhow::{Error, Result};
+use async_trait::async_trait;
+use bigdecimal::BigDecimal;
+use chrono::{TimeDelta, Utc};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::{
+ Address, ClientId, IndexerClient, PerpetualMarket, Subaccount, Ticker,
+};
+use dydx_v4_rust::node::{sequencer::*, Account, NodeClient, OrderBuilder, OrderSide, Wallet};
+use std::sync::Arc;
+use support::constants::TEST_MNEMONIC;
+use tokio::sync::Mutex;
+use tokio::time::{sleep, Duration};
+use v4_proto_rs::dydxprotocol::clob::order::TimeInForce;
+
+const ETH_USD_TICKER: &str = "ETH-USD";
+
+pub struct OrderPlacer {
+ client: NodeClient,
+ market: PerpetualMarket,
+ account: Account,
+ subaccount: Subaccount,
+}
+
+impl OrderPlacer {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let mut client = NodeClient::connect(config.node).await?;
+ let indexer = IndexerClient::new(config.indexer);
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ let ticker = Ticker(ETH_USD_TICKER.into());
+ let market = indexer.markets().get_perpetual_market(&ticker).await?;
+ let account = wallet.account(0, &mut client).await?;
+ let subaccount = account.subaccount(0)?;
+ Ok(Self {
+ client,
+ market,
+ account,
+ subaccount,
+ })
+ }
+
+ pub async fn place_order(&mut self) -> Result<()> {
+ let (_, order) = OrderBuilder::new(self.market.clone(), self.subaccount.clone())
+ .limit(OrderSide::Buy, 123, BigDecimal::new(2.into(), 2))
+ .time_in_force(TimeInForce::Unspecified)
+ .until(Utc::now() + TimeDelta::seconds(60))
+ .long_term()
+ .build(ClientId::random())?;
+
+ self.client
+ .place_order(&mut self.account, order)
+ .await
+ .map(drop)
+ .map_err(Error::msg)
+ }
+
+ pub async fn fetch_sequence_number(&mut self) -> Result<u64> {
+ let (_, sequence_number) = self.client.query_address(self.account.address()).await?;
+ Ok(sequence_number)
+ }
+}
+
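+/// A sketch of a shared-counter sequencer: every call hands out the next strictly increasing
+/// sequence number from a mutex-guarded counter, so concurrent placers using the same account
+/// never reuse a nonce (their orders may still reach the network out of order, see below).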
+#[derive(Clone)]
+pub struct CustomSequencer {
+ counter: Arc<Mutex<u64>>,
+ // Or use an Atomic in this case
+}
+
+impl CustomSequencer {
+ pub fn new(start_at: u64) -> Self {
+ Self {
+ counter: Arc::new(Mutex::new(start_at)),
+ }
+ }
+}
+
+#[async_trait]
+impl Sequencer for CustomSequencer {
+ async fn next_nonce(&mut self, _: &Address) -> Result<Nonce> {
+ let mut counter = self.counter.lock().await;
+ *counter += 1;
+ Ok(Nonce::Sequence(*counter - 1))
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ #[cfg(feature = "telemetry")]
+ support::telemetry::metrics_dashboard().await?;
+ let mut placer = OrderPlacer::connect().await?;
+
+ // In Cosmos-based blockchains like dYdX, an account sequence number is used as a nonce to
+ // prevent replay attacks. This affects only relevant requests: non-short-term orders and transfer methods.
+ // This crate provides three different mechanisms to set the sequence number:
+ //
+ // - QueryingSequencer: a request is made to the network to fetch the correct sequence number
+ // to be used in the next transaction. This query is issued before every relevant
+ // transaction broadcast.
+ // - IncrementalSequencer: for each relevant request, a simple counter is incremented. The
+ // starting counter value must be set manually, for example using the value returned by
+ // NodeClient::query_address().
+ // - TimestamperSequencer: for each relevant request, the current timestamp (milliseconds) is
+ // used.
+ //
+ // The Sequencer trait can be used to provide custom sequencers to the NodeClient.
+
+ // By default, NodeClient uses the QueryingSequencer.
+ placer.place_order().await?;
+ sleep(Duration::from_secs(4)).await;
+ tracing::info!(
+ "(After QueryingSequencer) Sequence number: {}",
+ placer.fetch_sequence_number().await?
+ );
+
+ // To use the incremental sequencer, create one with the to-be used addresses and initial
+ // counters.
+ let incremental_sequencer = IncrementalSequencer::new(&[(
+ placer.account.address().clone(),
+ placer.fetch_sequence_number().await?,
+ )]);
+ placer.client.with_sequencer(incremental_sequencer);
+
+ placer.place_order().await?;
+ sleep(Duration::from_secs(4)).await;
+ tracing::info!(
+ "(After IncrementalSequencer) Sequence number: {}",
+ placer.fetch_sequence_number().await?
+ );
+
+ // And the timestamper sequencer,
+ let timestamper_sequencer = TimestamperSequencer;
+ placer.client.with_sequencer(timestamper_sequencer);
+
+ placer.place_order().await?;
+ sleep(Duration::from_secs(4)).await;
+ tracing::info!(
+ "(After TimestamperSequencer) Sequence number: {}",
+ placer.fetch_sequence_number().await?
+ );
+
+ // To handle other, more specific scenarios, a custom Sequencer can be provided.
+ // Here we handle a concurrent scenario where different trading bots running in the same
+ // process use the same account to issue long-term orders.
+ // Note: orders may still reach the network out of order, resulting in a sequencing error.
+ let custom_sequencer = CustomSequencer::new(placer.fetch_sequence_number().await?);
+ let mut placer1 = OrderPlacer::connect().await?;
+ let mut placer2 = OrderPlacer::connect().await?;
+ placer1.client.with_sequencer(custom_sequencer.clone());
+ placer2.client.with_sequencer(custom_sequencer.clone());
+
+ tokio::try_join!(placer1.place_order(), placer2.place_order())?;
+ sleep(Duration::from_secs(4)).await;
+ tracing::info!(
+ "(After CustomSequencer, two orders) Sequence number: {}",
+ placer.fetch_sequence_number().await?
+ );
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/support/constants.rs b/v4-client-rs/client/examples/support/constants.rs
new file mode 100644
index 00000000..b9ba39cf
--- /dev/null
+++ b/v4-client-rs/client/examples/support/constants.rs
@@ -0,0 +1 @@
+pub const TEST_MNEMONIC: &str = "mirror actor skill push coach wait confirm orchard lunch mobile athlete gossip awake miracle matter bus reopen team ladder lazy list timber render wait";
diff --git a/v4-client-rs/client/examples/support/mod.rs b/v4-client-rs/client/examples/support/mod.rs
new file mode 100644
index 00000000..a59bb337
--- /dev/null
+++ b/v4-client-rs/client/examples/support/mod.rs
@@ -0,0 +1,6 @@
+pub mod constants;
+#[allow(dead_code)]
+pub mod order_book;
+#[allow(dead_code)]
+#[cfg(feature = "telemetry")]
+pub mod telemetry;
diff --git a/v4-client-rs/client/examples/support/order_book.rs b/v4-client-rs/client/examples/support/order_book.rs
new file mode 100644
index 00000000..9cbea9ce
--- /dev/null
+++ b/v4-client-rs/client/examples/support/order_book.rs
@@ -0,0 +1,149 @@
+use bigdecimal::Zero;
+use derive_more::{Deref, DerefMut};
+use dydx_v4_rust::indexer::{
+ Feed, OrderBookResponseObject, OrderbookResponsePriceLevel, OrdersMessage, Price, Quantity,
+};
+use std::collections::BTreeMap;
+use std::fmt;
+use tokio::sync::watch;
+use tokio::task::JoinHandle;
+
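+/// Locally maintained live order book: a background task applies `OrdersMessage` feed updates
+/// to an `OrderBook` held in a `watch` channel, so holders can `borrow()` the latest book or
+/// await `changed()` notifications.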
+#[derive(Deref, DerefMut)]
+pub struct LiveOrderBook {
+ handle: JoinHandle<()>,
+ #[deref]
+ #[deref_mut]
+ rx: watch::Receiver<OrderBook>,
+}
+
+impl LiveOrderBook {
+ pub fn new(feed: Feed<OrdersMessage>) -> Self {
+ let (tx, rx) = watch::channel(OrderBook::default());
+ let task = LiveOrderBookTask { feed, tx };
+ let handle = tokio::spawn(task.entrypoint());
+ Self { handle, rx }
+ }
+}
+
+impl Drop for LiveOrderBook {
+ fn drop(&mut self) {
+ self.handle.abort();
+ }
+}
+
+struct LiveOrderBookTask {
+ feed: Feed<OrdersMessage>,
+ tx: watch::Sender<OrderBook>,
+}
+
+impl LiveOrderBookTask {
+ async fn entrypoint(mut self) {
+ while let Some(msg) = self.feed.recv().await {
+ match msg {
+ OrdersMessage::Initial(upd) => {
+ self.tx.send_modify(move |order_book| {
+ order_book.update_bids(upd.contents.bids);
+ order_book.update_asks(upd.contents.asks);
+ });
+ }
+ OrdersMessage::Update(upd) => {
+ self.tx.send_modify(move |order_book| {
+ if let Some(bids) = upd.contents.bids {
+ order_book.update_bids(bids);
+ }
+ if let Some(asks) = upd.contents.asks {
+ order_book.update_asks(asks);
+ }
+ });
+ }
+ }
+ }
+ }
+}
+
+pub struct Quote<'a> {
+ pub price: &'a Price,
+ pub quantity: &'a Quantity,
+}
+
+impl<'a> From<(&'a Price, &'a Quantity)> for Quote<'a> {
+ fn from((price, quantity): (&'a Price, &'a Quantity)) -> Self {
+ Self { price, quantity }
+ }
+}
+
+pub struct Spread<'a> {
+ pub bid: Quote<'a>,
+ pub ask: Quote<'a>,
+}
+
+#[derive(Default, Debug)]
+pub struct OrderBook {
+ /// Bid levels (prices at which you can sell)
+ pub bids: BTreeMap<Price, Quantity>,
+ /// Ask levels (prices at which you can buy, i.e. what sellers ask)
+ pub asks: BTreeMap<Price, Quantity>,
+}
+
+impl OrderBook {
+ pub fn bids(&self) -> impl Iterator<Item = Quote<'_>> {
+ self.bids.iter().map(Quote::from).rev()
+ }
+
+ pub fn asks(&self) -> impl Iterator<Item = Quote<'_>> {
+ self.asks.iter().map(Quote::from)
+ }
+
+ pub fn spread(&self) -> Option<Spread<'_>> {
+ let bid = self.bids().next()?;
+ let ask = self.asks().next()?;
+ Some(Spread { bid, ask })
+ }
+
+ fn update(map: &mut BTreeMap<Price, Quantity>, levels: Vec<OrderbookResponsePriceLevel>) {
+ for level in levels {
+ if level.size.is_zero() {
+ map.remove(&level.price);
+ } else {
+ map.insert(level.price, level.size);
+ }
+ }
+ }
+
+ pub fn update_bids(&mut self, bids: Vec<OrderbookResponsePriceLevel>) {
+ Self::update(&mut self.bids, bids);
+ }
+
+ pub fn update_asks(&mut self, asks: Vec<OrderbookResponsePriceLevel>) {
+ Self::update(&mut self.asks, asks);
+ }
+
+ pub fn table(&self) -> OrderBookTable<'_> {
+ OrderBookTable { inner: self }
+ }
+}
+
+impl From<OrderBookResponseObject> for OrderBook {
+ fn from(response: OrderBookResponseObject) -> Self {
+ let mut order_book = OrderBook::default();
+ order_book.update_bids(response.bids);
+ order_book.update_asks(response.asks);
+ order_book
+ }
+}
+
+pub struct OrderBookTable<'a> {
+ inner: &'a OrderBook,
+}
+
+impl<'a> fmt::Display for OrderBookTable<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ for (price, size) in &self.inner.bids {
+ writeln!(f, "BID: {} - {}", price, size)?;
+ }
+ for (price, size) in &self.inner.asks {
+ writeln!(f, "ASK: {} - {}", price, size)?;
+ }
+ Ok(())
+ }
+}
diff --git a/v4-client-rs/client/examples/support/telemetry.rs b/v4-client-rs/client/examples/support/telemetry.rs
new file mode 100644
index 00000000..5c6c2513
--- /dev/null
+++ b/v4-client-rs/client/examples/support/telemetry.rs
@@ -0,0 +1,37 @@
+use anyhow::{anyhow as err, Result};
+use metrics_exporter_tcp::Error;
+use std::io::ErrorKind;
+
+/// Setup telemetry
+pub async fn metrics_dashboard() -> Result<()> {
+ let default_port = 5000;
+ let alt_port = 5049;
+ // Try to start the server on the default port (5000); if it is already taken, listen on 5049
+ let port = match setup_server(default_port) {
+ Ok(()) => default_port,
+ Err(e) => match e {
+ Error::Io(e) => {
+ if matches!(e.kind(), ErrorKind::AddrInUse) {
+ setup_server(alt_port)
+ .map_err(|e| err!("Unable to setup telemetry server on port {default_port} or {alt_port}: {e}"))?;
+ alt_port
+ } else {
+ return Err(e.into());
+ }
+ }
+ _ => return Err(e.into()),
+ },
+ };
+
+ tracing::info!("== THIS EXAMPLE USES `https://github.com/metrics-rs/metrics/tree/main/metrics-observer` AS A METRICS EXPORTER (on TCP port {port}) ==");
+
+ Ok(())
+}
+
+fn setup_server(port: u16) -> Result<(), Error> {
+ let addr: std::net::SocketAddr = format!("0.0.0.0:{port}")
+ .parse()
+ .expect("Failed parsing SocketAddr");
+ let builder = metrics_exporter_tcp::TcpBuilder::new().listen_address(addr);
+ builder.install()
+}
diff --git a/v4-client-rs/client/examples/transfer.rs b/v4-client-rs/client/examples/transfer.rs
new file mode 100644
index 00000000..5585ca17
--- /dev/null
+++ b/v4-client-rs/client/examples/transfer.rs
@@ -0,0 +1,37 @@
+mod support;
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::node::{NodeClient, Wallet};
+use support::constants::TEST_MNEMONIC;
+
+pub struct Transferor {
+ client: NodeClient,
+ wallet: Wallet,
+}
+
+impl Transferor {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let client = NodeClient::connect(config.node).await?;
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self { client, wallet })
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ let mut transferor = Transferor::connect().await?;
+ let mut account = transferor.wallet.account(0, &mut transferor.client).await?;
+
+ let sender = account.subaccount(0)?;
+ let recipient = account.subaccount(1)?;
+
+ let tx_hash = transferor
+ .client
+ .transfer(&mut account, sender, recipient, 1)
+ .await?;
+ tracing::info!("Transfer transaction hash: {:?}", tx_hash);
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/utility_endpoint.rs b/v4-client-rs/client/examples/utility_endpoint.rs
new file mode 100644
index 00000000..1db64b36
--- /dev/null
+++ b/v4-client-rs/client/examples/utility_endpoint.rs
@@ -0,0 +1,42 @@
+mod support;
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::IndexerClient;
+use dydx_v4_rust::node::Wallet;
+use support::constants::TEST_MNEMONIC;
+
+pub struct Rester {
+ indexer: IndexerClient,
+ wallet: Wallet,
+}
+
+impl Rester {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let indexer = IndexerClient::new(config.indexer);
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self { indexer, wallet })
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ let rester = Rester::connect().await?;
+ let account = rester.wallet.account_offline(0)?;
+ let indexer = rester.indexer;
+
+ // Test values
+ let address = account.address();
+
+ let time = indexer.utility().get_time().await?;
+ tracing::info!("Time: {time:?}");
+
+ let height = indexer.utility().get_height().await?;
+ tracing::info!("Height: {height:?}");
+
+ let screen = indexer.utility().get_screen(address).await?;
+ tracing::info!("Screen for address {address}: {screen:?}");
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/validator_get.rs b/v4-client-rs/client/examples/validator_get.rs
new file mode 100644
index 00000000..8f2b169e
--- /dev/null
+++ b/v4-client-rs/client/examples/validator_get.rs
@@ -0,0 +1,120 @@
+mod support;
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::node::{NodeClient, Wallet};
+use support::constants::TEST_MNEMONIC;
+
+const ETH_USD_PAIR_ID: u32 = 1;
+
+pub struct Getter {
+ client: NodeClient,
+ wallet: Wallet,
+}
+
+impl Getter {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let client = NodeClient::connect(config.node).await?;
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self { client, wallet })
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ let mut getter = Getter::connect().await?;
+ // Test values
+ let account = getter.wallet.account_offline(0)?;
+ let address = account.address().clone();
+ let subaccount = account.subaccount(0)?;
+
+ let account = getter.client.get_account(&address).await?;
+ tracing::info!("Get account: {account:?}");
+
+ let balances = getter.client.get_account_balances(&address).await?;
+ tracing::info!("Get account balances: {balances:?}");
+
+ let balance = getter
+ .client
+ .get_account_balance(&address, &"adv4tnt".parse()?)
+ .await?;
+ tracing::info!("Get account balance: {balance:?}");
+
+ let node_info = getter.client.get_node_info().await?;
+ let version = node_info
+ .application_version
+ .map(|v| format!("{} v{} @{}", v.name, v.version, &v.git_commit[0..7]));
+ tracing::info!(
+ "Get node info (node version): {}",
+ version.unwrap_or("unknown".into())
+ );
+
+ let block = getter.client.get_latest_block().await?;
+ tracing::info!("Get latest block: {block:?}");
+
+ let height = getter.client.get_latest_block_height().await?;
+ tracing::info!("Get latest block height: {height:?}");
+
+ let stats = getter.client.get_user_stats(&address).await?;
+ tracing::info!("Get user stats: {stats:?}");
+
+ let validators = getter.client.get_all_validators(None).await?;
+ tracing::info!("Get all validators: {validators:?}");
+
+ let subaccount = getter.client.get_subaccount(&subaccount).await?;
+ tracing::info!("Get subaccount: {subaccount:?}");
+
+ let subaccounts = getter.client.get_subaccounts().await?;
+ tracing::info!("Get subaccounts: {subaccounts:?}");
+
+ let clob_pair = getter.client.get_clob_pair(ETH_USD_PAIR_ID).await?;
+ tracing::info!("Get clob pair: {clob_pair:?}");
+
+ let clob_pairs = getter.client.get_clob_pairs(None).await?;
+ tracing::info!("Get clob pairs: {clob_pairs:?}");
+
+ let price = getter.client.get_price(ETH_USD_PAIR_ID).await?;
+ tracing::info!("Get price: {price:?}");
+
+ let prices = getter.client.get_prices(None).await?;
+ tracing::info!("Get prices: {prices:?}");
+
+ let perpetual = getter.client.get_perpetual(ETH_USD_PAIR_ID).await?;
+ tracing::info!("Get perpetual: {perpetual:?}");
+
+ let perpetuals = getter.client.get_perpetuals(None).await?;
+ tracing::info!("Get perpetuals: {perpetuals:?}");
+
+ let equity_tier_limit = getter.client.get_equity_tier_limit_config().await?;
+ tracing::info!("Get equity tier limit config: {equity_tier_limit:?}");
+
+ let delegations = getter
+ .client
+ .get_delegator_delegations(address.clone(), None)
+ .await?;
+ tracing::info!("Get delegator delegations: {delegations:?}");
+
+ let unbonding_delegations = getter
+ .client
+ .get_delegator_unbonding_delegations(address.clone(), None)
+ .await?;
+ tracing::info!("Get delegator unbonding delegations: {unbonding_delegations:?}");
+
+ let bridge_messages = getter
+ .client
+ .get_delayed_complete_bridge_messages(address.clone())
+ .await?;
+ tracing::info!("Get delayed complete bridge messages: {bridge_messages:?}");
+
+ let fee_tiers = getter.client.get_fee_tiers().await?;
+ tracing::info!("Get fee tiers: {fee_tiers:?}");
+
+ let user_fee_tier = getter.client.get_user_fee_tier(address.clone()).await?;
+ tracing::info!("Get user fee tier: {user_fee_tier:?}");
+
+ let reward_params = getter.client.get_rewards_params().await?;
+ tracing::info!("Get reward params: {reward_params:?}");
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/validator_post.rs b/v4-client-rs/client/examples/validator_post.rs
new file mode 100644
index 00000000..88cc095a
--- /dev/null
+++ b/v4-client-rs/client/examples/validator_post.rs
@@ -0,0 +1,82 @@
+mod support;
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::node::{NodeClient, Wallet};
+use rand::{thread_rng, Rng};
+use support::constants::TEST_MNEMONIC;
+use tokio::time::{sleep, Duration};
+use v4_proto_rs::dydxprotocol::clob::{
+ order::{ConditionType, GoodTilOneof, Side, TimeInForce},
+ Order, OrderId,
+};
+use v4_proto_rs::dydxprotocol::subaccounts::SubaccountId;
+
+const ETH_USD_PAIR_ID: u32 = 1;
+const ETH_USD_QUANTUMS: u64 = 10_000_000; // calculated based on market
+const SUBTICKS: u64 = 40_000_000_000; // calculated based on market and price
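+// Note (assumption, not taken from this repository): with ETH-USD's atomic resolution of -9,
+// 10_000_000 quantums correspond to 0.01 ETH, and subticks encode the limit price in the
+// market's integer subtick units; consult the market parameters when converting real sizes.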
+const ORDER_FLAGS_SHORT_TERM: u32 = 0; // short-term orders use order_flags = 0
+const N_ORDERS: usize = 6;
+
+pub struct OrderPlacer {
+ client: NodeClient,
+ wallet: Wallet,
+}
+
+impl OrderPlacer {
+ pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let client = NodeClient::connect(config.node).await?;
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self { client, wallet })
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ #[cfg(feature = "telemetry")]
+ support::telemetry::metrics_dashboard().await?;
+ let mut placer = OrderPlacer::connect().await?;
+ let mut account = placer.wallet.account(0, &mut placer.client).await?;
+
+ let subaccount = SubaccountId {
+ owner: account.address().to_string(),
+ number: 0,
+ };
+ let order_ids = (0..N_ORDERS)
+ .map(|_| OrderId {
+ subaccount_id: Some(subaccount.clone()),
+ client_id: thread_rng().gen_range(0..100_000_000),
+ order_flags: ORDER_FLAGS_SHORT_TERM,
+ clob_pair_id: ETH_USD_PAIR_ID,
+ })
+ .collect::<Vec<_>>();
+
+ // Push some orders
+ for id in &order_ids {
+ // Short term orders have a maximum validity of 20 blocks
+ let til_height = placer.client.get_latest_block_height().await?.ahead(10).0;
+ let order = Order {
+ order_id: Some(id.clone()),
+ side: Side::Sell.into(),
+ quantums: ETH_USD_QUANTUMS,
+ subticks: SUBTICKS,
+ time_in_force: TimeInForce::Unspecified.into(),
+ reduce_only: false,
+ client_metadata: 0u32,
+ condition_type: ConditionType::Unspecified.into(),
+ conditional_order_trigger_subticks: 0u64,
+ good_til_oneof: Some(GoodTilOneof::GoodTilBlock(til_height)),
+ };
+
+ let tx_hash = placer.client.place_order(&mut account, order).await?;
+ tracing::info!(
+ "Broadcast order ({}) transaction hash: {:?}",
+ id.client_id,
+ tx_hash
+ );
+ sleep(Duration::from_secs(2)).await;
+ }
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/wallet.rs b/v4-client-rs/client/examples/wallet.rs
new file mode 100644
index 00000000..2631d1c2
--- /dev/null
+++ b/v4-client-rs/client/examples/wallet.rs
@@ -0,0 +1,102 @@
+mod support;
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+#[cfg(feature = "noble")]
+use dydx_v4_rust::noble::NobleClient;
+use dydx_v4_rust::node::{NodeClient, Wallet};
+use support::constants::TEST_MNEMONIC;
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+
+ // Create a `Wallet` from a mnemonic
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+
+ // A `Wallet` is used to derive an `Account` used to sign transactions
+ let account0 = wallet.account_offline(0)?;
+
+ // Multiple accounts can be derived from your mnemonic/master private key
+ let account1 = wallet.account_offline(1)?;
+
+    // Some online attributes, such as an up-to-date sequence number, are required by some
+    // order/transfer methods of `NodeClient`.
+    // This is usually not needed if `NodeClient` is allowed to manage sequencing (`manage_sequencing = true`).
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let mut client = NodeClient::connect(config.node).await?;
+ let account_upd = wallet.account(0, &mut client).await?;
+
+ // An `Account` is mostly identified by its `Address`
+ let address = account0.address();
+ tracing::info!(
+ "Account '0' address: {address} | sequence-number: {} | account-number (online ID): {}",
+ account0.sequence_number(),
+ account0.account_number()
+ );
+ tracing::info!(
+ "Account '0' (synced-values) address: {} | sequence-number: {} | account-number (online ID): {}",
+ account_upd.address(), account_upd.sequence_number(), account_upd.account_number()
+ );
+ tracing::info!("Account '1' address: {}", account1.address());
+
+ // dYdX uses the concept of "subaccounts" to help isolate funds and manage risk
+ let subaccount00 = account0.subaccount(0)?;
+ let subaccount01 = account0.subaccount(1)?;
+
+ // Different subaccounts under the same account have the same address, being differentiated by
+ // their subaccount number
+ tracing::info!(
+ "Account '0' subaccount '0': address {} | number {}",
+ subaccount00.address,
+ subaccount00.number
+ );
+ tracing::info!(
+ "Account '0' subaccount '1': address {} | number {}",
+ subaccount01.address,
+ subaccount01.number
+ );
+
+ // Subaccounts 0..=127 are parent subaccounts. These subaccounts can have multiple positions
+ // opened and all positions are cross-margined.
+ // Subaccounts 128..=128000 are child subaccounts. These subaccounts can only have one position
+ // open.
+ tracing::info!(
+ "Is subaccount '0' a parent subaccount? {:?}",
+ subaccount00.is_parent()
+ );
+ tracing::info!(
+ "The parent subaccount of the subaccount '256' is: {:?}",
+ account0.subaccount(256)?.parent()
+ );
+ tracing::info!(
+ "Is the parent of subaccount '256' equal to subaccount '0'? {:?}",
+ account0.subaccount(256)?.parent() == subaccount00
+ );
+
+ #[cfg(feature = "noble")]
+ {
+ // To derive a Noble account (used to transfer USDC in and out of dYdX through Cosmos IBC)
+ // the same wallet instance as before can be used
+ let noble_account0 = wallet.noble().account_offline(0)?;
+ tracing::info!(
+ "Account '0' (Noble) address: {} | sequence-number: {}",
+ noble_account0.address(),
+ noble_account0.sequence_number()
+ );
+
+ // Noble accounts also use sequence numbers
+ if let Some(noble_config) = config.noble {
+ let mut noble = NobleClient::connect(noble_config).await?;
+ let noble_account_upd = wallet.noble().account(0, &mut noble).await?;
+ tracing::info!(
+ "Account '0' (Noble, synced-values) address: {} | sequence-number: {}",
+ noble_account_upd.address(),
+ noble_account_upd.sequence_number()
+ );
+ } else {
+ tracing::warn!("A [noble] configuration is required for some parts of this example.");
+ }
+ }
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/websockets.rs b/v4-client-rs/client/examples/websockets.rs
new file mode 100644
index 00000000..2851412e
--- /dev/null
+++ b/v4-client-rs/client/examples/websockets.rs
@@ -0,0 +1,89 @@
+mod support;
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::indexer::{
+ BlockHeightMessage, CandleResolution, CandlesMessage, Feed, IndexerClient, MarketsMessage,
+ OrdersMessage, ParentSubaccountsMessage, SubaccountsMessage, Ticker, TradesMessage,
+};
+use dydx_v4_rust::node::Wallet;
+use support::constants::TEST_MNEMONIC;
+use tokio::select;
+
+pub struct Feeder {
+    trades_feed: Feed<TradesMessage>,
+    orders_feed: Feed<OrdersMessage>,
+    markets_feed: Feed<MarketsMessage>,
+    subaccounts_feed: Feed<SubaccountsMessage>,
+    parent_subaccounts_feed: Feed<ParentSubaccountsMessage>,
+    candles_feed: Feed<CandlesMessage>,
+    height_feed: Feed<BlockHeightMessage>,
+}
+
+impl Feeder {
+    pub async fn connect() -> Result<Self> {
+ tracing_subscriber::fmt()
+ .with_max_level(tracing::Level::DEBUG)
+ .try_init()
+ .map_err(Error::msg)?;
+ #[cfg(feature = "telemetry")]
+ support::telemetry::metrics_dashboard().await?;
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let mut indexer = IndexerClient::new(config.indexer);
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+
+ let account = wallet.account_offline(0)?;
+ let subaccount = account.subaccount(127)?;
+
+ let ticker = Ticker::from("ETH-USD");
+ let markets_feed = indexer.feed().markets(false).await?;
+ let trades_feed = indexer.feed().trades(&ticker, false).await?;
+ let orders_feed = indexer.feed().orders(&ticker, false).await?;
+ let candles_feed = indexer
+ .feed()
+ .candles(&ticker, CandleResolution::M1, false)
+ .await?;
+ let subaccounts_feed = indexer
+ .feed()
+ .subaccounts(subaccount.clone(), false)
+ .await?;
+ let parent_subaccounts_feed = indexer
+ .feed()
+ .parent_subaccounts(subaccount.parent(), false)
+ .await?;
+ let height_feed = indexer.feed().block_height(false).await?;
+
+ Ok(Self {
+ trades_feed,
+ markets_feed,
+ orders_feed,
+ candles_feed,
+ subaccounts_feed,
+ parent_subaccounts_feed,
+ height_feed,
+ })
+ }
+
+ async fn step(&mut self) {
+ select! {
+ msg = self.trades_feed.recv() => if let Some(msg) = msg { tracing::info!("Received trades message: {msg:?}") },
+ msg = self.orders_feed.recv() => if let Some(msg) = msg { tracing::info!("Received orders message: {msg:?}") },
+ msg = self.markets_feed.recv() => if let Some(msg) = msg { tracing::info!("Received markets message: {msg:?}") },
+ msg = self.subaccounts_feed.recv() => if let Some(msg) = msg { tracing::info!("Received subaccounts message: {msg:?}") },
+ msg = self.parent_subaccounts_feed.recv() => if let Some(msg) = msg { tracing::info!("Received parent subaccounts message: {msg:?}") },
+ msg = self.candles_feed.recv() => if let Some(msg) = msg { tracing::info!("Received candles message: {msg:?}") },
+ msg = self.height_feed.recv() => if let Some(msg) = msg { tracing::info!("Received block height message: {msg:?}") },
+ }
+ }
+
+ async fn entrypoint(mut self) {
+ loop {
+ self.step().await;
+ }
+ }
+}
+#[tokio::main]
+async fn main() -> Result<()> {
+ let feeder = Feeder::connect().await?;
+ feeder.entrypoint().await;
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/withdraw.rs b/v4-client-rs/client/examples/withdraw.rs
new file mode 100644
index 00000000..0e68d239
--- /dev/null
+++ b/v4-client-rs/client/examples/withdraw.rs
@@ -0,0 +1,37 @@
+mod support;
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::node::{NodeClient, Wallet};
+use support::constants::TEST_MNEMONIC;
+
+pub struct Transferor {
+ client: NodeClient,
+ wallet: Wallet,
+}
+
+impl Transferor {
+    pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let client = NodeClient::connect(config.node).await?;
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self { client, wallet })
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ let mut transferor = Transferor::connect().await?;
+ let mut account = transferor.wallet.account(0, &mut transferor.client).await?;
+
+ let recipient = account.address().clone();
+ let sender = account.subaccount(0)?;
+
+ let tx_hash = transferor
+ .client
+ .withdraw(&mut account, sender, recipient, 1)
+ .await?;
+ tracing::info!("Withdraw transaction hash: {:?}", tx_hash);
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/examples/withdraw_other.rs b/v4-client-rs/client/examples/withdraw_other.rs
new file mode 100644
index 00000000..0a0f6308
--- /dev/null
+++ b/v4-client-rs/client/examples/withdraw_other.rs
@@ -0,0 +1,75 @@
+mod support;
+use anyhow::{Error, Result};
+use dydx_v4_rust::config::ClientConfig;
+use dydx_v4_rust::node::{NodeClient, Wallet};
+use std::iter::once;
+use support::constants::TEST_MNEMONIC;
+use v4_proto_rs::{
+ dydxprotocol::{sending::MsgWithdrawFromSubaccount, subaccounts::SubaccountId},
+ ToAny,
+};
+
+pub struct Transferor {
+ client: NodeClient,
+ wallet: Wallet,
+}
+
+impl Transferor {
+    pub async fn connect() -> Result<Self> {
+ let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+ let client = NodeClient::connect(config.node).await?;
+ let wallet = Wallet::from_mnemonic(TEST_MNEMONIC)?;
+ Ok(Self { client, wallet })
+ }
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+ tracing_subscriber::fmt().try_init().map_err(Error::msg)?;
+ let mut transferor = Transferor::connect().await?;
+ let account = transferor.wallet.account(0, &mut transferor.client).await?;
+ let mut client = transferor.client;
+
+ let amount = 1_u64;
+
+ let recipient = account.address().clone();
+ let sender = SubaccountId {
+ owner: recipient.to_string(),
+ number: 0,
+ };
+
+ // Simulate transaction
+ let msg = MsgWithdrawFromSubaccount {
+ sender: Some(sender.clone()),
+ recipient: recipient.to_string(),
+ asset_id: 0,
+ quantums: amount,
+ }
+ .to_any();
+ let simulated_tx = client
+ .builder
+ .build_transaction(&account, once(msg), None)?;
+ let simulation = client.simulate(&simulated_tx).await?;
+ tracing::info!("Simulation: {:?}", simulation);
+
+ let fee = client.builder.calculate_fee(Some(simulation.gas_used))?;
+ tracing::info!("Total fee: {:?}", fee);
+
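+    // Withdraw the full amount minus the estimated fee, and attach that fee to the transaction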
+ let fee_amount: u64 = fee.amount[0].amount.try_into()?;
+
+ // Issue transaction
+ let final_msg = MsgWithdrawFromSubaccount {
+ sender: Some(sender),
+ recipient: recipient.into(),
+ asset_id: 0,
+ quantums: amount - fee_amount,
+ }
+ .to_any();
+ let final_tx = client
+ .builder
+ .build_transaction(&account, once(final_msg), Some(fee))?;
+ let tx_hash = client.broadcast_transaction(final_tx).await?;
+ tracing::info!("Withdraw transaction hash: {:?}", tx_hash);
+
+ Ok(())
+}
diff --git a/v4-client-rs/client/src/config.rs b/v4-client-rs/client/src/config.rs
new file mode 100644
index 00000000..5ed33acf
--- /dev/null
+++ b/v4-client-rs/client/src/config.rs
@@ -0,0 +1,33 @@
+#[cfg(feature = "faucet")]
+use super::faucet::FaucetConfig;
+#[cfg(feature = "noble")]
+use super::noble::NobleConfig;
+use super::{indexer::IndexerConfig, node::NodeConfig};
+use anyhow::Error;
+use serde::Deserialize;
+use std::path::Path;
+use tokio::fs;
+
+/// Serves as a configuration wrapper over configurations for specific clients.
+#[derive(Debug, Deserialize)]
+pub struct ClientConfig {
+ /// Configuration for [`IndexerClient`](crate::indexer::IndexerClient)
+ pub indexer: IndexerConfig,
+ /// Configuration for [`NodeClient`](crate::node::NodeClient)
+ pub node: NodeConfig,
+ /// Configuration for [`FaucetClient`](crate::faucet::FaucetClient)
+ #[cfg(feature = "faucet")]
+    pub faucet: Option<FaucetConfig>,
+    /// Configuration for [`NobleClient`](crate::noble::NobleClient)
+    #[cfg(feature = "noble")]
+    pub noble: Option<NobleConfig>,
+}
+
+impl ClientConfig {
+ /// Creates a new `ClientConfig` instance from a TOML file at the given path
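+    ///
+    /// The file is expected to contain an `[indexer]` table (with `http`/`ws`, or `rest`/`sock`,
+    /// sub-tables) and a `[node]` table; `[faucet]` and `[noble]` are optional and feature-gated.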
+    pub async fn from_file(path: impl AsRef<Path>) -> Result<Self, Error> {
+ let toml_str = fs::read_to_string(path).await?;
+ let config = toml::from_str(&toml_str)?;
+ Ok(config)
+ }
+}
diff --git a/v4-client-rs/client/src/faucet.rs b/v4-client-rs/client/src/faucet.rs
new file mode 100644
index 00000000..d75032f1
--- /dev/null
+++ b/v4-client-rs/client/src/faucet.rs
@@ -0,0 +1,84 @@
+pub use crate::indexer::{Address, Subaccount, SubaccountNumber, Usdc};
+use anyhow::{anyhow as err, Error};
+use bigdecimal::num_traits::ToPrimitive;
+use reqwest::Client;
+use serde::{Deserialize, Serialize};
+
+/// Configuration for the Faucet client.
+#[derive(Debug, Deserialize)]
+pub struct FaucetConfig {
+ /// The base url of the faucet service.
+ pub endpoint: String,
+}
+
+/// [Faucet](https://docs.dydx.exchange/infrastructure_providers-network/faucet)
+/// serves as a source of funds for test purposes.
+///
+/// See also [What is a Crypto Faucet?](https://dydx.exchange/crypto-learning/crypto-faucet).
+///
+/// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/faucet_endpoint.rs).
+#[derive(Debug)]
+pub struct FaucetClient {
+ config: FaucetConfig,
+ client: Client,
+}
+
+#[derive(Serialize)]
+#[serde(rename_all = "camelCase")]
+struct FillReq<'t> {
+ address: &'t Address,
+ subaccount_number: &'t SubaccountNumber,
+ amount: u64,
+}
+
+#[derive(Serialize)]
+#[serde(rename_all = "camelCase")]
+struct FillNativeReq<'t> {
+ address: &'t Address,
+}
+
+impl FaucetClient {
+ /// Creates a new `FaucetClient`
+ pub fn new(config: FaucetConfig) -> Self {
+ Self {
+ config,
+ client: Client::default(),
+ }
+ }
+
+    /// Add USDC to a subaccount.
+ pub async fn fill(&self, subaccount: &Subaccount, amount: &Usdc) -> Result<(), Error> {
+ const URI: &str = "/faucet/tokens";
+ let url = format!("{}{URI}", self.config.endpoint);
+ let body = FillReq {
+ address: &subaccount.address,
+ subaccount_number: &subaccount.number,
+ amount: amount
+ .to_u64()
+ .ok_or_else(|| err!("Failed converting USDC amount to u64"))?,
+ };
+ let _resp = self
+ .client
+ .post(url)
+ .json(&body)
+ .send()
+ .await?
+ .error_for_status()?;
+ Ok(())
+ }
+
+ /// add native dYdX testnet token to an address
+ pub async fn fill_native(&self, address: &Address) -> Result<(), Error> {
+ const URI: &str = "/faucet/native-token";
+ let url = format!("{}{URI}", self.config.endpoint);
+ let body = FillNativeReq { address };
+ let _resp = self
+ .client
+ .post(url)
+ .json(&body)
+ .send()
+ .await?
+ .error_for_status()?;
+ Ok(())
+ }
+}
diff --git a/v4-client-rs/client/src/indexer/config.rs b/v4-client-rs/client/src/indexer/config.rs
new file mode 100644
index 00000000..64248ac6
--- /dev/null
+++ b/v4-client-rs/client/src/indexer/config.rs
@@ -0,0 +1,13 @@
+pub use crate::indexer::{rest::RestConfig, sock::SockConfig};
+use serde::Deserialize;
+
+/// Indexer client configuration.
+#[derive(Clone, Debug, Deserialize)]
+pub struct IndexerConfig {
+ /// Indexer REST client configuration.
+ #[serde(alias = "http")]
+ pub rest: RestConfig,
+ /// Indexer Websocket client configuration.
+ #[serde(alias = "ws")]
+ pub sock: SockConfig,
+}
diff --git a/v4-client-rs/client/src/indexer/mod.rs b/v4-client-rs/client/src/indexer/mod.rs
new file mode 100644
index 00000000..8f26c0ab
--- /dev/null
+++ b/v4-client-rs/client/src/indexer/mod.rs
@@ -0,0 +1,51 @@
+/// Indexer client configuration.
+pub mod config;
+mod rest;
+mod sock;
+/// Tokens.
+pub mod tokens;
+/// Types for Indexer data.
+pub mod types;
+
+pub use config::IndexerConfig;
+pub use rest::*;
+pub use sock::*;
+pub use tokens::*;
+pub use types::*;
+
+/// Indexer client.
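+///
+/// A minimal usage sketch (the config path follows the repository examples):
+///
+/// ```no_run
+/// # use dydx_v4_rust::config::ClientConfig;
+/// # use dydx_v4_rust::indexer::IndexerClient;
+/// # async fn example() -> anyhow::Result<()> {
+/// let config = ClientConfig::from_file("client/tests/testnet.toml").await?;
+/// let indexer = IndexerClient::new(config.indexer);
+/// let markets = indexer.markets().list_perpetual_markets(None).await?;
+/// println!("{} markets", markets.len());
+/// # Ok(())
+/// # }
+/// ```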
+#[derive(Debug)]
+pub struct IndexerClient {
+ rest: RestClient,
+ sock: SockClient,
+}
+
+impl IndexerClient {
+ /// Create a new Indexer client.
+ pub fn new(config: IndexerConfig) -> Self {
+ Self {
+ rest: RestClient::new(config.rest),
+ sock: SockClient::new(config.sock),
+ }
+ }
+
+ /// Get accounts query dispatcher.
+ pub fn accounts(&self) -> rest::Accounts {
+ self.rest.accounts()
+ }
+
+ /// Get markets query dispatcher.
+ pub fn markets(&self) -> rest::Markets {
+ self.rest.markets()
+ }
+
+ /// Get utility query dispatcher.
+ pub fn utility(&self) -> rest::Utility {
+ self.rest.utility()
+ }
+
+ /// Get feeds dispatcher.
+ pub fn feed(&mut self) -> Feeds<'_> {
+ Feeds::new(&mut self.sock)
+ }
+}
diff --git a/v4-client-rs/client/src/indexer/rest/client/accounts.rs b/v4-client-rs/client/src/indexer/rest/client/accounts.rs
new file mode 100644
index 00000000..5da56641
--- /dev/null
+++ b/v4-client-rs/client/src/indexer/rest/client/accounts.rs
@@ -0,0 +1,520 @@
+use super::*;
+use anyhow::Error;
+
+/// Accounts dispatcher.
+///
+/// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/account_endpoint.rs).
+pub struct Accounts<'a> {
+ rest: &'a RestClient,
+}
+
+impl<'a> Accounts<'a> {
+ /// Create a new accounts dispatcher.
+ pub(crate) fn new(rest: &'a RestClient) -> Self {
+ Self { rest }
+ }
+
+    /// Query for all subaccounts of an address.
+ ///
+ /// Compare with [`Self::get_subaccount`].
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getaddress).
+    pub async fn get_subaccounts(&self, address: &Address) -> Result<AddressResponse, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/addresses";
+ let url = format!("{}{URI}/{address}", rest.config.endpoint);
+ let resp = rest
+ .client
+ .get(url)
+ .send()
+ .await?
+ .error_for_status()?
+ .json()
+ .await?;
+ Ok(resp)
+ }
+
+ /// Query for the subaccount, its current perpetual and asset positions, margin and collateral.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getsubaccount).
+ pub async fn get_subaccount(
+ &self,
+ subaccount: &Subaccount,
+    ) -> Result<SubaccountResponseObject, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/addresses";
+ let address = &subaccount.address;
+ let number = &subaccount.number;
+ let url = format!(
+ "{}{URI}/{address}/subaccountNumber/{number}",
+ rest.config.endpoint
+ );
+ let subaccount = rest
+ .client
+ .get(url)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<SubaccountResponse>()
+ .await?
+ .subaccount;
+ Ok(subaccount)
+ }
+
+ /// Query for the parent subaccount, its child subaccounts, equity, collateral and margin.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getparentsubaccount).
+ pub async fn get_parent_subaccount(
+ &self,
+ subaccount: &ParentSubaccount,
+    ) -> Result<ParentSubaccountResponseObject, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/addresses";
+ let address = &subaccount.address;
+ let number = &subaccount.number;
+ let url = format!(
+ "{}{URI}/{address}/parentSubaccountNumber/{number}",
+ rest.config.endpoint
+ );
+ let subaccount = rest
+ .client
+ .get(url)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<ParentSubaccountResponse>()
+ .await?
+ .subaccount;
+ Ok(subaccount)
+ }
+
+ /// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/close_all_positions.rs).
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#listpositions).
+ pub async fn list_positions(
+ &self,
+ subaccount: &Subaccount,
+        opts: Option<ListPositionsOpts>,
+    ) -> Result<Vec<PerpetualPositionResponseObject>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/perpetualPositions";
+ let url = format!("{}{URI}", rest.config.endpoint);
+ let query = Query {
+ address: &subaccount.address,
+ subaccount_number: &subaccount.number,
+ };
+ let options = opts.unwrap_or_default();
+ let positions = rest
+ .client
+ .get(url)
+ .query(&query)
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<PerpetualPositionResponse>()
+ .await?
+ .positions;
+ Ok(positions)
+ }
+
+ /// List all positions of a parent subaccount.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#listpositionsforparentsubaccount).
+ pub async fn list_parent_positions(
+ &self,
+ subaccount: &ParentSubaccount,
+        opts: Option<ListPositionsOpts>,
+    ) -> Result<Vec<PerpetualPositionResponseObject>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/perpetualPositions";
+ let url = format!("{}{URI}/parentSubaccountNumber", rest.config.endpoint);
+ let query = QueryParent {
+ address: &subaccount.address,
+ parent_subaccount_number: &subaccount.number,
+ };
+ let options = opts.unwrap_or_default();
+ let positions = rest
+ .client
+ .get(url)
+ .query(&query)
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<PerpetualPositionResponse>()
+ .await?
+ .positions;
+ Ok(positions)
+ }
+
+ /// Query for asset positions (size, buy/sell etc).
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getassetpositions).
+ pub async fn get_asset_positions(
+ &self,
+ subaccount: &Subaccount,
+    ) -> Result<Vec<AssetPositionResponseObject>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/assetPositions";
+ let url = format!("{}{URI}", rest.config.endpoint);
+ let query = Query {
+ address: &subaccount.address,
+ subaccount_number: &subaccount.number,
+ };
+ let positions = rest
+ .client
+ .get(url)
+ .query(&query)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<AssetPositionResponse>()
+ .await?
+ .positions;
+ Ok(positions)
+ }
+
+ /// Query for asset positions (size, buy/sell etc) for a parent subaccount.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getassetpositionsforparentsubaccount).
+ pub async fn get_parent_asset_positions(
+ &self,
+ subaccount: &ParentSubaccount,
+    ) -> Result<Vec<AssetPositionResponseObject>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/assetPositions";
+ let url = format!("{}{URI}/parentSubaccountNumber", rest.config.endpoint);
+ let query = QueryParent {
+ address: &subaccount.address,
+ parent_subaccount_number: &subaccount.number,
+ };
+ let positions = rest
+ .client
+ .get(url)
+ .query(&query)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<AssetPositionResponse>()
+ .await?
+ .positions;
+ Ok(positions)
+ }
+
+ /// Query for transfers between subaccounts.
+ ///
+ /// See also [`crate::node::NodeClient::transfer`].
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gettransfers).
+ pub async fn get_transfers(
+ &self,
+ subaccount: &Subaccount,
+        opts: Option<GetTransfersOpts>,
+    ) -> Result<Vec<TransferResponseObject>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/transfers";
+ let url = format!("{}{URI}", rest.config.endpoint);
+ let query = Query {
+ address: &subaccount.address,
+ subaccount_number: &subaccount.number,
+ };
+ let options = opts.unwrap_or_default();
+ let transfers = rest
+ .client
+ .get(url)
+ .query(&query)
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<TransferResponse>()
+ .await?
+ .transfers;
+ Ok(transfers)
+ }
+
+ /// Query for transfers between subaccounts associated with a parent subaccount.
+ ///
+ /// See also [`crate::node::NodeClient::transfer`].
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gettransfersforparentsubaccount).
+ pub async fn get_parent_transfers(
+ &self,
+ subaccount: &ParentSubaccount,
+        opts: Option<GetTransfersOpts>,
+    ) -> Result<Vec<TransferResponseObject>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/transfers";
+ let url = format!("{}{URI}/parentSubaccountNumber", rest.config.endpoint);
+ let query = QueryParent {
+ address: &subaccount.address,
+ parent_subaccount_number: &subaccount.number,
+ };
+ let options = opts.unwrap_or_default();
+ let transfers = rest
+ .client
+ .get(url)
+ .query(&query)
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<TransferResponse>()
+ .await?
+ .transfers;
+ Ok(transfers)
+ }
+
+ /// Query for orders filtered by order params.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#listorders).
+ pub async fn list_orders(
+ &self,
+ subaccount: &Subaccount,
+        opts: Option<ListOrdersOpts>,
+    ) -> Result<ListOrdersResponse, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/orders";
+ let url = format!("{}{URI}", rest.config.endpoint);
+ let query = Query {
+ address: &subaccount.address,
+ subaccount_number: &subaccount.number,
+ };
+ let options = opts.unwrap_or_default();
+ let orders = rest
+ .client
+ .get(url)
+ .query(&query)
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+ .json()
+ .await?;
+ Ok(orders)
+ }
+
+ /// Query for orders filtered by order params of a parent subaccount.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#listordersforparentsubaccount).
+ pub async fn list_parent_orders(
+ &self,
+ subaccount: &ParentSubaccount,
+        opts: Option<ListOrdersOpts>,
+    ) -> Result<ListOrdersResponse, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/orders";
+ let url = format!("{}{URI}/parentSubaccountNumber", rest.config.endpoint);
+ let query = QueryParent {
+ address: &subaccount.address,
+ parent_subaccount_number: &subaccount.number,
+ };
+ let options = opts.unwrap_or_default();
+ let orders = rest
+ .client
+ .get(url)
+ .query(&query)
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+ .json()
+ .await?;
+ Ok(orders)
+ }
+
+ /// Query for the order.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getorder).
+    pub async fn get_order(&self, order_id: &OrderId) -> Result<OrderResponseObject, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/orders";
+ let url = format!("{}{URI}/{order_id}", rest.config.endpoint);
+ let order = rest
+ .client
+ .get(url)
+ .send()
+ .await?
+ .error_for_status()?
+ .json()
+ .await?;
+ Ok(order)
+ }
+
+ /// Query for fills (i.e. filled orders data).
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getfills).
+ pub async fn get_fills(
+ &self,
+ subaccount: &Subaccount,
+        opts: Option<GetFillsOpts>,
+    ) -> Result<Vec<FillResponseObject>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/fills";
+ let url = format!("{}{URI}", rest.config.endpoint);
+ let query = Query {
+ address: &subaccount.address,
+ subaccount_number: &subaccount.number,
+ };
+ let options = opts.unwrap_or_default();
+ let fills = rest
+ .client
+ .get(url)
+ .query(&query)
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<FillResponse>()
+ .await?
+ .fills;
+ Ok(fills)
+ }
+
+ /// Query for fills (i.e. filled orders data) for a parent subaccount.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getfillsforparentsubaccount).
+ pub async fn get_parent_fills(
+ &self,
+ subaccount: &ParentSubaccount,
+        opts: Option<GetFillsOpts>,
+    ) -> Result<Vec<FillResponseObject>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/fills";
+ let url = format!("{}{URI}/parentSubaccountNumber", rest.config.endpoint);
+ let query = QueryParent {
+ address: &subaccount.address,
+ parent_subaccount_number: &subaccount.number,
+ };
+ let options = opts.unwrap_or_default();
+ let fills = rest
+ .client
+ .get(url)
+ .query(&query)
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<FillResponse>()
+ .await?
+ .fills;
+ Ok(fills)
+ }
+
+ /// Query for profit and loss report for the specified time/block range.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gethistoricalpnl).
+ pub async fn get_historical_pnl(
+ &self,
+ subaccount: &Subaccount,
+        opts: Option<GetHistoricalPnlOpts>,
+    ) -> Result<Vec<PnlTicksResponseObject>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/historical-pnl";
+ let url = format!("{}{URI}", rest.config.endpoint);
+ let query = Query {
+ address: &subaccount.address,
+ subaccount_number: &subaccount.number,
+ };
+ let options = opts.unwrap_or_default();
+ let pnls = rest
+ .client
+ .get(url)
+ .query(&query)
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<HistoricalPnlResponse>()
+ .await?
+ .historical_pnl;
+ Ok(pnls)
+ }
+
+ /// Query for profit and loss report for the specified time/block range of a parent subaccount.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gethistoricalpnlforparentsubaccount).
+ pub async fn get_parent_historical_pnl(
+ &self,
+ subaccount: &ParentSubaccount,
+        opts: Option<GetHistoricalPnlOpts>,
+    ) -> Result<Vec<PnlTicksResponseObject>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/historical-pnl";
+ let url = format!("{}{URI}/parentSubaccountNumber", rest.config.endpoint);
+ let query = QueryParent {
+ address: &subaccount.address,
+ parent_subaccount_number: &subaccount.number,
+ };
+ let options = opts.unwrap_or_default();
+ let pnls = rest
+ .client
+ .get(url)
+ .query(&query)
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<HistoricalPnlResponse>()
+ .await?
+ .historical_pnl;
+ Ok(pnls)
+ }
+
+ /// Get trader's rewards.
+ ///
+ /// See also [Trading Rewards](https://docs.dydx.exchange/concepts-trading/rewards_fees_and_parameters#trading-rewards).
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gettradingrewards).
+ pub async fn get_rewards(
+ &self,
+ address: &Address,
+        opts: Option<GetTradingRewardsOpts>,
+    ) -> Result<Vec<HistoricalBlockTradingReward>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/historicalBlockTradingRewards";
+ let url = format!("{}{URI}/{address}", rest.config.endpoint);
+ let options = opts.unwrap_or_default();
+ let rewards = rest
+ .client
+ .get(url)
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<HistoricalBlockTradingRewardsResponse>()
+ .await?
+ .rewards;
+ Ok(rewards)
+ }
+
+ /// Get trader's rewards aggregation.
+ ///
+ /// See also [`Self::get_rewards`].
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getaggregations).
+ pub async fn get_rewards_aggregated(
+ &self,
+ address: &Address,
+ period: TradingRewardAggregationPeriod,
+        opts: Option<GetAggregationsOpts>,
+    ) -> Result<Vec<HistoricalTradingRewardAggregation>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/historicalTradingRewardAggregations";
+ let url = format!("{}{URI}/{address}", rest.config.endpoint);
+ let options = opts.unwrap_or_default();
+ let aggregated = rest
+ .client
+ .get(url)
+ .query(&[("period", &period)])
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<HistoricalTradingRewardAggregationsResponse>()
+ .await?
+ .rewards;
+ Ok(aggregated)
+ }
+}
diff --git a/v4-client-rs/client/src/indexer/rest/client/markets.rs b/v4-client-rs/client/src/indexer/rest/client/markets.rs
new file mode 100644
index 00000000..eb45f526
--- /dev/null
+++ b/v4-client-rs/client/src/indexer/rest/client/markets.rs
@@ -0,0 +1,174 @@
+use super::*;
+use anyhow::{anyhow as err, Error};
+use std::collections::HashMap;
+
+/// Markets dispatcher.
+///
+/// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/market_endpoint.rs).
+pub struct Markets<'a> {
+ rest: &'a RestClient,
+}
+
+impl<'a> Markets<'a> {
+ /// Create a new markets dispatcher.
+ pub(crate) fn new(rest: &'a RestClient) -> Self {
+ Self { rest }
+ }
+
+ /// Query for perpetual markets data.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#listperpetualmarkets).
+ pub async fn list_perpetual_markets(
+ &self,
+        opts: Option<ListPerpetualMarketsOpts>,
+    ) -> Result<HashMap<Ticker, PerpetualMarket>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/perpetualMarkets";
+ let url = format!("{}{URI}", rest.config.endpoint);
+ let options = opts.unwrap_or_default();
+ let markets = rest
+ .client
+ .get(url)
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<ListPerpetualMarketsResponse>()
+ .await?
+ .markets;
+ Ok(markets)
+ }
+
+ /// Query for the perpetual market.
+    pub async fn get_perpetual_market(&self, ticker: &Ticker) -> Result<PerpetualMarket, Error> {
+ let mut markets = self
+ .list_perpetual_markets(Some(ListPerpetualMarketsOpts {
+ limit: Some(1),
+ ticker: Some(ticker.clone()),
+ }))
+ .await?;
+ markets
+ .remove(ticker)
+ .ok_or_else(|| err!("Market ticker not found in list Markets response"))
+ }
+
+ /// Query for bids-asks for the perpetual market.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getperpetualmarket).
+ pub async fn get_perpetual_market_orderbook(
+ &self,
+ ticker: &Ticker,
+    ) -> Result<OrderbookResponseObject, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/orderbooks/perpetualMarket";
+ let url = format!("{}{URI}/{ticker}", rest.config.endpoint);
+ let orderbook = rest
+ .client
+ .get(url)
+ .send()
+ .await?
+ .error_for_status()?
+ .json()
+ .await?;
+ Ok(orderbook)
+ }
+
+ /// Query for trades.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gettrades).
+ pub async fn get_trades(
+ &self,
+ ticker: &Ticker,
+        opts: Option<GetTradesOpts>,
+    ) -> Result<Vec<TradeResponseObject>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/trades/perpetualMarket";
+ let url = format!("{}{URI}/{ticker}", rest.config.endpoint);
+ let options = opts.unwrap_or_default();
+ let trades = rest
+ .client
+ .get(url)
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<TradeResponse>()
+ .await?
+ .trades;
+ Ok(trades)
+ }
+
+ /// Query for [candles](https://dydx.exchange/crypto-learning/candlestick-patterns).
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getcandles).
+ pub async fn get_candles(
+ &self,
+ ticker: &Ticker,
+ res: CandleResolution,
+        opts: Option<GetCandlesOpts>,
+    ) -> Result<Vec<CandleResponseObject>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/candles/perpetualMarkets";
+ let url = format!("{}{URI}/{ticker}", rest.config.endpoint);
+ let options = opts.unwrap_or_default();
+ let candles = rest
+ .client
+ .get(url)
+ .query(&[("resolution", &res)])
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<CandleResponse>()
+ .await?
+ .candles;
+ Ok(candles)
+ }
+
+ /// Query for funding till time/block specified.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gethistoricalfunding).
+ pub async fn get_historical_funding(
+ &self,
+ ticker: &Ticker,
+        opts: Option<GetHistoricalFundingOpts>,
+    ) -> Result<Vec<HistoricalFundingResponseObject>, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/historicalFunding";
+ let url = format!("{}{URI}/{ticker}", rest.config.endpoint);
+ let options = opts.unwrap_or_default();
+ let funding = rest
+ .client
+ .get(url)
+ .query(&options)
+ .send()
+ .await?
+ .error_for_status()?
+            .json::<HistoricalFundingResponse>()
+ .await?
+ .historical_funding;
+ Ok(funding)
+ }
+
+ /// Query for [sparklines](https://en.wikipedia.org/wiki/Sparkline).
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#get).
+ pub async fn get_sparklines(
+ &self,
+ period: SparklineTimePeriod,
+    ) -> Result<SparklineResponseObject, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/sparklines";
+ let url = format!("{}{URI}", rest.config.endpoint);
+ let sparklines = rest
+ .client
+ .get(url)
+ .query(&[("timePeriod", &period)])
+ .send()
+ .await?
+ .error_for_status()?
+ .json()
+ .await?;
+ Ok(sparklines)
+ }
+}
diff --git a/v4-client-rs/client/src/indexer/rest/client/mod.rs b/v4-client-rs/client/src/indexer/rest/client/mod.rs
new file mode 100644
index 00000000..9b27af44
--- /dev/null
+++ b/v4-client-rs/client/src/indexer/rest/client/mod.rs
@@ -0,0 +1,58 @@
+pub mod accounts;
+pub mod markets;
+pub mod utility;
+
+use super::config::RestConfig;
+use super::options::*;
+use crate::indexer::{rest::types::*, types::*};
+use accounts::Accounts;
+use markets::Markets;
+use reqwest::Client;
+use serde::Serialize;
+use utility::Utility;
+
+#[derive(Serialize)]
+#[serde(rename_all = "camelCase")]
+struct Query<'t> {
+ address: &'t Address,
+ subaccount_number: &'t SubaccountNumber,
+}
+
+#[derive(Serialize)]
+#[serde(rename_all = "camelCase")]
+struct QueryParent<'t> {
+ address: &'t Address,
+ parent_subaccount_number: &'t ParentSubaccountNumber,
+}
+
+/// REST client to Indexer.
+#[derive(Debug)]
+pub(crate) struct RestClient {
+ config: RestConfig,
+ client: Client,
+}
+
+impl RestClient {
+ /// Create a new Indexer REST client.
+ pub(crate) fn new(config: RestConfig) -> Self {
+ Self {
+ config,
+ client: Client::default(),
+ }
+ }
+
+ /// Get accounts query dispatcher.
+ pub(crate) fn accounts(&self) -> Accounts<'_> {
+ Accounts::new(self)
+ }
+
+ /// Get markets query dispatcher.
+ pub(crate) fn markets(&self) -> Markets<'_> {
+ Markets::new(self)
+ }
+
+ /// Get utility query dispatcher.
+ pub(crate) fn utility(&self) -> Utility<'_> {
+ Utility::new(self)
+ }
+}
diff --git a/v4-client-rs/client/src/indexer/rest/client/utility.rs b/v4-client-rs/client/src/indexer/rest/client/utility.rs
new file mode 100644
index 00000000..28a51e18
--- /dev/null
+++ b/v4-client-rs/client/src/indexer/rest/client/utility.rs
@@ -0,0 +1,71 @@
+use super::*;
+use anyhow::Error;
+
+/// Other data dispatcher.
+///
+/// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/utility_endpoint.rs).
+pub struct Utility<'a> {
+ rest: &'a RestClient,
+}
+
+impl<'a> Utility<'a> {
+ /// Create a new utility dispatcher.
+ pub(crate) fn new(rest: &'a RestClient) -> Self {
+ Self { rest }
+ }
+
+ /// Current server time (UTC) of Indexer.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#gettime).
+    pub async fn get_time(&self) -> Result<TimeResponse, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/time";
+ let url = format!("{}{URI}", rest.config.endpoint);
+ let resp = rest
+ .client
+ .get(url)
+ .send()
+ .await?
+ .error_for_status()?
+ .json()
+ .await?;
+ Ok(resp)
+ }
+
+ /// Current block height and block time (UTC) parsed by Indexer.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#getheight).
+    pub async fn get_height(&self) -> Result<HeightResponse, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/height";
+ let url = format!("{}{URI}", rest.config.endpoint);
+ let resp = rest
+ .client
+ .get(url)
+ .send()
+ .await?
+ .error_for_status()?
+ .json()
+ .await?;
+ Ok(resp)
+ }
+
+ /// Query for screening results (compliance) of the address.
+ ///
+ /// [Reference](https://docs.dydx.exchange/api_integration-indexer/indexer_api#screen).
+    pub async fn get_screen(&self, query: &Address) -> Result<ComplianceResponse, Error> {
+ let rest = &self.rest;
+ const URI: &str = "/v4/screen";
+ let url = format!("{}{URI}", rest.config.endpoint);
+ let resp = rest
+ .client
+ .get(url)
+ .query(&[("address", query)])
+ .send()
+ .await?
+ .error_for_status()?
+ .json()
+ .await?;
+ Ok(resp)
+ }
+}
diff --git a/v4-client-rs/client/src/indexer/rest/config.rs b/v4-client-rs/client/src/indexer/rest/config.rs
new file mode 100644
index 00000000..7d7d7e88
--- /dev/null
+++ b/v4-client-rs/client/src/indexer/rest/config.rs
@@ -0,0 +1,10 @@
+use serde::Deserialize;
+
+/// REST Indexer client configuration.
+#[derive(Clone, Debug, Deserialize)]
+pub struct RestConfig {
+ /// REST endpoint.
+ ///
+ /// You can select REST endpoints from [the list](https://docs.dydx.exchange/infrastructure_providers-network/resources#indexer-endpoints).
+ pub endpoint: String,
+}
diff --git a/v4-client-rs/client/src/indexer/rest/mod.rs b/v4-client-rs/client/src/indexer/rest/mod.rs
new file mode 100644
index 00000000..dc71ef71
--- /dev/null
+++ b/v4-client-rs/client/src/indexer/rest/mod.rs
@@ -0,0 +1,13 @@
+mod client;
+mod config;
+mod options;
+mod types;
+
+pub(crate) use client::RestClient;
+pub use config::RestConfig;
+pub use options::*;
+pub use types::*;
+
+pub use client::accounts::Accounts;
+pub use client::markets::Markets;
+pub use client::utility::Utility;
diff --git a/v4-client-rs/client/src/indexer/rest/options.rs b/v4-client-rs/client/src/indexer/rest/options.rs
new file mode 100644
index 00000000..a9decada
--- /dev/null
+++ b/v4-client-rs/client/src/indexer/rest/options.rs
@@ -0,0 +1,157 @@
+use crate::indexer::types::*;
+use chrono::{DateTime, Utc};
+use serde::Serialize;
+
+/// Filter options for perpetual markets.
+#[derive(Serialize, Default)]
+#[serde(rename_all = "camelCase")]
+pub struct ListPerpetualMarketsOpts {
+ /// Limit.
+    pub limit: Option<u32>,
+    /// Ticker.
+    pub ticker: Option<Ticker>,
+}
+
+/// Filter options for trades.
+#[derive(Serialize, Default)]
+#[serde(rename_all = "camelCase")]
+pub struct GetTradesOpts {
+ /// Limit.
+    pub limit: Option<u32>,
+    /// Block height.
+    pub created_before_or_at_height: Option<Height>,
+    /// Time.
+    pub created_before_or_at: Option<DateTime<Utc>>,
+}
+
+/// Filter options for candles.
+#[derive(Serialize, Default)]
+#[serde(rename_all = "camelCase")]
+pub struct GetCandlesOpts {
+ /// Limit.
+    pub limit: Option<u32>,
+    /// Time.
+    #[serde(rename = "fromISO")]
+    pub from_iso: Option<DateTime<Utc>>,
+    /// Time.
+    #[serde(rename = "toISO")]
+    pub to_iso: Option<DateTime<Utc>>,
+}
+
+/// Filter options for fundings.
+#[derive(Serialize, Default)]
+#[serde(rename_all = "camelCase")]
+pub struct GetHistoricalFundingOpts {
+ /// Limit.
+ pub limit: Option,
+ /// Block height.
+ pub effective_before_or_at_height: Option,
+ /// Time.
+ pub effective_before_or_at: Option>,
+}
+
+/// Filter options for positions.
+#[derive(Serialize, Default)]
+#[serde(rename_all = "camelCase")]
+pub struct ListPositionsOpts {
+    /// Perpetual position status.
+    pub status: Option<PerpetualPositionStatus>,
+    /// Limit.
+    pub limit: Option<u32>,
+    /// Block height.
+    pub created_before_or_at_height: Option<Height>,
+    /// Time.
+    pub created_before_or_at: Option<DateTime<Utc>>,
+}
+
+/// Filter options for transfers.
+#[derive(Serialize, Default)]
+#[serde(rename_all = "camelCase")]
+pub struct GetTransfersOpts {
+ /// Limit.
+    pub limit: Option<u32>,
+    /// Block height.
+    pub created_before_or_at_height: Option<Height>,
+    /// Time.
+    pub created_before_or_at: Option<DateTime<Utc>>,
+}
+
+/// Filter options for orders.
+#[derive(Serialize, Default)]
+#[serde(rename_all = "camelCase")]
+pub struct ListOrdersOpts {
+ /// Limit.
+    pub limit: Option<u32>,
+    /// Ticker.
+    pub ticker: Option<Ticker>,
+    /// Side (buy/sell).
+    pub side: Option<OrderSide>,
+    // TODO: arrays are also supported
+    /// Order status.
+    pub status: Option<OrderStatus>,
+    /// Order type.
+    #[serde(rename = "type")]
+    pub order_type: Option<OrderType>,
+    /// Block height.
+    pub good_til_block_before_or_at: Option<Height>,
+    /// Time.
+    pub good_til_block_time_before_or_at: Option<DateTime<Utc>>,
+    /// Whether to return the latest orders.
+    pub return_latest_orders: Option<bool>,
+}
+
+/// Filter options for fills.
+#[derive(Serialize, Default)]
+#[serde(rename_all = "camelCase")]
+pub struct GetFillsOpts {
+ /// Limit.
+    pub limit: Option<u32>,
+    /// Block height.
+    pub created_before_or_at_height: Option<Height>,
+    /// Time.
+    pub created_before_or_at: Option<DateTime<Utc>>,
+    /// Ticker.
+    pub market: Option<Ticker>,
+    /// Market type.
+    pub market_type: Option<MarketType>,
+}
+
+/// Filter options for profit and loss.
+#[derive(Serialize, Default)]
+#[serde(rename_all = "camelCase")]
+pub struct GetHistoricalPnlOpts {
+ /// Limit.
+    pub limit: Option<u32>,
+    /// Block height.
+    pub created_before_or_at_height: Option<Height>,
+    /// Time.
+    pub created_before_or_at: Option<DateTime<Utc>>,
+    /// Block height.
+    pub created_on_or_after_height: Option<Height>,
+    /// Time.
+    pub created_on_or_after: Option<DateTime<Utc>>,
+}
+
+/// Filter options for rewards.
+#[derive(Serialize, Default)]
+#[serde(rename_all = "camelCase")]
+pub struct GetTradingRewardsOpts {
+ /// Limit.
+    pub limit: Option<u32>,
+    /// Block height.
+    pub starting_before_or_at_height: Option<Height>,
+    /// Time.
+    pub starting_before_or_at: Option<DateTime<Utc>>,
+}
+
+/// Filter options for aggregated rewards.
+#[derive(Serialize, Default)]
+#[serde(rename_all = "camelCase")]
+pub struct GetAggregationsOpts {
+ /// Limit.
+    pub limit: Option<u32>,
+    /// Block height.
+    pub starting_before_or_at_height: Option<Height>,
+    /// Time.
+    pub starting_before_or_at: Option<DateTime<Utc>>,
+}
diff --git a/v4-client-rs/client/src/indexer/rest/types.rs b/v4-client-rs/client/src/indexer/rest/types.rs
new file mode 100644
index 00000000..8c830e00
--- /dev/null
+++ b/v4-client-rs/client/src/indexer/rest/types.rs
@@ -0,0 +1,303 @@
+use bigdecimal::BigDecimal;
+use chrono::{DateTime, Utc};
+use derive_more::{Display, From};
+use serde::{Deserialize, Serialize};
+use std::collections::HashMap;
+
+use crate::indexer::types::*;
+
+/// REST Indexer response error.
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ResponseError {
+ /// Errors.
+    pub errors: Vec<ErrorMsg>,
+}
+
+/// REST Indexer error message.
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ErrorMsg {
+ /// Message.
+ pub msg: String,
+ /// Parameter.
+ pub param: String,
+ /// Location.
+ pub location: String,
+}
+
+/// Profit and loss tick id.
+#[derive(Deserialize, Debug, Clone, From, Display, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct PnlTickId(pub String);
+
+/// Transfer id.
+#[derive(
+ Serialize, Deserialize, Debug, Clone, From, Display, PartialEq, Eq, PartialOrd, Ord, Hash,
+)]
+pub struct TransferId(pub String);
+
+/// Period to aggregate rewards over.
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
+pub enum TradingRewardAggregationPeriod {
+ /// Day.
+ Daily,
+ /// Week.
+ Weekly,
+ /// Month.
+ Monthly,
+}
+
+/// Sparkline time period.
+#[derive(Serialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
+pub enum SparklineTimePeriod {
+ /// 1 day.
+ OneDay,
+ /// 7 days.
+ SevenDays,
+}
+
+/// Fundings response.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct HistoricalFundingResponse {
+ /// List of fundings
+    pub historical_funding: Vec<HistoricalFundingResponseObject>,
+}
+
+/// Funding response.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct HistoricalFundingResponseObject {
+ /// Market ticker.
+ pub ticker: Ticker,
+ /// Time.
+    pub effective_at: DateTime<Utc>,
+ /// Block height.
+ pub effective_at_height: Height,
+ /// Price.
+ pub price: Price,
+ /// Funding rate.
+ pub rate: BigDecimal,
+}
+
+/// Sparkline response.
+pub type SparklineResponseObject = HashMap<Ticker, Vec<Price>>;
+
+/// Indexer server time.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct TimeResponse {
+ /// Time (UTC).
+    pub iso: DateTime<Utc>,
+ /// Unix epoch.
+ pub epoch: f64,
+}
+
+/// Compliance response.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct ComplianceResponse {
+ /// Whether the address is restricted.
+ pub restricted: bool,
+ /// Reason.
+ pub reason: Option,
+}
+
+/// Address response.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct AddressResponse {
+ /// List of all subaccounts.
+    pub subaccounts: Vec<SubaccountResponseObject>,
+ /// Total rewards.
+ pub total_trading_rewards: BigDecimal,
+}
+
+/// Subaccount response.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct SubaccountResponse {
+ /// Subaccount.
+ pub subaccount: SubaccountResponseObject,
+}
+
+/// Parent subaccount response.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct ParentSubaccountResponse {
+ /// Subaccount.
+ pub subaccount: ParentSubaccountResponseObject,
+}
+
+/// Asset positions response.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct AssetPositionResponse {
+ /// Asset positions.
+    pub positions: Vec<AssetPositionResponseObject>,
+}
+
+/// Perpetual positions response.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct PerpetualPositionResponse {
+ /// Perpetual positions.
+    pub positions: Vec<PerpetualPositionResponseObject>,
+}
+
+/// Transfers response.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct TransferResponse {
+ /// List of transfers.
+    pub transfers: Vec<TransferResponseObject>,
+}
+
+/// Transfer response.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct TransferResponseObject {
+ /// Transfer id.
+ pub id: TransferId,
+ /// Time (UTC).
+    pub created_at: DateTime<Utc>,
+ /// Block height.
+ pub created_at_height: Height,
+ /// Sender of transfer.
+ pub sender: Account,
+ /// Recipient of transfer.
+ pub recipient: Account,
+ /// Size of transfer.
+ pub size: BigDecimal,
+ /// Token symbol.
+ pub symbol: Symbol,
+ /// Transfer transaction hash.
+ pub transaction_hash: String,
+ /// Transfer type.
+ #[serde(rename = "type")]
+ pub transfer_type: TransferType,
+}
+
+/// Orders list response.
+pub type ListOrdersResponse = Vec<OrderResponseObject>;
+
+/// Fills response.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct FillResponse {
+ /// List of fills.
+    pub fills: Vec<FillResponseObject>,
+}
+
+/// Fill response.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct FillResponseObject {
+ /// Fill id.
+ pub id: FillId,
+ /// Side (buy/sell).
+ pub side: OrderSide,
+ /// Size.
+ pub size: BigDecimal,
+ /// Fee.
+ pub fee: BigDecimal,
+ /// Fill type.
+ #[serde(rename = "type")]
+ pub fill_type: FillType,
+ /// Liquidity.
+ pub liquidity: Liquidity,
+ /// Market ticker.
+ pub market: Ticker,
+ /// Market type.
+ pub market_type: MarketType,
+ /// Price.
+ pub price: Price,
+ /// Subaccount number.
+ pub subaccount_number: SubaccountNumber,
+ /// Block height.
+ pub created_at_height: Height,
+ /// Time (UTC).
+    pub created_at: DateTime<Utc>,
+ /// Client metadata.
+    pub client_metadata: Option<ClientMetadata>,
+ /// Order id.
+    pub order_id: Option<OrderId>,
+}
+
+/// Profit and loss reports.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct HistoricalPnlResponse {
+ /// List of PnL reports.
+    pub historical_pnl: Vec<PnlTicksResponseObject>,
+}
+
+/// Profit and loss report.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct PnlTicksResponseObject {
+ /// Report id.
+ pub id: PnlTickId,
+ /// Subaccount id.
+ pub subaccount_id: SubaccountId,
+ /// Block height.
+ pub block_height: Height,
+ /// Time (UTC).
+    pub block_time: DateTime<Utc>,
+    /// Time (UTC).
+    pub created_at: DateTime<Utc>,
+ /// Equity.
+ pub equity: BigDecimal,
+ /// Total PnL.
+ pub total_pnl: BigDecimal,
+ /// Net transfers.
+ pub net_transfers: BigDecimal,
+}
+
+/// Trading rewards reports.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct HistoricalBlockTradingRewardsResponse {
+ /// List of reports.
+    pub rewards: Vec<HistoricalBlockTradingReward>,
+}
+
+/// Trading rewards report.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct HistoricalBlockTradingReward {
+ /// Trading reward amount.
+ pub trading_reward: BigDecimal,
+ /// Block height.
+ pub created_at_height: Height,
+ /// Time (UTC).
+    pub created_at: DateTime<Utc>,
+}
+
+/// Trading rewards aggregation reports.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct HistoricalTradingRewardAggregationsResponse {
+ /// List of reports.
+    pub rewards: Vec<HistoricalTradingRewardAggregation>,
+}
+
+/// Trading rewards aggregation report.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct HistoricalTradingRewardAggregation {
+ /// Trading reward amount.
+ pub trading_reward: BigDecimal,
+ /// Block height.
+ pub started_at_height: Height,
+ /// Time (UTC).
+    pub started_at: DateTime<Utc>,
+    /// Block height.
+    pub ended_at_height: Option<Height>,
+    /// Time (UTC).
+    pub ended_at: Option<DateTime<Utc>>,
+ /// Aggregation period.
+ pub period: TradingRewardAggregationPeriod,
+}
diff --git a/v4-client-rs/client/src/indexer/sock/config.rs b/v4-client-rs/client/src/indexer/sock/config.rs
new file mode 100644
index 00000000..d1afa635
--- /dev/null
+++ b/v4-client-rs/client/src/indexer/sock/config.rs
@@ -0,0 +1,27 @@
+use serde::Deserialize;
+use std::num::NonZeroU32;
+
+/// Websocket Indexer client configuration.
+#[derive(Clone, Debug, Deserialize)]
+pub struct SockConfig {
+ /// Websocket endpoint.
+ ///
+ /// You can select Websocket endpoints from [the list](https://docs.dydx.exchange/infrastructure_providers-network/resources#indexer-endpoints).
+ pub endpoint: String,
+    /// Reconnect interval, in milliseconds.
+ #[serde(default = "default_timeout")]
+ pub timeout: u64,
+ /// Rate limit.
+ ///
+ /// See also [Rate Limiting](https://docs.dydx.exchange/api_integration-indexer/indexer_websocket#rate-limiting).
+ #[serde(default = "default_rate_limit")]
+ pub rate_limit: NonZeroU32,
+}
+
+fn default_timeout() -> u64 {
+ 1_000
+}
+
+fn default_rate_limit() -> NonZeroU32 {
+ NonZeroU32::new(2).unwrap()
+}
diff --git a/v4-client-rs/client/src/indexer/sock/connector.rs b/v4-client-rs/client/src/indexer/sock/connector.rs
new file mode 100644
index 00000000..117694a7
--- /dev/null
+++ b/v4-client-rs/client/src/indexer/sock/connector.rs
@@ -0,0 +1,316 @@
+use super::{config::SockConfig, messages::*};
+use anyhow::{anyhow as err, Error};
+use futures_util::{SinkExt, StreamExt};
+use governor::{DefaultDirectRateLimiter, Quota, RateLimiter};
+use std::collections::{hash_map::Entry, HashMap};
+use tokio::{
+ net::TcpStream,
+ sync::mpsc,
+ time::{sleep, Duration},
+};
+use tokio_tungstenite::{
+ connect_async,
+ tungstenite::{self, protocol::Message},
+ MaybeTlsStream, WebSocketStream,
+};
+
+#[cfg(feature = "telemetry")]
+use crate::telemetry::{
+ LatencyMetric, TELEMETRY_DESC_WS_RECEIVED, TELEMETRY_DESC_WS_RECONNECTS,
+ TELEMETRY_DESC_WS_SENT, TELEMETRY_DESC_WS_SENT_DURATION, TELEMETRY_WS_RECEIVED,
+ TELEMETRY_WS_RECONNECTS, TELEMETRY_WS_SENT, TELEMETRY_WS_SENT_DURATION,
+};
+
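+/// Control messages sent by the client/feed handles to the connector task.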
+#[derive(Debug)]
+pub enum ControlMsg {
+ Subscribe(Subscription, bool, ChannelSender),
+ Unsubscribe(Subscription),
+ #[allow(dead_code)] // TODO remove after completion.
+ Terminate,
+}
+
+type WsStream = WebSocketStream<MaybeTlsStream<TcpStream>>;
+
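+/// Per-subscription senders used by the connector to route feed messages to subscribers.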
+#[derive(Debug)]
+pub enum ChannelSender {
+    Subaccounts(mpsc::UnboundedSender<ConnectorMessage<SubaccountsMessage>>),
+    ParentSubaccounts(mpsc::UnboundedSender<ConnectorMessage<ParentSubaccountsMessage>>),
+    Trades(mpsc::UnboundedSender<ConnectorMessage<TradesMessage>>),
+    Orders(mpsc::UnboundedSender<ConnectorMessage<OrdersMessage>>),
+    Markets(mpsc::UnboundedSender<ConnectorMessage<MarketsMessage>>),
+    Candles(mpsc::UnboundedSender<ConnectorMessage<CandlesMessage>>),
+    BlockHeight(mpsc::UnboundedSender<ConnectorMessage<BlockHeightMessage>>),
+}
+
+impl ChannelSender {
+ pub(crate) fn status(&self, msg: ConnectorStatusMessage) -> Result<(), Error> {
+ match self {
+ Self::Subaccounts(tx) => tx.send(ConnectorMessage::Status(msg))?,
+ Self::ParentSubaccounts(tx) => tx.send(ConnectorMessage::Status(msg))?,
+ Self::Trades(tx) => tx.send(ConnectorMessage::Status(msg))?,
+ Self::Orders(tx) => tx.send(ConnectorMessage::Status(msg))?,
+ Self::Markets(tx) => tx.send(ConnectorMessage::Status(msg))?,
+ Self::Candles(tx) => tx.send(ConnectorMessage::Status(msg))?,
+ Self::BlockHeight(tx) => tx.send(ConnectorMessage::Status(msg))?,
+ }
+ Ok(())
+ }
+
+ pub(crate) fn send(&self, msg: FeedMessage) -> Result<(), Error> {
+ match (self, msg) {
+ (Self::Subaccounts(tx), FeedMessage::Subaccounts(m)) => {
+ tx.send(ConnectorMessage::Feed(m))?
+ }
+ (Self::ParentSubaccounts(tx), FeedMessage::ParentSubaccounts(m)) => {
+ tx.send(ConnectorMessage::Feed(m))?
+ }
+ (Self::Trades(tx), FeedMessage::Trades(m)) => tx.send(ConnectorMessage::Feed(m))?,
+ (Self::Orders(tx), FeedMessage::Orders(m)) => tx.send(ConnectorMessage::Feed(m))?,
+ (Self::Markets(tx), FeedMessage::Markets(m)) => tx.send(ConnectorMessage::Feed(m))?,
+ (Self::Candles(tx), FeedMessage::Candles(m)) => tx.send(ConnectorMessage::Feed(m))?,
+ (Self::BlockHeight(tx), FeedMessage::BlockHeight(m)) => {
+ tx.send(ConnectorMessage::Feed(m))?
+ }
+ _ => return Err(err!("Mismatched ChannelSender and FeedMessage types")),
+ }
+ Ok(())
+ }
+}
+
+/// Connector to Client message
+#[derive(Debug)]
+pub enum ConnectorMessage<T: TryFrom<FeedMessage>> {
+ Status(ConnectorStatusMessage),
+ Feed(T),
+}
+
+#[derive(Debug)]
+pub enum ConnectorStatusMessage {
+ Connected,
+ Disconnected,
+ Resubscription,
+}
+
+/// WebSocket connection manager and message router.
+pub(crate) struct Connector {
+ client_handle: bool,
+ timeout: Duration,
+ url: String,
+ rx: mpsc::UnboundedReceiver,
+    subscriptions: HashMap<Subscription, ChannelSender>,
+ rate_limiter: DefaultDirectRateLimiter,
+}
+
+impl Connector {
+ pub(crate) fn new(config: SockConfig, rx: mpsc::UnboundedReceiver) -> Self {
+ #[cfg(feature = "telemetry")]
+ {
+ metrics::describe_counter!(
+ TELEMETRY_WS_RECONNECTS,
+ metrics::Unit::Count,
+ TELEMETRY_DESC_WS_RECONNECTS
+ );
+ metrics::describe_counter!(
+ TELEMETRY_WS_RECEIVED,
+ metrics::Unit::Count,
+ TELEMETRY_DESC_WS_RECEIVED
+ );
+ metrics::describe_counter!(
+ TELEMETRY_WS_SENT,
+ metrics::Unit::Count,
+ TELEMETRY_DESC_WS_SENT
+ );
+
+ metrics::describe_histogram!(
+ TELEMETRY_WS_SENT_DURATION,
+ metrics::Unit::Milliseconds,
+ TELEMETRY_DESC_WS_SENT_DURATION
+ );
+ }
+ Connector {
+ client_handle: true,
+ url: config.endpoint,
+ timeout: Duration::from_millis(config.timeout),
+ rx,
+ subscriptions: Default::default(),
+ rate_limiter: RateLimiter::direct(Quota::per_second(config.rate_limit)),
+ }
+ }
+
+ pub(crate) async fn entrypoint(mut self) {
+ if let Err(err) = self.connection_loop().await {
+ log::error!("Connection failed: {err}");
+ }
+ }
+
+ async fn connection_loop(&mut self) -> Result<(), Error> {
+ let (mut wss, _) = connect_async(&self.url).await?;
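+        // Serve while the client handle is alive or any subscriptions remain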
+ while self.is_active() {
+ if let Err(err) = self.step(&mut wss).await {
+ match err {
+ SockError::Tungstenite(e) => {
+ log::error!(
+ "WebSocket interaction failed: {e}. Attempting reconnection..."
+ );
+ sleep(self.timeout).await;
+
+ #[cfg(feature = "telemetry")]
+ metrics::counter!(TELEMETRY_WS_RECONNECTS).increment(1);
+
+ wss = self.reconnect().await?;
+ }
+ SockError::Protocol(e) => log::error!("WebSocket protocol failure: {e}"),
+ }
+ }
+ }
+ log::debug!("Stopping connector.");
+ self.unsubscribe_all(&mut wss).await?;
+ Ok(())
+ }
+
+ async fn step(&mut self, wss: &mut WsStream) -> Result<(), SockError> {
+ tokio::select! {
+ // Client -> Indexer
+ Some(msg) = self.rx.recv() => {
+
+ if let Some(msg) = self.process_ctrl_msg(msg).await {
+ #[cfg(feature = "telemetry")]
+ LatencyMetric::new(TELEMETRY_WS_SENT_DURATION);
+
+ self.send(wss, msg).await?;
+
+ #[cfg(feature = "telemetry")]
+ metrics::counter!(TELEMETRY_WS_SENT).increment(1);
+ }
+ }
+ // Indexer -> Client
+ Some(msg) = wss.next() => {
+ self.process_wss_msg(msg?).await?;
+
+ #[cfg(feature = "telemetry")]
+ metrics::counter!(TELEMETRY_WS_RECEIVED).increment(1);
+ }
+ }
+ Ok(())
+ }
+
+    async fn process_ctrl_msg(&mut self, ctrl_msg: ControlMsg) -> Option<RatedMessage> {
+ match ctrl_msg {
+ ControlMsg::Subscribe(sub, batched, tx) => {
+ let msg = sub.sub_message(batched);
+ match self.subscriptions.entry(sub) {
+ Entry::Vacant(entry) => {
+ tx.status(ConnectorStatusMessage::Connected).ok()?;
+ entry.insert(tx);
+ Some(RatedMessage::RateLimited(msg))
+ }
+ Entry::Occupied(_) => {
+ tx.status(ConnectorStatusMessage::Resubscription).ok()?;
+ None
+ }
+ }
+ }
+ ControlMsg::Unsubscribe(sub) => {
+ let msg = sub.unsub_message();
+ self.subscriptions.remove(&sub);
+ Some(RatedMessage::Free(msg))
+ }
+ ControlMsg::Terminate => {
+ self.client_handle = false;
+ None
+ }
+ }
+ }
+
+ async fn process_wss_msg(&mut self, wss_msg: Message) -> Result<(), Error> {
+ match wss_msg {
+ Message::Text(text) => {
+ let json: WsMessage =
+ serde_json::from_str(&text).map_err(|e| err!("{e} for message: {text}"))?;
+ match json {
+ WsMessage::Setup(setup) => {
+ log::debug!(
+ "Connected to WebSocket stream with ID: {}",
+ setup.connection_id
+ );
+ Ok(())
+ }
+ WsMessage::Error(error) => {
+ Err(err!("Server sent error message: {}", error.message))
+ }
+ WsMessage::Data(data) => {
+ let sub = data.subscription().ok_or_else(|| {
+ err!("Could not match received FeedMessage with a subscription!")
+ })?;
+ let tx = self
+ .subscriptions
+ .get(&sub)
+ .ok_or_else(|| err!("Subscription {sub:?} is not found!"))?;
+ tx.send(data)?;
+ Ok(())
+ }
+ WsMessage::Unsub(unsub) => {
+ log::debug!(
+ "Received unsubscribed message for: {} {}",
+ unsub.channel,
+ unsub.id.unwrap_or("".into())
+ );
+ Ok(())
+ }
+ }
+ }
+ Message::Ping(_) | Message::Pong(_) => Ok(()),
+ evt => Err(err!("Unsupported WebSocket event: {evt:?}")),
+ }
+ }
+
+ async fn reconnect(&mut self) -> Result<WsStream, Error> {
+ let (mut wss, _) = connect_async(&self.url).await?;
+ // Resubscribe to all
+ for sub in &self.subscriptions {
+ let msg = sub.0.sub_message(false);
+ self.send(&mut wss, RatedMessage::RateLimited(msg)).await?;
+ }
+ Ok(wss)
+ }
+
+ async fn unsubscribe_all(&mut self, wss: &mut WsStream) -> Result<(), Error> {
+ for sub in &self.subscriptions {
+ let msg = sub.0.unsub_message();
+ self.send(wss, RatedMessage::Free(msg)).await?;
+ }
+ Ok(())
+ }
+
+ /// Runs while the `SockClient` has not been dropped or there are still live subscriptions
+ fn is_active(&self) -> bool {
+ self.client_handle || !self.subscriptions.is_empty()
+ }
+
+ /// Rate-limiting socket send
+ async fn send(&self, wss: &mut WsStream, msg: RatedMessage) -> Result<(), Error> {
+ let wmsg = match msg {
+ RatedMessage::RateLimited(wmsg) => {
+ self.rate_limiter.until_ready().await;
+ wmsg
+ }
+ RatedMessage::Free(wmsg) => wmsg,
+ };
+ wss.send(wmsg).await?;
+ Ok(())
+ }
+}
+
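+/// Outgoing message wrapper: `RateLimited` messages wait on the rate limiter
+/// before being sent, `Free` messages (e.g. unsubscribes) are sent immediately.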
+#[derive(Debug)]
+enum RatedMessage {
+ RateLimited(Message),
+ Free(Message),
+}
+
+#[derive(Debug, thiserror::Error)]
+enum SockError {
+ #[error("Stream error: {0}")]
+ Tungstenite(#[from] tungstenite::Error),
+ #[error("Protocol error: {0}")]
+ Protocol(#[from] anyhow::Error),
+}
diff --git a/v4-client-rs/client/src/indexer/sock/feed.rs b/v4-client-rs/client/src/indexer/sock/feed.rs
new file mode 100644
index 00000000..bf744c7a
--- /dev/null
+++ b/v4-client-rs/client/src/indexer/sock/feed.rs
@@ -0,0 +1,101 @@
+use anyhow::anyhow as err;
+use derive_more::Debug;
+use std::ops::{Deref, DerefMut};
+use thiserror::Error;
+use tokio::sync::mpsc;
+
+use super::connector::{ConnectorMessage, ConnectorStatusMessage};
+use super::{ControlMsg, FeedMessage, Subscription};
+
+/// Realtime feed.
+///
+/// Check [the example](https://github.com/dydxprotocol/v4-clients/blob/main/v4-client-rs/client/examples/websockets.rs).
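+///
+/// A minimal consumption sketch (how a `Feed` is obtained depends on the client
+/// API; see the linked example for a complete, working setup):
+///
+/// ```ignore
+/// async fn consume<T>(mut feed: Feed<T>)
+/// where
+///  T: TryFrom<FeedMessage> + std::fmt::Debug,
+/// {
+///  // `recv` yields `None` once the connector closes the subscription channel.
+///  while let Some(update) = feed.recv().await {
+///  log::debug!("feed update: {update:?}");
+///  }
+/// }
+/// ```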
+pub struct Feed<T: TryFrom<FeedMessage>> {
+ feed: mpsc::UnboundedReceiver<ConnectorMessage<T>>,
+ sub: Subscription,
+ ctrl: mpsc::UnboundedSender<ControlMsg>,
+}
+
+impl<T> Feed<T>
+where
+ T: TryFrom<FeedMessage> + Debug,
+{
+ pub(crate) async fn setup(
+ mut feed: mpsc::UnboundedReceiver<ConnectorMessage<T>>,
+ sub: Subscription,
+ ctrl: mpsc::UnboundedSender<ControlMsg>,
+ ) -> Result<Self, FeedError> {
+ if let Some(msg) = feed.recv().await {
+ match msg {
+ ConnectorMessage::Status(ConnectorStatusMessage::Connected) => {
+ Ok(Self { feed, sub, ctrl })
+ }
+ ConnectorMessage::Status(status) => Err(status.into()),
+ other => Err(err!("Connector sent {:?}. Expected Connected status.", other).into()),
+ }
+ } else {
+ Err(FeedError::Disconnected)
+ }
+ }
+
+ // Could be changed to return a Result.
+ /// Receive a feed update.
+ pub async fn recv(&mut self) -> Option<T> {
+ match self.feed.recv().await {
+ Some(ConnectorMessage::Feed(feed)) => Some(feed),
+ _ => None,
+ }
+ }
+}
+
+impl<T: TryFrom<FeedMessage>> Drop for Feed<T> {
+ fn drop(&mut self) {
+ if let Err(err) = self.ctrl.send(ControlMsg::Unsubscribe(self.sub.clone())) {
+ log::error!("Sending of Unsubscribe control message to connector failed: {err}");
+ }
+ }
+}
+
+impl<T: TryFrom<FeedMessage>> Deref for Feed<T> {
+ type Target = mpsc::UnboundedReceiver<ConnectorMessage<T>>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.feed
+ }
+}
+
+impl<T: TryFrom<FeedMessage>> DerefMut for Feed<T> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.feed
+ }
+}
+
+/// Feed error.
+#[derive(Debug, Error)]
+pub enum FeedError {
+ /// Channel is disconnected.
+ #[error("Channel disconnected")]
+ Disconnected,
+ /// Resubscription is detected.
+ #[error("Resubscription detected")]
+ Resubscription,
+ /// Other error.
+ #[error("Other error: {0}")]
+ Other(#[from] anyhow::Error),
+}
+
+impl From<ConnectorStatusMessage> for FeedError {
+ fn from(status: ConnectorStatusMessage) -> Self {
+ match status {
+ ConnectorStatusMessage::Disconnected => FeedError::Disconnected,
+ ConnectorStatusMessage::Resubscription => FeedError::Resubscription,
+ _ => FeedError::Other(err!("Unexpected ConnectorStatusMessage {:?}", status)),
+ }
+ }
+}
+
+impl<T> From<mpsc::error::SendError<T>> for FeedError {
+ fn from(_err: mpsc::error::SendError<T>) -> Self {
+ FeedError::Disconnected
+ }
+}
diff --git a/v4-client-rs/client/src/indexer/sock/messages.rs b/v4-client-rs/client/src/indexer/sock/messages.rs
new file mode 100644
index 00000000..219e6ec9
--- /dev/null
+++ b/v4-client-rs/client/src/indexer/sock/messages.rs
@@ -0,0 +1,1106 @@
+use crate::indexer::types::{
+ CandleResponse as CandlesInitialMessageContents, CandleResponseObject as Candle,
+ HeightResponse as BlockHeightInitialMessageContents,
+ OrderBookResponseObject as OrdersInitialMessageContents,
+ OrderResponseObject as OrderMessageObject,
+ ParentSubaccountResponseObject as ParentSubaccountMessageObject,
+ PerpetualMarketResponse as MarketsInitialMessageContents,
+ SubaccountResponseObject as SubaccountMessageObject,
+ TradeResponse as TradesInitialMessageContents, *,
+};
+use bigdecimal::BigDecimal;
+use chrono::{DateTime, Utc};
+use serde::Deserialize;
+use serde_json::{json, Value};
+use std::collections::HashMap;
+use tokio_tungstenite::tungstenite::protocol::Message;
+
+/// Feed subscription options.
+/// A respective ticker is required for `Orders`, `Trades`, and `Candles`.
+#[derive(Debug, Clone, Hash, Eq, PartialEq)]
+pub enum Subscription {
+ /// Subaccounts.
+ Subaccounts(Subaccount),
+ /// Orders.
+ Orders(Ticker),
+ /// Trades.
+ Trades(Ticker),
+ /// Markets.
+ Markets,
+ /// Candles.
+ Candles(Ticker, CandleResolution),
+ /// Parent subaccounts.
+ ParentSubaccounts(ParentSubaccount),
+ /// Block height.
+ BlockHeight,
+}
+
+impl Subscription {
+ pub(crate) fn sub_message(&self, batched: bool) -> Message {
+ match self {
+ Self::Subaccounts(ref subacc) => subaccounts::sub_message(subacc, batched),
+ Self::Markets => markets::sub_message(batched),
+ Self::Orders(ref ticker) => orders::sub_message(ticker, batched),
+ Self::Trades(ref ticker) => trades::sub_message(ticker, batched),
+ Self::Candles(ref ticker, ref res) => candles::sub_message(ticker, res, batched),
+ Self::ParentSubaccounts(ref subacc) => parent_subaccounts::sub_message(subacc, batched),
+ Self::BlockHeight => block_height::sub_message(batched),
+ }
+ }
+
+ pub(crate) fn unsub_message(&self) -> Message {
+ match self {
+ Self::Subaccounts(ref subacc) => subaccounts::unsub_message(subacc),
+ Self::Markets => markets::unsub_message(),
+ Self::Orders(ref ticker) => orders::unsub_message(ticker),
+ Self::Trades(ref ticker) => trades::unsub_message(ticker),
+ Self::Candles(ref ticker, ref res) => candles::unsub_message(ticker, res),
+ Self::ParentSubaccounts(ref subacc) => parent_subaccounts::unsub_message(subacc),
+ Self::BlockHeight => block_height::unsub_message(),
+ }
+ }
+}
+
+struct MessageFormatter {}
+
+impl MessageFormatter {
+ pub(crate) fn sub_message(channel: &str, fields: Value) -> Message {
+ let message = json!({
+ "type": "subscribe",
+ "channel": channel,
+ });
+ Self::message(fields, message)
+ }
+
+ pub(crate) fn unsub_message(channel: &str, fields: Value) -> Message {
+ let message = json!({
+ "type": "unsubscribe",
+ "channel": channel,
+ });
+ Self::message(fields, message)
+ }
+
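+ // Merges the two JSON objects and serializes the result as a text frame;
+ // since the key sets are disjoint, the argument order does not matter.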
+ fn message(mut message: Value, fields: Value) -> Message {
+ if let Value::Object(ref mut map) = message {
+ if let Value::Object(fields) = fields {
+ map.extend(fields);
+ }
+ }
+ Message::Text(message.to_string())
+ }
+}
+
+pub(crate) mod subaccounts {
+ use super::{json, Message, MessageFormatter, Subaccount};
+ pub(crate) const CHANNEL: &str = "v4_subaccounts";
+
+ pub(crate) fn sub_message(subacc: &Subaccount, batched: bool) -> Message {
+ let address = &subacc.address;
+ let number = &subacc.number;
+ MessageFormatter::sub_message(
+ CHANNEL,
+ json!({"id": format!("{address}/{number}"), "batched": batched}),
+ )
+ }
+
+ pub(crate) fn unsub_message(subacc: &Subaccount) -> Message {
+ let address = &subacc.address;
+ let number = &subacc.number;
+ MessageFormatter::unsub_message(CHANNEL, json!({"id": format!("{address}/{number}")}))
+ }
+}
+
+pub(crate) mod parent_subaccounts {
+ use super::{json, Message, MessageFormatter, ParentSubaccount};
+ pub(crate) const CHANNEL: &str = "v4_parent_subaccounts";
+
+ pub(crate) fn sub_message(subacc: &ParentSubaccount, batched: bool) -> Message {
+ let address = &subacc.address;
+ let number = &subacc.number;
+ MessageFormatter::sub_message(
+ CHANNEL,
+ json!({"id": format!("{address}/{number}"), "batched": batched}),
+ )
+ }
+
+ pub(crate) fn unsub_message(subacc: &ParentSubaccount) -> Message {
+ let address = &subacc.address;
+ let number = &subacc.number;
+ MessageFormatter::unsub_message(CHANNEL, json!({"id": format!("{address}/{number}")}))
+ }
+}
+
+pub(crate) mod orders {
+ use super::{json, Message, MessageFormatter, Ticker};
+ pub(crate) const CHANNEL: &str = "v4_orderbook";
+
+ pub(crate) fn sub_message(id: &Ticker, batched: bool) -> Message {
+ MessageFormatter::sub_message(CHANNEL, json!({"id": id, "batched": batched}))
+ }
+
+ pub(crate) fn unsub_message(id: &Ticker) -> Message {
+ MessageFormatter::unsub_message(CHANNEL, json!({"id": id}))
+ }
+}
+
+pub(crate) mod trades {
+ use super::{json, Message, MessageFormatter, Ticker};
+ pub(crate) const CHANNEL: &str = "v4_trades";
+
+ pub(crate) fn sub_message(id: &Ticker, batched: bool) -> Message {
+ MessageFormatter::sub_message(CHANNEL, json!({"id": id, "batched": batched}))
+ }
+
+ pub(crate) fn unsub_message(id: &Ticker) -> Message {
+ MessageFormatter::unsub_message(CHANNEL, json!({"id": id}))
+ }
+}
+
+pub(crate) mod markets {
+ use super::{json, Message, MessageFormatter};
+ pub const CHANNEL: &str = "v4_markets";
+
+ pub(crate) fn sub_message(batched: bool) -> Message {
+ MessageFormatter::sub_message(CHANNEL, json!({"batched": batched}))
+ }
+
+ pub(crate) fn unsub_message() -> Message {
+ MessageFormatter::unsub_message(CHANNEL, json!({}))
+ }
+}
+
+pub(crate) mod candles {
+ use super::{json, CandleResolution, Message, MessageFormatter, Ticker};
+ pub(crate) const CHANNEL: &str = "v4_candles";
+
+ pub(crate) fn sub_message(
+ id: &Ticker,
+ resolution: &CandleResolution,
+ batched: bool,
+ ) -> Message {
+ let resolution_str = serde_json::to_string(resolution).unwrap_or_default();
+ let resolution_str = resolution_str.trim_matches('"');
+ MessageFormatter::sub_message(
+ CHANNEL,
+ json!({"id": format!("{id}/{resolution_str}"), "batched": batched}),
+ )
+ }
+
+ pub(crate) fn unsub_message(id: &Ticker, resolution: &CandleResolution) -> Message {
+ let resolution_str = serde_json::to_string(resolution).unwrap_or_default();
+ let resolution_str = resolution_str.trim_matches('"');
+ MessageFormatter::unsub_message(CHANNEL, json!({"id": format!("{id}/{resolution_str}")}))
+ }
+}
+
+pub(crate) mod block_height {
+ use super::{json, Message, MessageFormatter};
+ pub const CHANNEL: &str = "v4_block_height";
+
+ pub(crate) fn sub_message(batched: bool) -> Message {
+ MessageFormatter::sub_message(CHANNEL, json!({"batched": batched}))
+ }
+
+ pub(crate) fn unsub_message() -> Message {
+ MessageFormatter::unsub_message(CHANNEL, json!({}))
+ }
+}
+
+/* Main WS type */
+#[allow(clippy::large_enum_variant)]
+#[derive(Debug, Deserialize)]
+#[serde(tag = "type")]
+pub(crate) enum WsMessage {
+ #[serde(rename = "connected")]
+ Setup(StatusConnectedMessage),
+ #[serde(rename = "error")]
+ Error(StatusErrorMessage),
+ #[serde(rename = "unsubscribed")]
+ Unsub(StatusUnsubMessage),
+ #[serde(untagged)]
+ Data(FeedMessage),
+}
+
+#[derive(Debug, Deserialize)]
+pub(crate) struct StatusConnectedMessage {
+ pub(crate) connection_id: String,
+ #[allow(dead_code)] // TODO remove after completion.
+ pub(crate) message_id: u64,
+}
+
+#[derive(Debug, Deserialize)]
+pub(crate) struct StatusErrorMessage {
+ pub(crate) message: String,
+ #[allow(dead_code)] // TODO remove after completion.
+ pub(crate) connection_id: String,
+ #[allow(dead_code)] // TODO remove after completion.
+ pub(crate) message_id: u64,
+}
+
+#[derive(Debug, Deserialize)]
+pub(crate) struct StatusUnsubMessage {
+ #[allow(dead_code)] // TODO remove after completion.
+ pub(crate) connection_id: String,
+ #[allow(dead_code)] // TODO remove after completion.
+ pub(crate) message_id: u64,
+ pub(crate) channel: String,
+ pub(crate) id: Option<String>,
+}
+
+/// Feed message types, one variant per channel
+#[derive(Debug, Deserialize)]
+#[serde(tag = "channel")]
+pub enum FeedMessage {
+ /// Subaccounts.
+ #[serde(rename = "v4_subaccounts")]
+ Subaccounts(SubaccountsMessage),
+ /// Orders.
+ #[serde(rename = "v4_orderbook")]
+ Orders(OrdersMessage),
+ /// Trades.
+ #[serde(rename = "v4_trades")]
+ Trades(TradesMessage),
+ /// Markets.
+ #[serde(rename = "v4_markets")]
+ Markets(MarketsMessage),
+ /// Candles.
+ #[serde(rename = "v4_candles")]
+ Candles(CandlesMessage),
+ /// Parent subaccounts.
+ #[serde(rename = "v4_parent_subaccounts")]
+ ParentSubaccounts(ParentSubaccountsMessage),
+ /// Block height.
+ #[serde(rename = "v4_block_height")]
+ BlockHeight(BlockHeightMessage),
+}
+
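+// Implements `TryFrom<FeedMessage>` for a channel-specific message type, matching
+// the `T: TryFrom<FeedMessage>` bound required by a typed `Feed<T>`.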
+macro_rules! impl_feed_message_try_from {
+ ($target_type:ty, $variant:ident) => {
+ impl TryFrom<FeedMessage> for $target_type {
+ type Error = ();
+ fn try_from(value: FeedMessage) -> Result<Self, Self::Error> {
+ match value {
+ FeedMessage::$variant(a) => Ok(a),
+ _ => Err(()),
+ }
+ }
+ }
+ };
+}
+
+/// Subaccounts message.
+#[derive(Debug, Deserialize)]
+#[serde(tag = "type")]
+pub enum SubaccountsMessage {
+ /// Initial.
+ #[serde(rename = "subscribed")]
+ Initial(SubaccountsInitialMessage),
+ /// Update.
+ #[serde(untagged)]
+ Update(SubaccountsUpdateMessage),
+}
+impl_feed_message_try_from!(SubaccountsMessage, Subaccounts);
+
+/// Parent subaccounts message.
+#[derive(Debug, Deserialize)]
+#[serde(tag = "type")]
+pub enum ParentSubaccountsMessage {
+ /// Initial.
+ #[serde(rename = "subscribed")]
+ Initial(ParentSubaccountsInitialMessage),
+ /// Update.
+ #[serde(untagged)]
+ Update(ParentSubaccountsUpdateMessage),
+}
+impl_feed_message_try_from!(ParentSubaccountsMessage, ParentSubaccounts);
+
+/// Trades message.
+#[derive(Debug, Deserialize)]
+#[serde(tag = "type")]
+pub enum TradesMessage {
+ /// Initial.
+ #[serde(rename = "subscribed")]
+ Initial(TradesInitialMessage),
+ /// Update.
+ #[serde(untagged)]
+ Update(TradesUpdateMessage),
+}
+impl_feed_message_try_from!(TradesMessage, Trades);
+
+/// Orders message.
+#[derive(Debug, Deserialize)]
+#[serde(tag = "type")]
+pub enum OrdersMessage {
+ /// Initial.
+ #[serde(rename = "subscribed")]
+ Initial(OrdersInitialMessage),
+ /// Update.
+ #[serde(untagged)]
+ Update(OrdersUpdateMessage),
+}
+impl_feed_message_try_from!(OrdersMessage, Orders);
+
+/// Markets message.
+#[derive(Debug, Deserialize)]
+#[serde(tag = "type")]
+pub enum MarketsMessage {
+ /// Initial.
+ #[serde(rename = "subscribed")]
+ Initial(MarketsInitialMessage),
+ /// Update.
+ #[serde(untagged)]
+ Update(MarketsUpdateMessage),
+}
+impl_feed_message_try_from!(MarketsMessage, Markets);
+
+/// Candles message.
+#[derive(Debug, Deserialize)]
+#[serde(tag = "type")]
+pub enum CandlesMessage {
+ /// Initial.
+ #[serde(rename = "subscribed")]
+ Initial(CandlesInitialMessage),
+ /// Update.
+ #[serde(untagged)]
+ Update(CandlesUpdateMessage),
+}
+impl_feed_message_try_from!(CandlesMessage, Candles);
+
+/// Block height message.
+#[derive(Debug, Deserialize)]
+#[serde(tag = "type")]
+pub enum BlockHeightMessage {
+ /// Initial.
+ #[serde(rename = "subscribed")]
+ Initial(BlockHeightInitialMessage),
+ /// Update.
+ #[serde(untagged)]
+ Update(BlockHeightUpdateMessage),
+}
+impl_feed_message_try_from!(BlockHeightMessage, BlockHeight);
+
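+// Maps a received message back to the `Subscription` key the connector uses to
+// route it to the right feed channel.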
+impl FeedMessage {
+ pub(crate) fn subscription(&self) -> Option<Subscription> {
+ let parse_subacc_id = |id: &str| -> Option<Subaccount> {
+ // Parse "id": "Address/Number"
+ let mut id_split = id.split('/');
+ let address = id_split.next()?.parse().ok()?;
+ let number_str = id_split.next()?;
+ let number = serde_json::from_str::<SubaccountNumber>(number_str).ok()?;
+ Some(Subaccount::new(address, number))
+ };
+ let parse_psubacc_id = |id: &str| -> Option<ParentSubaccount> {
+ // Parse "id": "Address/Number"
+ let mut id_split = id.split('/');
+ let address = id_split.next()?.parse().ok()?;
+ let number_str = id_split.next()?;
+ let number = serde_json::from_str::<ParentSubaccountNumber>(number_str).ok()?;
+ Some(ParentSubaccount::new(address, number))
+ };
+ let parse_candles_id = |id: &str| -> Option<(Ticker, CandleResolution)> {
+ // Parse "id": "TICKER/RESOLUTION"
+ let mut id_split = id.split('/');
+ let ticker = Ticker(id_split.next()?.into());
+ let resolution_str = format!("\"{}\"", id_split.next()?);
+ let resolution = serde_json::from_str(&resolution_str).ok()?;
+ Some((ticker, resolution))
+ };
+
+ match self {
+ Self::Subaccounts(SubaccountsMessage::Initial(msg)) => {
+ let subacc = parse_subacc_id(&msg.id)?;
+ Some(Subscription::Subaccounts(subacc))
+ }
+ Self::Subaccounts(SubaccountsMessage::Update(msg)) => {
+ let subacc = parse_subacc_id(&msg.id)?;
+ Some(Subscription::Subaccounts(subacc))
+ }
+
+ Self::ParentSubaccounts(ParentSubaccountsMessage::Initial(msg)) => {
+ let subacc = parse_psubacc_id(&msg.id)?;
+ Some(Subscription::ParentSubaccounts(subacc))
+ }
+ Self::ParentSubaccounts(ParentSubaccountsMessage::Update(msg)) => {
+ let subacc = parse_psubacc_id(&msg.id)?;
+ Some(Subscription::ParentSubaccounts(subacc))
+ }
+
+ Self::Orders(OrdersMessage::Initial(msg)) => {
+ Some(Subscription::Orders(Ticker(msg.id.clone())))
+ }
+ Self::Orders(OrdersMessage::Update(msg)) => {
+ Some(Subscription::Orders(Ticker(msg.id.clone())))
+ }
+
+ Self::Trades(TradesMessage::Initial(msg)) => {
+ Some(Subscription::Trades(Ticker(msg.id.clone())))
+ }
+ Self::Trades(TradesMessage::Update(msg)) => {
+ Some(Subscription::Trades(Ticker(msg.id.clone())))
+ }
+
+ Self::Markets(MarketsMessage::Update(_)) => Some(Subscription::Markets),
+ Self::Markets(MarketsMessage::Initial(_)) => Some(Subscription::Markets),
+
+ Self::Candles(CandlesMessage::Initial(msg)) => {
+ let (ticker, resolution) = parse_candles_id(&msg.id)?;
+ Some(Subscription::Candles(ticker, resolution))
+ }
+ Self::Candles(CandlesMessage::Update(msg)) => {
+ let (ticker, resolution) = parse_candles_id(&msg.id)?;
+ Some(Subscription::Candles(ticker, resolution))
+ }
+
+ Self::BlockHeight(BlockHeightMessage::Initial(_)) => Some(Subscription::BlockHeight),
+ Self::BlockHeight(BlockHeightMessage::Update(_)) => Some(Subscription::BlockHeight),
+ }
+ }
+}
+
+/// Subaccount initial.
+#[derive(Debug, Deserialize)]
+pub struct SubaccountsInitialMessage {
+ /// Connection id.
+ pub connection_id: String,
+ /// Subaccount.
+ pub contents: SubaccountsInitialMessageContents,
+ /// Id.
+ pub id: String,
+ /// Message id.
+ pub message_id: u64,
+}
+
+/// Subaccount.
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SubaccountsInitialMessageContents {
+ /// Subaccount.
+ pub subaccount: SubaccountMessageObject,
+ /// Orders.
+ pub orders: Vec<OrderMessageObject>,
+ /// Block height.
+ pub block_height: Height,
+}
+
+/// Parent subaccount initial.
+#[derive(Debug, Deserialize)]
+pub struct ParentSubaccountsInitialMessage {
+ /// Connection id.
+ pub connection_id: String,
+ /// Subaccount.
+ pub contents: ParentSubaccountsInitialMessageContents,
+ /// Id.
+ pub id: String,
+ /// Message id.
+ pub message_id: u64,
+}
+
+/// Parent subaccount.
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ParentSubaccountsInitialMessageContents {
+ /// Subaccount.
+ pub subaccount: ParentSubaccountMessageObject,
+ /// Orders.
+ pub orders: Vec<OrderMessageObject>,
+ /// Block height.
+ pub block_height: Height,
+}
+
+/// Orders initial message.
+#[derive(Debug, Deserialize)]
+pub struct OrdersInitialMessage {
+ /// Connection id.
+ pub connection_id: String,
+ /// Orders.
+ pub contents: OrdersInitialMessageContents,
+ /// Id.
+ pub id: String,
+ /// Message id.
+ pub message_id: u64,
+}
+
+/// Trades initial message.
+#[derive(Debug, Deserialize)]
+pub struct TradesInitialMessage {
+ /// Connection id.
+ pub connection_id: String,
+ /// Trades.
+ pub contents: TradesInitialMessageContents,
+ /// Id.
+ pub id: String,
+ /// Message id.
+ pub message_id: u64,
+}
+
+/// Markets initial message.
+#[derive(Debug, Deserialize)]
+pub struct MarketsInitialMessage {
+ /// Connection id.
+ pub connection_id: String,
+ /// Market.
+ pub contents: MarketsInitialMessageContents,
+ /// Message id.
+ pub message_id: u64,
+}
+
+/// Candles initial message.
+#[derive(Debug, Deserialize)]
+pub struct CandlesInitialMessage {
+ /// Connection id.
+ pub connection_id: String,
+ /// Candles.
+ pub contents: CandlesInitialMessageContents,
+ /// Id.
+ pub id: String,
+ /// Message id.
+ pub message_id: u64,
+}
+
+/// Block height initial message.
+#[derive(Debug, Deserialize)]
+pub struct BlockHeightInitialMessage {
+ /// Connection id.
+ pub connection_id: String,
+ /// Block height contents.
+ pub contents: BlockHeightInitialMessageContents,
+ /// Message id.
+ pub message_id: u64,
+}
+
+// Updates
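+// Update `contents` may arrive either as a single object (streamed) or as an
+// array of objects (batched subscriptions); both are normalized into a `Vec`.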
+macro_rules! generate_contents_deserialize_function {
+ ($fn_name:ident, $result_type:ty) => {
+ fn $fn_name<'de, D>(deserializer: D) -> Result<Vec<$result_type>, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ let value = Value::deserialize(deserializer)?;
+
+ match value {
+ // Batched
+ Value::Array(arr) => arr
+ .into_iter()
+ .map(|v| serde_json::from_value(v))
+ .collect::<Result<Vec<_>, _>>()
+ .map_err(serde::de::Error::custom),
+ // Streamed
+ Value::Object(obj) => {
+ let item = serde_json::from_value::<$result_type>(Value::Object(obj.clone()))
+ .map_err(serde::de::Error::custom)?;
+ Ok(vec![item])
+ }
+ _ => Err(serde::de::Error::custom("Expected array or object")),
+ }
+ }
+ };
+}
+
+/// Subaccount update.
+#[derive(Debug, Deserialize)]
+pub struct SubaccountsUpdateMessage {
+ /// Connection id.
+ pub connection_id: String,
+ /// Update.
+ #[serde(deserialize_with = "deserialize_subaccounts_contents")]
+ pub contents: Vec<SubaccountUpdateMessageContents>,
+ /// Id.
+ pub id: String,
+ /// Message id.
+ pub message_id: u64,
+ /// Version.
+ pub version: String,
+}
+generate_contents_deserialize_function!(
+ deserialize_subaccounts_contents,
+ SubaccountUpdateMessageContents
+);
+
+/// Subaccount update contents.
+#[derive(Clone, Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SubaccountUpdateMessageContents {
+ /// Perpetual position updates on the subaccount.
+ pub perpetual_positions: Option<Vec<PerpetualPositionSubaccountMessageContents>>,
+ /// Asset position updates on the subaccount.
+ pub asset_positions: Option<Vec<AssetPositionSubaccountMessageContents>>,
+ /// Order updates on the subaccount.
+ pub orders: Option<Vec<OrderSubaccountMessageContents>>,
+ /// Fills that occur on the subaccount.
+ pub fills: Option<Vec<FillSubaccountMessageContents>>,
+ /// Transfers that occur on the subaccount.
+ pub transfers: Option<TransferSubaccountMessageContents>,
+ /// Rewards that occur on the subaccount.
+ pub trading_reward: Option<TradingRewardSubaccountMessageContents>,
+ /// Block height.
+ pub block_height: Option<Height>,
+}
+
+/// Perpetual position on subaccount.
+#[derive(Clone, Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct PerpetualPositionSubaccountMessageContents {
+ /// Address.
+ pub address: Address,
+ /// Subaccount number.
+ pub subaccount_number: SubaccountNumber,
+ /// Position id.
+ pub position_id: String,
+ /// Market ticker.
+ pub market: Ticker,
+ /// Position side (long/short).
+ pub side: PositionSide,
+ /// Position status.
+ pub status: PerpetualPositionStatus,
+ /// Size.
+ pub size: Quantity,
+ /// Maximum size.
+ pub max_size: Quantity,
+ /// Net funding.
+ pub net_funding: BigDecimal,
+ /// Entry price.
+ pub entry_price: Price,
+ /// Exit price.
+ pub exit_price: Option<Price>,
+ /// Sum at open.
+ pub sum_open: BigDecimal,
+ /// Sum at close.
+ pub sum_close: BigDecimal,
+ /// Realized PnL.
+ pub realized_pnl: Option<BigDecimal>,
+ /// Unrealized PnL.
+ pub unrealized_pnl: Option<BigDecimal>,
+}
+
+/// Asset position per subaccount.
+#[derive(Clone, Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct AssetPositionSubaccountMessageContents {
+ /// Address.
+ pub address: Address,
+ /// Subaccount number.
+ pub subaccount_number: SubaccountNumber,
+ /// Position id.
+ pub position_id: String,
+ /// Asset id.
+ pub asset_id: AssetId,
+ /// Token symbol.
+ pub symbol: Symbol,
+ /// Position side (long/short).
+ pub side: PositionSide,
+ /// Size.
+ pub size: Quantity,
+}
+
+/// Order per subaccount.
+#[derive(Clone, Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct OrderSubaccountMessageContents {
+ /// Id.
+ pub id: String,
+ /// Subaccount id.
+ pub subaccount_id: SubaccountId,
+ /// Client id.
+ pub client_id: ClientId,
+ /// Clob pair id.
+ pub clob_pair_id: Option