From 5f6eebc9507079a98788fbb2d940348308ce5270 Mon Sep 17 00:00:00 2001 From: Danny Browning Date: Mon, 8 May 2023 20:52:00 -0600 Subject: [PATCH] Initial commit --- .github/dependabot.yml | 15 ++ .github/workflows/release.yml | 115 +++++++++++ .github/workflows/rust.yml | 22 +++ .gitignore | 3 + Cargo.toml | 34 ++++ release.toml | 3 + src/api.rs | 91 +++++++++ src/lib.rs | 361 ++++++++++++++++++++++++++++++++++ src/model_definition.rs | 99 ++++++++++ 9 files changed, 743 insertions(+) create mode 100644 .github/dependabot.yml create mode 100644 .github/workflows/release.yml create mode 100644 .github/workflows/rust.yml create mode 100644 .gitignore create mode 100644 Cargo.toml create mode 100644 release.toml create mode 100644 src/api.rs create mode 100644 src/lib.rs create mode 100644 src/model_definition.rs diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..e945fc9 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,15 @@ +# Please see the documentation for all configuration options: +# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "cargo" + directory: "/" + schedule: + interval: "weekly" + ignore: + # These are peer deps of Cargo and should not be automatically bumped + - dependency-name: "semver" + - dependency-name: "crates-io" + rebase-strategy: "disabled" + diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..b41c3a4 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,115 @@ +# CI that: +# +# * checks for a Git Tag that looks like a release ("v1.2.0") +# * creates a Github Release™️ +# * builds binaries/packages with cargo-dist +# * uploads those packages to the Github Release™️ +# +# Note that the Github Release™️ will be created before the packages, +# so there will be a few minutes where the release has no packages +# and then they will 
slowly trickle in, possibly failing. To make +# this more pleasant we mark the release as a "draft" until all +# artifacts have been successfully uploaded. This allows you to +# choose what to do with partial successes and avoids spamming +# anyone with notifications before the release is actually ready. +name: Release + +permissions: + contents: write + +on: + workflow_dispatch: + inputs: + level: + description: 'Release level' + required: true + default: 'patch' + type: choice + options: + - patch + - minor + - major + +jobs: + # Build and packages all the things + build-binaries: + strategy: + matrix: + # For these target platforms + include: + - target: x86_64-unknown-linux-gnu + os: ubuntu-latest + - target: x86_64-apple-darwin + os: macos-latest + - target: aarch64-apple-darwin + os: macos-latest + #- target: x86_64-pc-windows-msvc + # os: windows-latest + runs-on: ${{ matrix.os }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v3 + - name: Install Rust + run: | + rustup update stable + rustup default stable + - name: Install cargo-zigbuild + run: | + pip3 install ziglang + cargo install cargo-zigbuild + - name: Setup target + run: rustup target add ${{ matrix.target }} + - name: Run cargo zigbuild + run: | + cargo zigbuild --release --target ${{ matrix.target }} + - name: Compress artifacts + run: | + tar -cvzf wheel_${{ matrix.target }}.tar.gz -C target/${{ matrix.target }}/release wheel + - name: Archive artifact + uses: actions/upload-artifact@v3 + with: + name: wheel_${{ matrix.target }} + path: | + wheel_${{ matrix.target }}.tar.gz + + bump-versions: + needs: [build-binaries] + runs-on: ubuntu-latest + env: + CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_TOKEN }} + outputs: + tag: ${{ steps.release.outputs.tag }} + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - id: install-cargo-release + uses: taiki-e/install-action@v1 + with: + tool: cargo-release + - id: release + run: | + git config user.email 
"github@3box.io" + git config user.name "Github Automation" + echo "tag="$(cargo metadata --format-version=1 --no-deps | jq '.packages[0].version' | tr -d '"') >> $GITHUB_OUTPUT + cargo release -vv ${{ inputs.level }} -x --no-confirm + + publish-release: + needs: [bump-versions] + runs-on: ubuntu-latest + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v3 + - uses: actions/download-artifact@v3 + with: + path: artifacts + - name: check artifacts + run: | + ls artifacts/**/*.tar.gz + - name: create release + run: | + echo "Creating release for "${{ needs.bump-versions.outputs.tag }} + gh release create v${{ needs.bump-versions.outputs.tag }} --title "v"${{ needs.bump-versions.outputs.tag }} --latest artifacts/**/*.tar.gz + diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml new file mode 100644 index 0000000..31000a2 --- /dev/null +++ b/.github/workflows/rust.yml @@ -0,0 +1,22 @@ +name: Rust + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + +env: + CARGO_TERM_COLOR: always + +jobs: + build: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - name: Build + run: cargo build --verbose + - name: Run tests + run: cargo test --verbose diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..79fe596 --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +.idea +Cargo.lock +/target diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..d36c869 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "ceramic-http-client" +version = "0.1.0" +edition = "2021" +authors = [ + "Nathaniel Cook ", + "Danny Browning ", +] +license = "MIT OR Apache-2.0" +repository = "https://github.com/3box/rust-ceramic-http-client" +publish = false +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +anyhow = "1" +#ceramic-event = { git = "https://github.com/3box/rust-ceramic", branch = "main" } 
+ceramic-event = { git = "https://github.com/3box/rust-ceramic", branch = "main" } +json-patch = "0.3.0" +reqwest = { version = "0.11.14", features = ["json"], optional = true } +schemars = "0.8.12" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +ssi = { version = "0.6", features = ["ed25519"] } +url = { version = "2.2.2", optional = true } + +[features] +default = ["remote"] +remote = ["reqwest", "url"] + +[dev-dependencies] +test-log = { version = "0.2", default-features = false, features = ["trace"] } +tokio = { version = "1", default-features = false, features = ["macros", "rt"] } +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter"] } diff --git a/release.toml b/release.toml new file mode 100644 index 0000000..c775043 --- /dev/null +++ b/release.toml @@ -0,0 +1,3 @@ +consolidate-commits = false +allow-branch = ["main"] +tag-name = "{{prefix}}v{{prev_version}}" diff --git a/src/api.rs b/src/api.rs new file mode 100644 index 0000000..78ce3ab --- /dev/null +++ b/src/api.rs @@ -0,0 +1,91 @@ +use ceramic_event::{ + Base64String, Jws, MultiBase32String, MultiBase36String, StreamId, StreamIdType, +}; +use serde::{Deserialize, Serialize}; + +#[derive(Serialize)] +pub struct BlockHeader { + pub family: String, + pub controllers: Vec, + pub model: Base64String, +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct BlockData { + pub header: BlockHeader, + #[serde(skip_serializing_if = "Option::is_none")] + pub data: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub jws: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub linked_block: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub cacao_block: Option, +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct CreateRequest { + #[serde(rename = "type")] + pub r#type: StreamIdType, + #[serde(rename = "genesis")] + pub block: BlockData, +} + +#[derive(Serialize)] +#[serde(rename_all = 
"camelCase")] +pub struct UpdateRequest { + #[serde(rename = "type")] + pub r#type: StreamIdType, + #[serde(rename = "commit")] + pub block: BlockData, + pub stream_id: MultiBase36String, +} + +#[derive(Deserialize)] +pub struct PostResponse { + error: Option, + #[serde(rename = "streamId")] + stream_id: Option, +} + +impl PostResponse { + pub fn resolve(self, context: &str) -> anyhow::Result { + if let Some(stream_id) = self.stream_id { + Ok(stream_id) + } else { + let post = if let Some(err) = self.error { + format!(": {}", err) + } else { + ": No additional information provided by ceramic".to_string() + }; + anyhow::bail!(format!("{}{}", context, post)) + } + } +} + +#[derive(Deserialize)] +pub struct StateLog { + pub cid: MultiBase36String, +} + +#[derive(Deserialize)] +pub struct Metadata { + pub controllers: Vec, + pub model: StreamId, +} + +#[derive(Deserialize)] +pub struct StreamState { + pub content: serde_json::Value, + pub log: Vec, + pub metadata: Metadata, +} + +#[derive(Deserialize)] +pub struct GetResponse { + #[serde(rename = "streamId")] + pub stream_id: StreamId, + pub state: StreamState, +} diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000..89cf696 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,361 @@ +//! Ceramic HTTP API +//! +//! This crate provides a client for interacting with the Ceramic HTTP API. 
+mod api; +mod model_definition; + +use ceramic_event::{ + Base64String, Cid, DagCborEncoded, DidDocument, EventArgs, MultiBase36String, StreamId, + StreamIdType, +}; +use serde::{de::DeserializeOwned, Serialize}; +use std::str::FromStr; + +pub use model_definition::{GetRootSchema, ModelDefinition}; +pub use schemars; + +struct CeramicHttpClient { + signer: DidDocument, + private_key: String, +} + +impl CeramicHttpClient { + pub fn new(signer: DidDocument, private_key: &str) -> Self { + Self { + signer, + private_key: private_key.to_string(), + } + } + + pub fn streams_endpoint(&self) -> &'static str { + "/api/v0/streams" + } + + pub fn commits_endpoint(&self) -> &'static str { + "/api/v0/commits" + } + + pub async fn create_model_request( + &self, + model: &ModelDefinition, + ) -> anyhow::Result> { + let args = EventArgs::new(&self.signer); + let commit = args.init_with_data(&model, &self.private_key).await?; + let controllers: Vec<_> = args.controllers().map(|c| c.id.clone()).collect(); + let data = Base64String::from(commit.linked_block.as_ref()); + let model = Base64String::from(args.parent().to_vec()?); + + Ok(api::CreateRequest { + r#type: StreamIdType::Model, + block: api::BlockData { + header: api::BlockHeader { + family: "test".to_string(), + controllers, + model, + }, + linked_block: Some(data.clone()), + jws: Some(commit.jws), + data: Some(data), + cacao_block: None, + }, + }) + } + + pub async fn create_single_instance_request( + &self, + model_id: &StreamId, + ) -> anyhow::Result> { + if !model_id.is_model() { + anyhow::bail!("StreamId was not a model"); + } + let args = EventArgs::new_with_parent(&self.signer, model_id); + let commit = args.init()?; + let controllers: Vec<_> = args.controllers().map(|c| c.id.clone()).collect(); + let model = Base64String::from(model_id.to_vec()?); + Ok(api::CreateRequest { + r#type: StreamIdType::Document, + block: api::BlockData { + header: api::BlockHeader { + family: "test".to_string(), + controllers, + model, + }, 
+ linked_block: None, + jws: None, + data: Some(commit.encoded), + cacao_block: None, + }, + }) + } + + pub async fn create_list_instance_request( + &self, + model_id: &StreamId, + data: T, + ) -> anyhow::Result> { + if !model_id.is_model() { + anyhow::bail!("StreamId was not a model"); + } + let args = EventArgs::new_with_parent(&self.signer, model_id); + let commit = args.init_with_data(&data, &self.private_key).await?; + let controllers: Vec<_> = args.controllers().map(|c| c.id.clone()).collect(); + let data = Base64String::from(commit.linked_block.as_ref()); + let model = Base64String::from(model_id.to_vec()?); + Ok(api::CreateRequest { + r#type: StreamIdType::Document, + block: api::BlockData { + header: api::BlockHeader { + family: "test".to_string(), + controllers, + model, + }, + linked_block: Some(data.clone()), + jws: Some(commit.jws), + data: Some(data), + cacao_block: None, + }, + }) + } + + pub async fn create_update_request( + &self, + get: &api::GetResponse, + patch: json_patch::Patch, + ) -> anyhow::Result { + if !get.stream_id.is_document() { + anyhow::bail!("StreamId was not a document"); + } + let tip = Cid::from_str(get.state.log[0].cid.as_ref())?; + let args = EventArgs::new_with_parent(&self.signer, &get.state.metadata.model); + let commit = args.update(&patch, &self.private_key, &tip).await?; + let controllers: Vec<_> = args.controllers().map(|c| c.id.clone()).collect(); + let data = Base64String::from(commit.linked_block.as_ref()); + let model = Base64String::from(get.state.metadata.model.to_vec()?); + let stream = MultiBase36String::try_from(&get.stream_id)?; + Ok(api::UpdateRequest { + r#type: StreamIdType::Document, + block: api::BlockData { + header: api::BlockHeader { + family: "test".to_string(), + controllers, + model, + }, + linked_block: Some(data.clone()), + jws: Some(commit.jws), + data: Some(data), + cacao_block: None, + }, + stream_id: stream, + }) + } +} + +pub mod remote { + use super::*; + + pub struct CeramicRemoteHttpClient 
{ + cli: CeramicHttpClient, + remote: reqwest::Client, + url: url::Url, + } + + impl CeramicRemoteHttpClient { + pub fn new(signer: DidDocument, private_key: &str, remote: url::Url) -> Self { + Self { + cli: CeramicHttpClient::new(signer, private_key), + remote: reqwest::Client::new(), + url: remote, + } + } + + pub fn url_for_path(&self, path: &str) -> anyhow::Result { + let u = self.url.join(path)?; + Ok(u) + } + + pub async fn create_model(&self, model: &ModelDefinition) -> anyhow::Result { + let req = self.cli.create_model_request(model).await?; + let resp: api::PostResponse = self + .remote + .post(self.url_for_path(self.cli.streams_endpoint())?) + .json(&req) + .send() + .await? + .json() + .await?; + resp.resolve("create_model") + } + + pub async fn create_single_instance( + &self, + model_id: &StreamId, + ) -> anyhow::Result { + let req = self.cli.create_single_instance_request(model_id).await?; + let resp: api::PostResponse = self + .remote + .post(self.url_for_path(self.cli.streams_endpoint())?) + .json(&req) + .send() + .await? + .json() + .await?; + resp.resolve("create_single_instance") + } + + pub async fn create_list_instance( + &self, + model_id: &StreamId, + instance: T, + ) -> anyhow::Result { + let req = self + .cli + .create_list_instance_request(model_id, instance) + .await?; + let resp: api::PostResponse = self + .remote + .post(self.url_for_path(self.cli.streams_endpoint())?) + .json(&req) + .send() + .await? + .json() + .await?; + resp.resolve("create_list_instance") + } + + pub async fn update( + &self, + stream_id: &StreamId, + patch: json_patch::Patch, + ) -> anyhow::Result { + let resp = self.get(stream_id).await?; + let req = self.cli.create_update_request(&resp, patch).await?; + let res: api::PostResponse = self + .remote + .post(self.url_for_path(self.cli.commits_endpoint())?) + .json(&req) + .send() + .await? 
+ .json() + .await?; + res.resolve("Update failed") + } + + pub async fn get(&self, stream_id: &StreamId) -> anyhow::Result { + let base_endpoint = self.url_for_path(self.cli.streams_endpoint())?; + let endpoint = base_endpoint.join(&stream_id.to_string())?; + let resp: api::GetResponse = self.remote.get(endpoint).send().await?.json().await?; + Ok(resp) + } + + pub async fn get_as(&self, stream_id: &StreamId) -> anyhow::Result { + let resp = self.get(stream_id).await?; + let resp = serde_json::from_value(resp.state.content)?; + Ok(resp) + } + } +} + +//#[cfg(all(test, feature = "remote"))] +#[cfg(test)] +mod tests { + use super::remote::*; + use super::*; + use crate::model_definition::{GetRootSchema, ModelAccountRelation, ModelDefinition}; + use json_patch::ReplaceOperation; + use schemars::JsonSchema; + use serde::{Deserialize, Serialize}; + use std::str::FromStr; + + #[derive(Deserialize, JsonSchema, Serialize)] + #[schemars(rename_all = "camelCase", deny_unknown_fields)] + struct Ball { + creator: String, + radius: u32, + red: u32, + green: u32, + blue: u32, + } + + impl GetRootSchema for Ball {} + + fn ceramic_url() -> url::Url { + let u = + std::env::var("CERAMIC_URL").unwrap_or_else(|_| "http://localhost:7071".to_string()); + url::Url::parse(&u).unwrap() + } + + #[tokio::test] + async fn should_create_model() { + let did_str = "did:key:z6MkeqMVHDo67GE1CDMDXGvFK2eG98Ta2c2WB18m7SVXDb6f"; + let private_key = "3224d39677c03d4c3d83d6ede051db0f2c1df16f422ed509731dd6592a906d9c"; + let did = DidDocument::new(did_str); + let ceramic = CeramicRemoteHttpClient::new(did, private_key, ceramic_url()); + let model = ModelDefinition::new::("TestBall", ModelAccountRelation::List).unwrap(); + ceramic.create_model(&model).await.unwrap(); + } + + #[tokio::test] + async fn should_create_single_instance() { + let model = + StreamId::from_str("kjzl6hvfrbw6c7dye5scv2pi60rh2qh1wyweld0fhqb41vq51ac8emih9ick5tm") + .unwrap(); + let did_str = 
"did:key:z6MkeqMVHDo67GE1CDMDXGvFK2eG98Ta2c2WB18m7SVXDb6f"; + let private_key = "3224d39677c03d4c3d83d6ede051db0f2c1df16f422ed509731dd6592a906d9c"; + let did = DidDocument::new(did_str); + let ceramic = CeramicRemoteHttpClient::new(did, private_key, ceramic_url()); + ceramic.create_single_instance(&model).await.unwrap(); + } + + #[tokio::test] + async fn should_create_and_update_list() { + let model = + StreamId::from_str("kjzl6hvfrbw6c7dye5scv2pi60rh2qh1wyweld0fhqb41vq51ac8emih9ick5tm") + .unwrap(); + let did_str = "did:key:z6MkeqMVHDo67GE1CDMDXGvFK2eG98Ta2c2WB18m7SVXDb6f"; + let private_key = "3224d39677c03d4c3d83d6ede051db0f2c1df16f422ed509731dd6592a906d9c"; + let did = DidDocument::new(did_str); + let ceramic = CeramicRemoteHttpClient::new(did, private_key, ceramic_url()); + let stream_id = ceramic + .create_list_instance( + &model, + &Ball { + creator: did_str.to_string(), + radius: 1, + red: 2, + green: 3, + blue: 4, + }, + ) + .await + .unwrap(); + + let resp = ceramic.get(&stream_id).await.unwrap(); + assert_eq!(resp.state.metadata.model, model); + + let patch = json_patch::Patch(vec![json_patch::PatchOperation::Replace( + ReplaceOperation { + path: "/red".to_string(), + value: serde_json::json!(5), + }, + )]); + let _stream_id = ceramic.update(&stream_id, patch).await.unwrap(); + + let resp = ceramic.get(&stream_id).await.unwrap(); + assert_eq!(resp.stream_id, stream_id); + let complete: Ball = serde_json::from_value(resp.state.content).unwrap(); + assert_eq!(complete.red, 5); + } + + #[tokio::test] + async fn can_get_stream() { + let did_str = "did:key:z6MkeqMVHDo67GE1CDMDXGvFK2eG98Ta2c2WB18m7SVXDb6f"; + let did = DidDocument::new(did_str); + let private_key = "3224d39677c03d4c3d83d6ede051db0f2c1df16f422ed509731dd6592a906d9c"; + let ceramic = CeramicRemoteHttpClient::new(did, private_key, ceramic_url()); + let stream_id = + StreamId::from_str("kjzl6kcym7w8y6wplxhlco1n8au1haudfdeowzmbk0gsaf3lfjjvkxkhra9y6og") + .unwrap(); + let complete: Ball = 
ceramic.get_as(&stream_id).await.unwrap(); + assert_eq!(complete.green, 3); + } +} diff --git a/src/model_definition.rs b/src/model_definition.rs new file mode 100644 index 0000000..258f7d4 --- /dev/null +++ b/src/model_definition.rs @@ -0,0 +1,99 @@ +use ceramic_event::StreamId; +use schemars::schema::RootSchema; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +#[derive(Debug, Deserialize, Serialize)] +#[serde(rename_all = "camelCase", tag = "type")] +pub enum ModelAccountRelation { + List, + Single, +} + +#[derive(Debug, Deserialize, Serialize)] +#[serde(rename_all = "camelCase", tag = "type")] +pub enum ModelRelationDefinition { + Account, + Document { model: StreamId }, +} + +#[derive(Debug, Deserialize, Serialize)] +#[serde(rename_all = "camelCase", tag = "type")] +pub enum ModelViewDefinition { + DocumentAccount, + DocumentVersion, + RelationDocument { model: StreamId, property: String }, + RelationFrom { model: StreamId, property: String }, + RelationCountFrom { model: StreamId, property: String }, +} + +#[derive(Debug, Deserialize, Serialize)] +#[repr(transparent)] +pub struct CborSchema(serde_json::Value); + +#[derive(Debug, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ModelDefinition { + version: &'static str, + name: String, + #[serde(skip_serializing_if = "Option::is_none")] + description: Option, + schema: CborSchema, + account_relation: ModelAccountRelation, + #[serde(default, skip_serializing_if = "HashMap::is_empty")] + relations: HashMap, + #[serde(default, skip_serializing_if = "HashMap::is_empty")] + views: HashMap, +} + +impl ModelDefinition { + pub fn new( + name: &str, + account_relation: ModelAccountRelation, + ) -> anyhow::Result { + let schema = T::root_schema(); + let schema = serde_json::to_value(&schema)?; + Ok(Self { + version: "1.0", + name: name.to_string(), + description: None, + schema: CborSchema(schema), + account_relation, + relations: 
HashMap::default(), + views: HashMap::default(), + }) + } + + pub fn schema(&self) -> anyhow::Result { + let s = serde_json::from_value(self.schema.0.clone())?; + Ok(s) + } + + pub fn with_description(&mut self, description: String) -> &mut Self { + self.description = Some(description); + self + } + + pub fn with_relation(&mut self, key: String, relation: ModelRelationDefinition) -> &mut Self { + self.relations.insert(key, relation); + self + } + + pub fn with_view(&mut self, key: String, view: ModelViewDefinition) -> &mut Self { + self.views.insert(key, view); + self + } +} + +pub trait GetRootSchema: JsonSchema { + fn root_schema() -> RootSchema { + let settings = schemars::gen::SchemaSettings::default().with(|s| { + s.meta_schema = Some("https://json-schema.org/draft/2020-12/schema".to_string()); + s.option_nullable = true; + s.option_add_null_type = false; + }); + let gen = settings.into_generator(); + gen.into_root_schema_for::() + } +}