Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ref(api): Move chunk data types to submodule #2086

Merged
merged 1 commit into from
Jun 10, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 24 additions & 0 deletions src/api/data_types/chunking/artifact.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
use serde::{Deserialize, Serialize};
use sha1_smol::Digest;

use super::ChunkedFileState;

/// Request payload for assembling a chunked artifact on the server.
#[derive(Debug, Serialize)]
pub struct ChunkedArtifactRequest<'a> {
    /// Checksum of the entire file to be assembled.
    pub checksum: Digest,
    /// Checksums of the individual chunks that make up the file.
    pub chunks: &'a [Digest],
    /// Projects the artifact belongs to; omitted from the payload when empty.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub projects: Vec<String>,
    /// Release version; omitted from the payload when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub version: Option<&'a str>,
    /// Distribution identifier; omitted from the payload when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dist: Option<&'a str>,
}

/// Server response to an artifact assembly request.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AssembleArtifactsResponse {
    /// Current assembly state of the file on the server.
    pub state: ChunkedFileState,
    /// Chunks the server is still missing and expects to be uploaded.
    pub missing_chunks: Vec<Digest>,
    /// Optional detail message from the server (e.g. on error).
    pub detail: Option<String>,
}
48 changes: 48 additions & 0 deletions src/api/data_types/chunking/compression.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
use std::fmt;

use serde::{Deserialize, Deserializer};

/// Compression format applied to individual chunks before upload.
///
/// The discriminant values establish an ordering from "no compression"
/// upwards, so the variants can be compared via the derived `Ord`.
#[derive(Debug, Clone, Copy, Ord, PartialOrd, Eq, PartialEq, Default)]
pub enum ChunkCompression {
    /// No compression should be applied
    #[default]
    Uncompressed = 0,
    /// GZIP compression (including header)
    Gzip = 10,
    /// Brotli compression
    Brotli = 20,
}

impl ChunkCompression {
    /// Returns the form field name used for this compression format when
    /// sending chunks to the server ("file", "file_gzip", or "file_brotli").
    pub(in crate::api) fn field_name(self) -> &'static str {
        match self {
            Self::Uncompressed => "file",
            Self::Gzip => "file_gzip",
            Self::Brotli => "file_brotli",
        }
    }
}

impl fmt::Display for ChunkCompression {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
ChunkCompression::Uncompressed => write!(f, "uncompressed"),
ChunkCompression::Gzip => write!(f, "gzip"),
ChunkCompression::Brotli => write!(f, "brotli"),
}
}
}

impl<'de> Deserialize<'de> for ChunkCompression {
    /// Deserializes a compression name sent by the server.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let name = String::deserialize(deserializer)?;
        Ok(match name.as_str() {
            "gzip" => Self::Gzip,
            "brotli" => Self::Brotli,
            // We do not know this compression, so we assume no compression
            _ => Self::Uncompressed,
        })
    }
}
29 changes: 29 additions & 0 deletions src/api/data_types/chunking/dif.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
use std::collections::HashMap;

use serde::{Deserialize, Serialize};
use sha1_smol::Digest;
use symbolic::common::DebugId;

use crate::api::DebugInfoFile;

use super::ChunkedFileState;

/// Request payload for assembling a chunked debug information file (DIF).
#[derive(Debug, Serialize)]
pub struct ChunkedDifRequest<'a> {
    /// File name of the DIF.
    pub name: &'a str,
    /// Debug identifier of the file; omitted from the payload when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub debug_id: Option<DebugId>,
    /// Checksums of the chunks that make up this file.
    pub chunks: &'a [Digest],
}

/// Server response for a single DIF in an assembly request.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ChunkedDifResponse {
    /// Current assembly state of the file on the server.
    pub state: ChunkedFileState,
    /// Chunks the server is still missing and expects to be uploaded.
    pub missing_chunks: Vec<Digest>,
    /// Optional detail message from the server (e.g. on error).
    pub detail: Option<String>,
    /// Metadata of the assembled debug file, once available.
    pub dif: Option<DebugInfoFile>,
}

/// Batch assembly request: one [`ChunkedDifRequest`] per file checksum.
pub type AssembleDifsRequest<'a> = HashMap<Digest, ChunkedDifRequest<'a>>;
/// Batch assembly response: one [`ChunkedDifResponse`] per file checksum.
pub type AssembleDifsResponse = HashMap<Digest, ChunkedDifResponse>;
29 changes: 29 additions & 0 deletions src/api/data_types/chunking/file_state.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
use serde::Deserialize;

/// Assembly state of a chunked file on the server.
#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub enum ChunkedFileState {
    /// Assembly failed (terminal state).
    #[serde(rename = "error")]
    Error,
    /// The file is not known to the server.
    #[serde(rename = "not_found")]
    NotFound,
    /// The assembly job has been created but not started yet.
    #[serde(rename = "created")]
    Created,
    /// The server is currently assembling the file.
    #[serde(rename = "assembling")]
    Assembling,
    /// Assembly completed successfully (terminal state).
    #[serde(rename = "ok")]
    Ok,
}

impl ChunkedFileState {
pub fn is_finished(self) -> bool {
self == ChunkedFileState::Error || self == ChunkedFileState::Ok
}

pub fn is_pending(self) -> bool {
!self.is_finished()
}

pub fn is_err(self) -> bool {
self == ChunkedFileState::Error || self == ChunkedFileState::NotFound
}
}
7 changes: 7 additions & 0 deletions src/api/data_types/chunking/hash_algorithm.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
use serde::Deserialize;

/// Hash algorithm used to compute chunk checksums.
#[derive(Debug, Deserialize, Clone, Copy, Eq, PartialEq, Ord, PartialOrd)]
pub enum ChunkHashAlgorithm {
    /// SHA-1 — currently the only supported algorithm.
    #[serde(rename = "sha1")]
    Sha1,
}
16 changes: 16 additions & 0 deletions src/api/data_types/chunking/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
//! Data types used in the API for sending and receiving data
//! from the server for chunked file uploads.

mod artifact;
mod compression;
mod dif;
mod file_state;
mod hash_algorithm;
mod upload;

// Re-export all public types at the `chunking` module level.
pub use self::artifact::{AssembleArtifactsResponse, ChunkedArtifactRequest};
pub use self::compression::ChunkCompression;
pub use self::dif::{AssembleDifsRequest, AssembleDifsResponse, ChunkedDifRequest};
pub use self::file_state::ChunkedFileState;
pub use self::hash_algorithm::ChunkHashAlgorithm;
pub use self::upload::{ChunkUploadCapability, ChunkUploadOptions};
55 changes: 55 additions & 0 deletions src/api/data_types/chunking/upload/capability.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
use serde::{Deserialize, Deserializer};

/// A capability supported by the server's chunk upload endpoint.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum ChunkUploadCapability {
    /// Chunked upload of debug files
    DebugFiles,

    /// Chunked upload of release files
    ReleaseFiles,

    /// Chunked upload of standalone artifact bundles
    ArtifactBundles,

    /// Like `ArtifactBundles`, but with deduplicated chunk
    /// upload.
    ArtifactBundlesV2,

    /// Upload of PDBs and debug id overrides
    Pdbs,

    /// Upload of Portable PDBs
    PortablePdbs,

    /// Uploads of source archives
    Sources,

    /// Upload of BCSymbolMap and PList auxiliary DIFs
    BcSymbolmap,

    /// Upload of il2cpp line mappings
    Il2Cpp,

    /// Any other unsupported capability (ignored)
    Unknown,
}

impl<'de> Deserialize<'de> for ChunkUploadCapability {
    /// Deserializes a capability name sent by the server.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let name = String::deserialize(deserializer)?;
        // Capabilities added by newer servers map to `Unknown` so that
        // deserialization never fails on an unrecognized value.
        Ok(match name.as_str() {
            "debug_files" => Self::DebugFiles,
            "release_files" => Self::ReleaseFiles,
            "artifact_bundles" => Self::ArtifactBundles,
            "artifact_bundles_v2" => Self::ArtifactBundlesV2,
            "pdbs" => Self::Pdbs,
            "portablepdbs" => Self::PortablePdbs,
            "sources" => Self::Sources,
            "bcsymbolmaps" => Self::BcSymbolmap,
            "il2cpp" => Self::Il2Cpp,
            _ => Self::Unknown,
        })
    }
}
7 changes: 7 additions & 0 deletions src/api/data_types/chunking/upload/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
// Pull the parent module's items (e.g. `ChunkCompression`,
// `ChunkHashAlgorithm`) into scope so submodules can reach them via `super::`.
use super::*;

mod capability;
mod options;

pub use self::capability::ChunkUploadCapability;
pub use self::options::ChunkUploadOptions;
35 changes: 35 additions & 0 deletions src/api/data_types/chunking/upload/options.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
use serde::Deserialize;

use super::{ChunkCompression, ChunkHashAlgorithm, ChunkUploadCapability};

/// Options for the chunk upload endpoint, as reported by the server.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ChunkUploadOptions {
    /// URL of the chunk upload endpoint.
    pub url: String,
    /// Maximum number of chunks per request (server field "chunksPerRequest").
    #[serde(rename = "chunksPerRequest")]
    pub max_chunks: u64,
    /// Maximum size of a single request (server field "maxRequestSize";
    /// presumably bytes — confirm against the server implementation).
    #[serde(rename = "maxRequestSize")]
    pub max_size: u64,
    /// Maximum size of a single file; defaults to 0 when the server
    /// omits the field.
    #[serde(default)]
    pub max_file_size: u64,
    /// Maximum wait value; defaults to 0 when the server omits the field
    /// (units not visible here — TODO confirm).
    #[serde(default)]
    pub max_wait: u64,
    /// Hash algorithm used to compute chunk checksums.
    pub hash_algorithm: ChunkHashAlgorithm,
    /// Size of an individual chunk.
    pub chunk_size: u64,
    /// Concurrency level for uploads reported by the server.
    pub concurrency: u8,
    /// Compression formats the server accepts; empty when omitted.
    #[serde(default)]
    pub compression: Vec<ChunkCompression>,
    /// Capabilities supported by the endpoint; falls back to
    /// `default_chunk_upload_accept()` when the server omits the field.
    #[serde(default = "default_chunk_upload_accept")]
    pub accept: Vec<ChunkUploadCapability>,
}

impl ChunkUploadOptions {
    /// Returns whether the given capability is accepted by the chunk upload endpoint.
    pub fn supports(&self, capability: ChunkUploadCapability) -> bool {
        self.accept.iter().any(|&accepted| accepted == capability)
    }
}

/// Default capability set used when the server response omits the
/// `accept` field: only chunked debug file uploads.
fn default_chunk_upload_accept() -> Vec<ChunkUploadCapability> {
    [ChunkUploadCapability::DebugFiles].to_vec()
}
3 changes: 3 additions & 0 deletions src/api/data_types/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
mod chunking;

// Flatten the `chunking` types into `api::data_types` for convenient access.
pub use self::chunking::*;
Loading
Loading