From ffaf458a5418229ffe807ed2ddd1d53327fca8dd Mon Sep 17 00:00:00 2001 From: Daniel Szoke Date: Fri, 7 Jun 2024 14:14:58 -0400 Subject: [PATCH] ref(api): Move chunk data types to submodule - Create a new `data_types` submodule under the `api` module. - Create a `chunking` submodule under the `data_types` module. - Move all data types related to chunking from `src/api/mod.rs` to the `chunking` submodule, splitting their definitions further into submodules under `chunking`. Ref #2077 --- src/api/data_types/chunking/artifact.rs | 24 ++ src/api/data_types/chunking/compression.rs | 48 ++++ src/api/data_types/chunking/dif.rs | 30 +++ src/api/data_types/chunking/file_state.rs | 29 +++ src/api/data_types/chunking/hash_algorithm.rs | 7 + src/api/data_types/chunking/mod.rs | 16 ++ .../data_types/chunking/upload/capability.rs | 55 +++++ src/api/data_types/chunking/upload/mod.rs | 7 + src/api/data_types/chunking/upload/options.rs | 35 +++ src/api/data_types/mod.rs | 3 + src/api/mod.rs | 210 +----------------- 11 files changed, 258 insertions(+), 206 deletions(-) create mode 100644 src/api/data_types/chunking/artifact.rs create mode 100644 src/api/data_types/chunking/compression.rs create mode 100644 src/api/data_types/chunking/dif.rs create mode 100644 src/api/data_types/chunking/file_state.rs create mode 100644 src/api/data_types/chunking/hash_algorithm.rs create mode 100644 src/api/data_types/chunking/mod.rs create mode 100644 src/api/data_types/chunking/upload/capability.rs create mode 100644 src/api/data_types/chunking/upload/mod.rs create mode 100644 src/api/data_types/chunking/upload/options.rs create mode 100644 src/api/data_types/mod.rs diff --git a/src/api/data_types/chunking/artifact.rs b/src/api/data_types/chunking/artifact.rs new file mode 100644 index 0000000000..da617cec56 --- /dev/null +++ b/src/api/data_types/chunking/artifact.rs @@ -0,0 +1,24 @@ +use serde::{Deserialize, Serialize}; +use sha1_smol::Digest; + +use super::ChunkedFileState; + +#[derive(Debug, 
Serialize)] +pub struct ChunkedArtifactRequest<'a> { + pub checksum: Digest, + pub chunks: &'a [Digest], + #[serde(skip_serializing_if = "Vec::is_empty")] + pub projects: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub version: Option<&'a str>, + #[serde(skip_serializing_if = "Option::is_none")] + pub dist: Option<&'a str>, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct AssembleArtifactsResponse { + pub state: ChunkedFileState, + pub missing_chunks: Vec, + pub detail: Option, +} diff --git a/src/api/data_types/chunking/compression.rs b/src/api/data_types/chunking/compression.rs new file mode 100644 index 0000000000..7bb1a1eadb --- /dev/null +++ b/src/api/data_types/chunking/compression.rs @@ -0,0 +1,48 @@ +use std::fmt; + +use serde::{Deserialize, Deserializer}; + +#[derive(Debug, Clone, Copy, Ord, PartialOrd, Eq, PartialEq, Default)] +pub enum ChunkCompression { + /// No compression should be applied + #[default] + Uncompressed = 0, + /// GZIP compression (including header) + Gzip = 10, + /// Brotli compression + Brotli = 20, +} + +impl ChunkCompression { + pub(in crate::api) fn field_name(self) -> &'static str { + match self { + ChunkCompression::Uncompressed => "file", + ChunkCompression::Gzip => "file_gzip", + ChunkCompression::Brotli => "file_brotli", + } + } +} + +impl fmt::Display for ChunkCompression { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + ChunkCompression::Uncompressed => write!(f, "uncompressed"), + ChunkCompression::Gzip => write!(f, "gzip"), + ChunkCompression::Brotli => write!(f, "brotli"), + } + } +} + +impl<'de> Deserialize<'de> for ChunkCompression { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + Ok(match String::deserialize(deserializer)?.as_str() { + "gzip" => ChunkCompression::Gzip, + "brotli" => ChunkCompression::Brotli, + // We do not know this compression, so we assume no compression + _ => 
ChunkCompression::Uncompressed, + }) + } +} diff --git a/src/api/data_types/chunking/dif.rs b/src/api/data_types/chunking/dif.rs new file mode 100644 index 0000000000..0cdcdd6551 --- /dev/null +++ b/src/api/data_types/chunking/dif.rs @@ -0,0 +1,30 @@ +use std::collections::HashMap; + +use serde::{Deserialize, Serialize}; +use sha1_smol::Digest; +use symbolic::common::DebugId; + +use crate::api::DebugInfoFile; + +use super::ChunkedFileState; + +#[derive(Debug, Serialize)] +pub struct ChunkedDifRequest<'a> { + pub name: &'a str, + #[serde(skip_serializing_if = "Option::is_none")] + pub debug_id: Option, + pub chunks: &'a [Digest], +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ChunkedDifResponse { + // TODO: Should this be public? + pub state: ChunkedFileState, + pub missing_chunks: Vec, + pub detail: Option, + pub dif: Option, +} + +pub type AssembleDifsRequest<'a> = HashMap>; +pub type AssembleDifsResponse = HashMap; diff --git a/src/api/data_types/chunking/file_state.rs b/src/api/data_types/chunking/file_state.rs new file mode 100644 index 0000000000..d39a04854c --- /dev/null +++ b/src/api/data_types/chunking/file_state.rs @@ -0,0 +1,29 @@ +use serde::Deserialize; + +#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Hash, Ord, PartialOrd)] +pub enum ChunkedFileState { + #[serde(rename = "error")] + Error, + #[serde(rename = "not_found")] + NotFound, + #[serde(rename = "created")] + Created, + #[serde(rename = "assembling")] + Assembling, + #[serde(rename = "ok")] + Ok, +} + +impl ChunkedFileState { + pub fn is_finished(self) -> bool { + self == ChunkedFileState::Error || self == ChunkedFileState::Ok + } + + pub fn is_pending(self) -> bool { + !self.is_finished() + } + + pub fn is_err(self) -> bool { + self == ChunkedFileState::Error || self == ChunkedFileState::NotFound + } +} diff --git a/src/api/data_types/chunking/hash_algorithm.rs b/src/api/data_types/chunking/hash_algorithm.rs new file mode 100644 index 
0000000000..fbabcb4d35 --- /dev/null +++ b/src/api/data_types/chunking/hash_algorithm.rs @@ -0,0 +1,7 @@ +use serde::Deserialize; + +#[derive(Debug, Deserialize, Clone, Copy, Eq, PartialEq, Ord, PartialOrd)] +pub enum ChunkHashAlgorithm { + #[serde(rename = "sha1")] + Sha1, +} diff --git a/src/api/data_types/chunking/mod.rs b/src/api/data_types/chunking/mod.rs new file mode 100644 index 0000000000..e9e6d2fdd7 --- /dev/null +++ b/src/api/data_types/chunking/mod.rs @@ -0,0 +1,16 @@ +//! Data types used in the API for sending and receiving data +//! from the server. + +mod artifact; +mod compression; +mod dif; +mod file_state; +mod hash_algorithm; +mod upload; + +pub use self::artifact::{AssembleArtifactsResponse, ChunkedArtifactRequest}; +pub use self::compression::ChunkCompression; +pub use self::dif::{AssembleDifsRequest, AssembleDifsResponse, ChunkedDifRequest}; +pub use self::file_state::ChunkedFileState; +pub use self::hash_algorithm::ChunkHashAlgorithm; +pub use self::upload::{ChunkUploadCapability, ChunkUploadOptions}; diff --git a/src/api/data_types/chunking/upload/capability.rs b/src/api/data_types/chunking/upload/capability.rs new file mode 100644 index 0000000000..fe98123f9e --- /dev/null +++ b/src/api/data_types/chunking/upload/capability.rs @@ -0,0 +1,55 @@ +use serde::{Deserialize, Deserializer}; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub enum ChunkUploadCapability { + /// Chunked upload of debug files + DebugFiles, + + /// Chunked upload of release files + ReleaseFiles, + + /// Chunked upload of standalone artifact bundles + ArtifactBundles, + + /// Like `ArtifactBundles`, but with deduplicated chunk + /// upload. 
+ ArtifactBundlesV2, + + /// Upload of PDBs and debug id overrides + Pdbs, + + /// Upload of Portable PDBs + PortablePdbs, + + /// Uploads of source archives + Sources, + + /// Upload of BCSymbolMap and PList auxiliary DIFs + BcSymbolmap, + + /// Upload of il2cpp line mappings + Il2Cpp, + + /// Any other unsupported capability (ignored) + Unknown, +} + +impl<'de> Deserialize<'de> for ChunkUploadCapability { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + Ok(match String::deserialize(deserializer)?.as_str() { + "debug_files" => ChunkUploadCapability::DebugFiles, + "release_files" => ChunkUploadCapability::ReleaseFiles, + "artifact_bundles" => ChunkUploadCapability::ArtifactBundles, + "artifact_bundles_v2" => ChunkUploadCapability::ArtifactBundlesV2, + "pdbs" => ChunkUploadCapability::Pdbs, + "portablepdbs" => ChunkUploadCapability::PortablePdbs, + "sources" => ChunkUploadCapability::Sources, + "bcsymbolmaps" => ChunkUploadCapability::BcSymbolmap, + "il2cpp" => ChunkUploadCapability::Il2Cpp, + _ => ChunkUploadCapability::Unknown, + }) + } +} diff --git a/src/api/data_types/chunking/upload/mod.rs b/src/api/data_types/chunking/upload/mod.rs new file mode 100644 index 0000000000..65b897ec9d --- /dev/null +++ b/src/api/data_types/chunking/upload/mod.rs @@ -0,0 +1,7 @@ +use super::*; + +mod capability; +mod options; + +pub use self::capability::ChunkUploadCapability; +pub use self::options::ChunkUploadOptions; diff --git a/src/api/data_types/chunking/upload/options.rs b/src/api/data_types/chunking/upload/options.rs new file mode 100644 index 0000000000..5512318e60 --- /dev/null +++ b/src/api/data_types/chunking/upload/options.rs @@ -0,0 +1,35 @@ +use serde::Deserialize; + +use super::{ChunkCompression, ChunkHashAlgorithm, ChunkUploadCapability}; + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ChunkUploadOptions { + pub url: String, + #[serde(rename = "chunksPerRequest")] + pub max_chunks: u64, + 
#[serde(rename = "maxRequestSize")] + pub max_size: u64, + #[serde(default)] + pub max_file_size: u64, + #[serde(default)] + pub max_wait: u64, + pub hash_algorithm: ChunkHashAlgorithm, + pub chunk_size: u64, + pub concurrency: u8, + #[serde(default)] + pub compression: Vec, + #[serde(default = "default_chunk_upload_accept")] + pub accept: Vec, +} + +impl ChunkUploadOptions { + /// Returns whether the given capability is accepted by the chunk upload endpoint. + pub fn supports(&self, capability: ChunkUploadCapability) -> bool { + self.accept.contains(&capability) + } +} + +fn default_chunk_upload_accept() -> Vec { + vec![ChunkUploadCapability::DebugFiles] +} diff --git a/src/api/data_types/mod.rs b/src/api/data_types/mod.rs new file mode 100644 index 0000000000..3d0af372bd --- /dev/null +++ b/src/api/data_types/mod.rs @@ -0,0 +1,3 @@ +mod chunking; + +pub use self::chunking::*; diff --git a/src/api/mod.rs b/src/api/mod.rs index 8e2e39a2ca..19ff84e7b5 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -6,6 +6,7 @@ pub mod envelopes_api; mod connection_manager; +mod data_types; mod encoding; mod errors; mod pagination; @@ -35,7 +36,7 @@ use log::{debug, info, warn}; use parking_lot::Mutex; use regex::{Captures, Regex}; use sentry::protocol::{Exception, Values}; -use serde::de::{DeserializeOwned, Deserializer}; +use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use sha1_smol::Digest; use symbolic::common::DebugId; @@ -58,6 +59,8 @@ use connection_manager::CurlConnectionManager; use encoding::{PathArg, QueryArg}; use errors::{ApiError, ApiErrorKind, ApiResult, SentryError}; +pub use self::data_types::*; + lazy_static! 
{ static ref API: Mutex>> = Mutex::new(None); } @@ -2381,211 +2384,6 @@ impl Deploy { } } -#[derive(Debug, Deserialize, Clone, Copy, Eq, PartialEq, Ord, PartialOrd)] -pub enum ChunkHashAlgorithm { - #[serde(rename = "sha1")] - Sha1, -} - -#[derive(Debug, Clone, Copy, Ord, PartialOrd, Eq, PartialEq, Default)] -pub enum ChunkCompression { - /// No compression should be applied - #[default] - Uncompressed = 0, - /// GZIP compression (including header) - Gzip = 10, - /// Brotli compression - Brotli = 20, -} - -impl ChunkCompression { - fn field_name(self) -> &'static str { - match self { - ChunkCompression::Uncompressed => "file", - ChunkCompression::Gzip => "file_gzip", - ChunkCompression::Brotli => "file_brotli", - } - } -} - -impl fmt::Display for ChunkCompression { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - ChunkCompression::Uncompressed => write!(f, "uncompressed"), - ChunkCompression::Gzip => write!(f, "gzip"), - ChunkCompression::Brotli => write!(f, "brotli"), - } - } -} - -impl<'de> Deserialize<'de> for ChunkCompression { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - Ok(match String::deserialize(deserializer)?.as_str() { - "gzip" => ChunkCompression::Gzip, - "brotli" => ChunkCompression::Brotli, - // We do not know this compression, so we assume no compression - _ => ChunkCompression::Uncompressed, - }) - } -} - -#[derive(Clone, Copy, Debug, Eq, PartialEq)] -pub enum ChunkUploadCapability { - /// Chunked upload of debug files - DebugFiles, - - /// Chunked upload of release files - ReleaseFiles, - - /// Chunked upload of standalone artifact bundles - ArtifactBundles, - - /// Like `ArtifactBundles`, but with deduplicated chunk - /// upload. 
- ArtifactBundlesV2, - - /// Upload of PDBs and debug id overrides - Pdbs, - - /// Upload of Portable PDBs - PortablePdbs, - - /// Uploads of source archives - Sources, - - /// Upload of BCSymbolMap and PList auxiliary DIFs - BcSymbolmap, - - /// Upload of il2cpp line mappings - Il2Cpp, - - /// Any other unsupported capability (ignored) - Unknown, -} - -impl<'de> Deserialize<'de> for ChunkUploadCapability { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - Ok(match String::deserialize(deserializer)?.as_str() { - "debug_files" => ChunkUploadCapability::DebugFiles, - "release_files" => ChunkUploadCapability::ReleaseFiles, - "artifact_bundles" => ChunkUploadCapability::ArtifactBundles, - "artifact_bundles_v2" => ChunkUploadCapability::ArtifactBundlesV2, - "pdbs" => ChunkUploadCapability::Pdbs, - "portablepdbs" => ChunkUploadCapability::PortablePdbs, - "sources" => ChunkUploadCapability::Sources, - "bcsymbolmaps" => ChunkUploadCapability::BcSymbolmap, - "il2cpp" => ChunkUploadCapability::Il2Cpp, - _ => ChunkUploadCapability::Unknown, - }) - } -} - -fn default_chunk_upload_accept() -> Vec { - vec![ChunkUploadCapability::DebugFiles] -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ChunkUploadOptions { - pub url: String, - #[serde(rename = "chunksPerRequest")] - pub max_chunks: u64, - #[serde(rename = "maxRequestSize")] - pub max_size: u64, - #[serde(default)] - pub max_file_size: u64, - #[serde(default)] - pub max_wait: u64, - pub hash_algorithm: ChunkHashAlgorithm, - pub chunk_size: u64, - pub concurrency: u8, - #[serde(default)] - pub compression: Vec, - #[serde(default = "default_chunk_upload_accept")] - pub accept: Vec, -} - -impl ChunkUploadOptions { - /// Returns whether the given capability is accepted by the chunk upload endpoint. 
- pub fn supports(&self, capability: ChunkUploadCapability) -> bool { - self.accept.contains(&capability) - } -} - -#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Hash, Ord, PartialOrd)] -pub enum ChunkedFileState { - #[serde(rename = "error")] - Error, - #[serde(rename = "not_found")] - NotFound, - #[serde(rename = "created")] - Created, - #[serde(rename = "assembling")] - Assembling, - #[serde(rename = "ok")] - Ok, -} - -impl ChunkedFileState { - pub fn is_finished(self) -> bool { - self == ChunkedFileState::Error || self == ChunkedFileState::Ok - } - - pub fn is_pending(self) -> bool { - !self.is_finished() - } - - pub fn is_err(self) -> bool { - self == ChunkedFileState::Error || self == ChunkedFileState::NotFound - } -} - -#[derive(Debug, Serialize)] -pub struct ChunkedDifRequest<'a> { - pub name: &'a str, - #[serde(skip_serializing_if = "Option::is_none")] - pub debug_id: Option, - pub chunks: &'a [Digest], -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ChunkedDifResponse { - pub state: ChunkedFileState, - pub missing_chunks: Vec, - pub detail: Option, - pub dif: Option, -} - -pub type AssembleDifsRequest<'a> = HashMap>; -pub type AssembleDifsResponse = HashMap; - -#[derive(Debug, Serialize)] -pub struct ChunkedArtifactRequest<'a> { - pub checksum: Digest, - pub chunks: &'a [Digest], - #[serde(skip_serializing_if = "Vec::is_empty")] - pub projects: Vec, - #[serde(skip_serializing_if = "Option::is_none")] - pub version: Option<&'a str>, - #[serde(skip_serializing_if = "Option::is_none")] - pub dist: Option<&'a str>, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct AssembleArtifactsResponse { - pub state: ChunkedFileState, - pub missing_chunks: Vec, - pub detail: Option, -} - #[derive(Debug, Serialize, Clone)] pub struct PatchSet { pub path: String,