Add SHA-512 support #814

Status: Open. Wants to merge 9 commits into base: develop.
Changes from 7 commits.
61 changes: 43 additions & 18 deletions tough/src/cache.rs
@@ -1,5 +1,5 @@
use crate::error::{self, Result};
use crate::fetch::{fetch_max_size, fetch_sha256};
use crate::fetch::{fetch_max_size, fetch_sha256, fetch_sha512};
use crate::schema::{RoleType, Target};
use crate::transport::IntoVec;
use crate::{encode_filename, Prefix, Repository, TargetName};
@@ -257,15 +257,27 @@ impl Repository {
&self,
target: &Target,
name: &TargetName,
) -> (Vec<u8>, String) {
let sha256 = &target.hashes.sha256.clone().into_vec();
) -> Result<(Vec<u8>, String)> {
let sha256 = target.hashes.sha256.as_ref().map(|d| d.clone().into_vec());
let sha512 = target.hashes.sha512.as_ref().map(|d| d.clone().into_vec());

let digest = if let Some(sha256) = sha256 {
sha256
} else if let Some(sha512) = sha512 {
sha512
} else {
return Err(error::NoValidHashSnafu {
name: format!("{name:?}"),
}
.build());
};
if self.consistent_snapshot {
(
sha256.clone(),
format!("{}.{}", hex::encode(sha256), name.resolved()),
)
Ok((
digest.clone(),
format!("{}.{}", hex::encode(digest), name.resolved()),
))
} else {
(sha256.clone(), name.resolved().to_owned())
Ok((digest, name.resolved().to_owned()))
}
}

@@ -284,15 +296,28 @@ impl Repository {
path: filename,
url: self.targets_base_url.clone(),
})?;
Ok(fetch_sha256(
self.transport.as_ref(),
url.clone(),
target.length,
"targets.json",
digest,
)
.await?
.context(error::TransportSnafu { url })
.boxed())
if target.hashes.sha256.is_some() {
Ok(fetch_sha256(
self.transport.as_ref(),
url.clone(),
target.length,
"targets.json",
digest,
)
.await?
.context(error::TransportSnafu { url })
.boxed())
} else {
Ok(fetch_sha512(
self.transport.as_ref(),
url.clone(),
target.length,
"targets.json",
digest,
)
.await?
.context(error::TransportSnafu { url })
.boxed())
}
}
}
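The selection rule above prefers SHA-256 when both digests are present, falls back to SHA-512, and returns an error when neither exists. A minimal sketch of that rule in isolation (the `pick_digest` helper and its plain `Option<Vec<u8>>` arguments are illustrative, not code from this PR):

```rust
// Illustrative sketch of the digest-preference rule used in cache.rs:
// SHA-256 wins when both hashes are recorded, SHA-512 is the fallback,
// and a target carrying neither digest is rejected.
fn pick_digest(
    sha256: Option<Vec<u8>>,
    sha512: Option<Vec<u8>>,
) -> Result<Vec<u8>, String> {
    sha256
        .or(sha512)
        .ok_or_else(|| "no valid hash was found for target".to_string())
}
```

Under `consistent_snapshot`, whichever digest is chosen is also hex-encoded into the cached filename as `<digest>.<target name>`.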
18 changes: 12 additions & 6 deletions tough/src/editor/mod.rs
@@ -23,7 +23,7 @@ use crate::schema::{
use crate::transport::{IntoVec, Transport};
use crate::{encode_filename, Limits};
use crate::{Repository, TargetName};
use aws_lc_rs::digest::{SHA256, SHA256_OUTPUT_LEN};
use aws_lc_rs::digest::{SHA256, SHA256_OUTPUT_LEN, SHA512, SHA512_OUTPUT_LEN};
use aws_lc_rs::rand::SystemRandom;
use chrono::{DateTime, Utc};
use serde_json::Value;
@@ -112,13 +112,17 @@ impl RepositoryEditor {
}
}

let mut digest = [0; SHA256_OUTPUT_LEN];
digest.copy_from_slice(aws_lc_rs::digest::digest(&SHA256, &root_buf).as_ref());
let mut sha256_digest = [0; SHA256_OUTPUT_LEN];
sha256_digest.copy_from_slice(aws_lc_rs::digest::digest(&SHA256, &root_buf).as_ref());

let mut sha512_digest = [0; SHA512_OUTPUT_LEN];
sha512_digest.copy_from_slice(aws_lc_rs::digest::digest(&SHA512, &root_buf).as_ref());

let signed_root = SignedRole {
signed: root,
buffer: root_buf,
sha256: digest,
sha256: sha256_digest,
sha512: sha512_digest,
length: root_buf_len,
};

@@ -708,7 +712,8 @@
{
Metafile {
hashes: Some(Hashes {
sha256: role.sha256.to_vec().into(),
sha256: Some(role.sha256.to_vec().into()),
sha512: Some(role.sha512.to_vec().into()),
_extra: HashMap::new(),
}),
length: Some(role.length),
@@ -746,7 +751,8 @@
{
Metafile {
hashes: Some(Hashes {
sha256: role.sha256.to_vec().into(),
sha256: Some(role.sha256.to_vec().into()),
sha512: Some(role.sha512.to_vec().into()),
_extra: HashMap::new(),
}),
length: Some(role.length),
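The root-metadata handling now hashes the same serialized buffer twice, once per algorithm. A self-contained sketch of that step, assuming only the `aws_lc_rs` crate (the `both_digests` helper is hypothetical, not part of this PR):

```rust
use aws_lc_rs::digest::{digest, SHA256, SHA256_OUTPUT_LEN, SHA512, SHA512_OUTPUT_LEN};

// Hash one serialized buffer with both algorithms, as the editor now does
// for root metadata and signed roles.
fn both_digests(buf: &[u8]) -> ([u8; SHA256_OUTPUT_LEN], [u8; SHA512_OUTPUT_LEN]) {
    let mut sha256 = [0u8; SHA256_OUTPUT_LEN];
    sha256.copy_from_slice(digest(&SHA256, buf).as_ref());

    let mut sha512 = [0u8; SHA512_OUTPUT_LEN];
    sha512.copy_from_slice(digest(&SHA512, buf).as_ref());

    (sha256, sha512)
}
```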
154 changes: 130 additions & 24 deletions tough/src/editor/signed.rs
@@ -14,7 +14,7 @@ use crate::schema::{
Targets, Timestamp,
};
use async_trait::async_trait;
use aws_lc_rs::digest::{digest, SHA256, SHA256_OUTPUT_LEN};
use aws_lc_rs::digest::{digest, SHA256, SHA256_OUTPUT_LEN, SHA512, SHA512_OUTPUT_LEN};
use aws_lc_rs::rand::SecureRandom;
use futures::TryStreamExt;
use olpc_cjson::CanonicalFormatter;
@@ -48,6 +48,7 @@ pub struct SignedRole<T> {
pub(crate) signed: Signed<T>,
pub(crate) buffer: Vec<u8>,
pub(crate) sha256: [u8; SHA256_OUTPUT_LEN],
pub(crate) sha512: [u8; SHA512_OUTPUT_LEN],
pub(crate) length: u64,
}

@@ -123,12 +124,17 @@
let mut sha256 = [0; SHA256_OUTPUT_LEN];
sha256.copy_from_slice(digest(&SHA256, &buffer).as_ref());

// Calculate SHA-512
let mut sha512 = [0; SHA512_OUTPUT_LEN];
sha512.copy_from_slice(digest(&SHA512, &buffer).as_ref());

// Create the `SignedRole` containing the `Signed<role>`, serialized
// buffer, length, and SHA-256/SHA-512 digests.
let signed_role = SignedRole {
signed: role,
buffer,
sha256,
sha512,
length,
};

@@ -146,9 +152,22 @@
&self.buffer
}

/// Provides the sha256 digest of the signed role.
pub fn sha256(&self) -> &[u8] {
&self.sha256
/// Provides the sha256 digest of the signed role, if available.
pub fn sha256(&self) -> Option<&[u8]> {
if self.sha256.iter().any(|&byte| byte != 0) {
Some(&self.sha256)
} else {
None
}
}

/// Provides the sha512 digest of the signed role, if available.
pub fn sha512(&self) -> Option<&[u8]> {
if self.sha512.iter().any(|&byte| byte != 0) {
Some(&self.sha512)
} else {
None
}
}
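Because `sha256()` and `sha512()` now return `Option<&[u8]>` (treating an all-zero array as "not computed"), callers pick whichever digest is available. A hypothetical caller, not taken from this PR:

```rust
// Hypothetical caller: prefer a role's SHA-256 digest, fall back to
// SHA-512, and hex-encode whichever one is present.
fn role_digest_hex(sha256: Option<&[u8]>, sha512: Option<&[u8]>) -> Option<String> {
    sha256.or(sha512).map(hex::encode)
}
```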

/// Provides the length in bytes of the serialized representation of the signed role.
@@ -762,18 +781,57 @@ trait TargetsWalker {
// should match, or we alert the caller; if target replacement is intended, it should
// happen earlier, in RepositoryEditor.
ensure!(
target_from_path.hashes.sha256 == repo_target.hashes.sha256,
target_from_path.hashes.sha256 == repo_target.hashes.sha256
|| target_from_path.hashes.sha512 == repo_target.hashes.sha512,
error::HashMismatchSnafu {
context: "target",
calculated: hex::encode(target_from_path.hashes.sha256),
expected: hex::encode(&repo_target.hashes.sha256),
calculated: format!(
"SHA-256: {}, SHA-512: {}",
hex::encode(
target_from_path
.hashes
.sha256
.as_ref()
.map_or(&[] as &[u8], |d| d.as_ref())
),
hex::encode(
target_from_path
.hashes
.sha512
.as_ref()
.map_or(&[] as &[u8], |d| d.as_ref())
)
),
expected: format!(
"SHA-256: {}, SHA-512: {}",
hex::encode(
repo_target
.hashes
.sha256
.as_ref()
.map_or(&[] as &[u8], |d| d.as_ref())
),
hex::encode(
repo_target
.hashes
.sha512
.as_ref()
.map_or(&[] as &[u8], |d| d.as_ref())
)
),
}
);

let dest = if self.consistent_snapshot() {
outdir.join(format!(
"{}.{}",
hex::encode(&target_from_path.hashes.sha256),
hex::encode(
target_from_path
.hashes
.sha256
.or(target_from_path.hashes.sha512)
.unwrap()
),
target_name.resolved()
))
} else {
@@ -790,22 +848,8 @@ trait TargetsWalker {
// unique; if we're not, then there could be a target from another repo with the same name
// but different checksum. We can't assume such conflicts are OK, so we fail.
if !self.consistent_snapshot() {
let url = Url::from_file_path(&dest)
.ok() // dump unhelpful `()` error
.context(error::FileUrlSnafu { path: &dest })?;

let stream = FilesystemTransport
.fetch(url.clone())
.await
.with_context(|_| error::TransportSnafu { url: url.clone() })?;
let stream = DigestAdapter::sha256(stream, &repo_target.hashes.sha256, url.clone());

// The act of reading with the DigestAdapter verifies the checksum, assuming the read
// succeeds.
stream
.try_for_each(|_| ready(Ok(())))
.await
.context(error::TransportSnafu { url })?;
self.verify_existing_target(dest.clone(), repo_target)
.await?;
}

let metadata = symlink_metadata(&dest)
@@ -819,4 +863,66 @@
error::InvalidFileTypeSnafu { path: dest }.fail()
}
}

// Helper used by `target_path`: verifies an existing on-disk target against its recorded SHA-256 or SHA-512 digest.
async fn verify_existing_target(&self, dest: PathBuf, repo_target: &&Target) -> Result<()> {
let url = Url::from_file_path(&dest)
.ok()
.context(error::FileUrlSnafu { path: &dest })?;

let stream = FilesystemTransport
.fetch(url.clone())
.await
.with_context(|_| error::TransportSnafu { url: url.clone() })?;

let sha256_verified = if let Some(sha256) = &repo_target.hashes.sha256 {
let sha256_stream = DigestAdapter::sha256(stream, sha256, url.clone());
sha256_stream.try_for_each(|_| ready(Ok(()))).await.is_ok()
} else {
false
};

let sha512_verified = if sha256_verified {
true
} else {
let stream = FilesystemTransport
.fetch(url.clone())
.await
.with_context(|_| error::TransportSnafu { url: url.clone() })?;

if let Some(sha512) = &repo_target.hashes.sha512 {
let sha512_stream = DigestAdapter::sha512(stream, sha512, url.clone());
sha512_stream.try_for_each(|_| ready(Ok(()))).await.is_ok()
} else {
false
}
};

if !sha256_verified && !sha512_verified {
error::HashMismatchSnafu {
context: "target",
calculated: format!(
"SHA-256: {}, SHA-512: {}",
hex::encode(
repo_target
.hashes
.sha256
.as_ref()
.map_or(&[] as &[u8], |d| d.as_ref())
),
hex::encode(
repo_target
.hashes
.sha512
.as_ref()
.map_or(&[] as &[u8], |d| d.as_ref())
)
),
expected: "None".to_string(),
}
.fail()
} else {
Ok(())
}
}
}
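`verify_existing_target` re-reads the file through a `DigestAdapter` up to twice, once per recorded digest, and accepts the file if at least one digest verifies. The acceptance rule, sketched in memory without the transport layer (`matches_recorded_hashes` is illustrative only, not crate code):

```rust
use aws_lc_rs::digest::{digest, SHA256, SHA512};

// Illustrative acceptance rule: an on-disk target passes if it matches
// at least one of the digests recorded in the repository metadata.
fn matches_recorded_hashes(data: &[u8], sha256: Option<&[u8]>, sha512: Option<&[u8]>) -> bool {
    let sha256_ok = sha256.map_or(false, |h| digest(&SHA256, data).as_ref() == h);
    let sha512_ok = sha512.map_or(false, |h| digest(&SHA512, data).as_ref() == h);
    sha256_ok || sha512_ok
}
```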
3 changes: 3 additions & 0 deletions tough/src/error.rs
@@ -141,6 +141,9 @@ pub enum Error {
#[snafu(display("Source path for target must be file or symlink - '{}'", path.display()))]
InvalidFileType { path: PathBuf, backtrace: Backtrace },

#[snafu(display("No valid hash was found for target '{:?}'", name))]
NoValidHash { name: String, backtrace: Backtrace },

#[snafu(display("Encountered an invalid target name: {}", inner))]
InvalidTargetName { inner: String, backtrace: Backtrace },

11 changes: 11 additions & 0 deletions tough/src/fetch.rs
@@ -32,3 +32,14 @@ pub(crate) async fn fetch_sha256(
let stream = fetch_max_size(transport, url.clone(), size, specifier).await?;
Ok(DigestAdapter::sha256(stream, sha256, url))
}

pub(crate) async fn fetch_sha512(
transport: &dyn Transport,
url: Url,
size: u64,
specifier: &'static str,
sha512: &[u8],
) -> Result<TransportStream> {
let stream = fetch_max_size(transport, url.clone(), size, specifier).await?;
Ok(DigestAdapter::sha512(stream, sha512, url))
}
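A call site is expected to select the digest-checking fetch that matches the hash it holds, mirroring the branch added to cache.rs. A rough sketch, assuming the crate-internal `Transport`, `TransportStream`, and `Result` types are in scope (`fetch_with_available_hash` is hypothetical):

```rust
// Hypothetical crate-internal call site: pick fetch_sha256 or fetch_sha512
// based on which digest the metadata provides; neither digest means the
// caller has to surface an error (error::NoValidHash in this PR).
async fn fetch_with_available_hash(
    transport: &dyn Transport,
    url: Url,
    size: u64,
    sha256: Option<&[u8]>,
    sha512: Option<&[u8]>,
) -> Option<Result<TransportStream>> {
    match (sha256, sha512) {
        (Some(hash), _) => Some(fetch_sha256(transport, url, size, "targets.json", hash).await),
        (None, Some(hash)) => Some(fetch_sha512(transport, url, size, "targets.json", hash).await),
        (None, None) => None,
    }
}
```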
11 changes: 10 additions & 1 deletion tough/src/io.rs
@@ -2,7 +2,7 @@
// SPDX-License-Identifier: MIT OR Apache-2.0

use crate::{error, transport::TransportStream, TransportError};
use aws_lc_rs::digest::{Context, SHA256};
use aws_lc_rs::digest::{Context, SHA256, SHA512};
use futures::StreamExt;
use futures_core::Stream;
use std::{convert::TryInto, path::Path, task::Poll};
@@ -26,6 +26,15 @@ impl DigestAdapter {
}
.boxed()
}
pub(crate) fn sha512(stream: TransportStream, hash: &[u8], url: Url) -> TransportStream {
Self {
url,
stream,
hash: hash.to_owned(),
digest: Context::new(&SHA512),
}
.boxed()
}
}

impl Stream for DigestAdapter {
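Conceptually, the adapter feeds every chunk it forwards into a running digest context and compares the finished digest with the expected hash once the stream is exhausted; the real code does this inside its `Stream` implementation, surfacing a mismatch as a stream error. A simplified, synchronous illustration (not the crate's actual code):

```rust
use aws_lc_rs::digest::{Context, SHA512};

// Simplified, blocking version of what DigestAdapter::sha512 arranges:
// hash each chunk as it goes by, then check the final digest.
fn verify_sha512_chunks<'a>(chunks: impl IntoIterator<Item = &'a [u8]>, expected: &[u8]) -> bool {
    let mut ctx = Context::new(&SHA512);
    for chunk in chunks {
        ctx.update(chunk);
    }
    ctx.finish().as_ref() == expected
}
```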