Merge pull request #19 from rustaceanrob/better-err
refact(meta): standardize errors
rustaceanrob authored Jun 25, 2024
2 parents 35d11f0 + e051c5a commit 9a54cd2
Showing 14 changed files with 142 additions and 137 deletions.
2 changes: 1 addition & 1 deletion src/chain/chain.rs
@@ -64,7 +64,7 @@ impl Chain {
         let mut loaded_headers = db
             .load(anchor.height)
             .await
-            .map_err(|_| HeaderPersistenceError::SQLite)?;
+            .map_err(HeaderPersistenceError::Database)?;
         if loaded_headers.len().gt(&0) {
             if loaded_headers
                 .values()
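Note: the one-line change above works because a tuple-variant constructor such as HeaderPersistenceError::Database is itself a plain function from its field type to the enum, so it can be passed to map_err directly and the underlying database error is kept rather than discarded by a |_| closure. A minimal sketch of the pattern, using trimmed-down stand-in enums rather than the crate's real definitions:

#[derive(Debug)]
enum DatabaseError {
    Load,
}

#[derive(Debug)]
enum HeaderPersistenceError {
    // Wraps the database failure instead of flattening it into a unit variant.
    Database(DatabaseError),
}

fn load_headers() -> Result<(), HeaderPersistenceError> {
    // Pretend this result came from the persistence layer.
    let from_db: Result<(), DatabaseError> = Err(DatabaseError::Load);
    // `HeaderPersistenceError::Database` is an fn(DatabaseError) -> HeaderPersistenceError,
    // so no closure is needed and the cause is preserved in the returned error.
    from_db.map_err(HeaderPersistenceError::Database)?;
    Ok(())
}

fn main() {
    println!("{:?}", load_headers()); // prints: Err(Database(Load))
}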
42 changes: 23 additions & 19 deletions src/chain/error.rs
@@ -1,43 +1,47 @@
 use thiserror::Error;
 
+use crate::db::error::DatabaseError;
+
 #[derive(Error, Debug, PartialEq)]
-pub enum HeaderSyncError {
-    #[error("empty headers message")]
+pub(crate) enum HeaderSyncError {
+    #[error("Empty headers message.")]
     EmptyMessage,
-    #[error("the headers received do not connect")]
+    #[error("The headers received do not connect.")]
     HeadersNotConnected,
-    #[error("one or more headers does not match its own PoW target")]
+    #[error("One or more headers does not match its own PoW target.")]
     InvalidHeaderWork,
-    #[error("one or more headers does not have a valid block time")]
+    #[error("One or more headers does not have a valid block time.")]
     InvalidHeaderTimes,
-    #[error("the sync peer sent us a discontinuous chain")]
+    #[error("The sync peer sent us a discontinuous chain.")]
     PreCheckpointFork,
-    #[error("a checkpoint in the chain did not match")]
+    #[error("A checkpoint in the chain did not match.")]
     InvalidCheckpoint,
-    #[error("a computed difficulty adjustment did not match")]
+    #[error("A computed difficulty adjustment did not match.")]
     MiscalculatedDifficulty,
-    #[error("the peer sent us a chain that does not connect to any header of ours")]
+    #[error("The peer sent us a chain that does not connect to any header of ours.")]
     FloatingHeaders,
-    #[error("less work fork")]
+    #[error("A peer sent us a fork with less work than our chain.")]
     LessWorkFork,
-    #[error("the database could not load a fork")]
+    #[error("The database could not load a fork.")]
     DbError,
 }
 
 /// Errors with the block header representation that prevent the node from operating.
 #[derive(Error, Debug)]
 pub enum HeaderPersistenceError {
     #[error("the headers loaded from the persistence layer do not match the network")]
     GenesisMismatch,
-    #[error("the headers loaded from persistence do not link together")]
+    /// The block headers do not point to each other in a list.
+    #[error("The headers loaded from persistence do not link together.")]
     HeadersDoNotLink,
-    #[error("the headers loaded do not match a known checkpoint")]
+    /// Some predefined checkpoint does not match.
+    #[error("The headers loaded do not match a known checkpoint.")]
     MismatchedCheckpoints,
-    #[error("the headers could not be loaded from sqlite")]
-    SQLite,
+    /// A database error.
+    #[error("The headers could not be loaded from sqlite.")]
+    Database(DatabaseError),
 }
 
 #[derive(Error, Debug)]
-pub enum BlockScanError {
-    #[error("unknown block hash")]
+pub(crate) enum BlockScanError {
+    #[error("The block sent to us does not have a known hash.")]
     NoBlockHash,
 }
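For readers unfamiliar with thiserror, the #[error("...")] attributes above are what produce the user-facing messages: the derive macro generates a Display impl that prints the attribute string for each variant. A hedged sketch with a cut-down copy of the enums (it assumes a thiserror dependency); note that a wrapped field like Database(DatabaseError) is carried purely as data here, since without #[from] or #[source] thiserror does not expose it through Error::source():

use thiserror::Error;

#[derive(Error, Debug)]
enum DatabaseError {
    #[error("Loading a query or data from the database failed.")]
    Load,
}

#[derive(Error, Debug)]
enum HeaderPersistenceError {
    #[error("The headers loaded from persistence do not link together.")]
    HeadersDoNotLink,
    #[error("The headers could not be loaded from sqlite.")]
    Database(DatabaseError),
}

fn main() {
    // Display comes from the attribute strings.
    println!("{}", HeaderPersistenceError::HeadersDoNotLink);
    // The wrapped cause is still available as data for matching and logging.
    let err = HeaderPersistenceError::Database(DatabaseError::Load);
    if let HeaderPersistenceError::Database(inner) = err {
        println!("{inner}");
    }
}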
3 changes: 2 additions & 1 deletion src/chain/mod.rs
@@ -3,6 +3,7 @@ pub(crate) mod block_queue;
 pub(crate) mod chain;
 /// Expected block header checkpoints and corresponding structure.
 pub mod checkpoints;
-pub(crate) mod error;
+/// Errors associated with the blockchain representation.
+pub mod error;
 pub(crate) mod header_batch;
 pub(crate) mod header_chain;
30 changes: 24 additions & 6 deletions src/db/error.rs
@@ -1,17 +1,35 @@
 use thiserror::Error;
 
+/// Potential errors encountered by a persistence layer.
 #[derive(Error, Debug)]
 pub enum DatabaseError {
-    #[error("loading a query or data from the database failed")]
-    LoadError,
-    #[error("writing a query or data from the database failed")]
-    WriteError,
+    /// Loading a query or data from the database failed.
+    #[error("Loading a query or data from the database failed.")]
+    Load,
+    /// Writing a query or data from the database failed.
+    #[error("Writing a query or data from the database failed.")]
+    Write,
+    /// The data loading is corrupted.
+    #[error("Loaded data has been corrupted.")]
+    Corruption,
+    /// Serializing an object to write to the database failed.
+    #[error("Serializing an object to write to the database failed.")]
+    Serialization,
+    /// Deserializing an object after loading from the database failed.
+    #[error("Deserializing an object after loading from the database failed.")]
+    Deserialization,
+    /// Opening the database file failed.
+    #[error("Opening the database file failed.")]
+    Open,
 }
 
+/// Errors when managing persisted peers.
 #[derive(Error, Debug)]
 pub enum PeerManagerError {
-    #[error("DNS failed to respond")]
+    /// DNS failed to respond.
+    #[error("DNS failed to respond.")]
     Dns,
-    #[error("reading or writing from the database failed")]
+    /// Reading or writing from the database failed.
+    #[error("Reading or writing from the database failed.")]
     Database(DatabaseError),
 }
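Because DatabaseError is now split into more specific variants, and PeerManagerError wraps it, downstream code can react differently per failure class. A small illustrative sketch, with slim stand-in enums and recovery hints that are not part of this patch:

#[allow(dead_code)]
#[derive(Debug)]
enum DatabaseError {
    Load,
    Write,
    Corruption,
    Serialization,
    Deserialization,
    Open,
}

#[allow(dead_code)]
#[derive(Debug)]
enum PeerManagerError {
    Dns,
    Database(DatabaseError),
}

fn describe(err: &PeerManagerError) -> &'static str {
    match err {
        // A DNS failure is often transient, so retrying discovery later is reasonable.
        PeerManagerError::Dns => "retry peer discovery later",
        // Corrupted or unreadable data usually means rebuilding the peer store.
        PeerManagerError::Database(DatabaseError::Corruption) => "recreate the peer database",
        PeerManagerError::Database(DatabaseError::Open) => "check the database path and permissions",
        // Everything else is an ordinary read/write or (de)serialization problem.
        PeerManagerError::Database(_) => "log the failure and keep running",
    }
}

fn main() {
    let err = PeerManagerError::Database(DatabaseError::Corruption);
    println!("{}", describe(&err));
}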
4 changes: 2 additions & 2 deletions src/db/memory/peers.rs
@@ -43,8 +43,8 @@ impl PeerStore for StatelessPeerStore {
             iter.choose(&mut rng).map(|(key, _)| *key)
         };
         match random_peer {
-            Some(ip) => self.list.remove(&ip).ok_or(DatabaseError::LoadError),
-            None => Err(DatabaseError::LoadError),
+            Some(ip) => self.list.remove(&ip).ok_or(DatabaseError::Load),
+            None => Err(DatabaseError::Load),
         }
     }
 
71 changes: 30 additions & 41 deletions src/db/sqlite/headers.rs
@@ -41,10 +41,8 @@ impl SqliteHeaderDb {
         if !path.exists() {
             fs::create_dir_all(&path).unwrap();
         }
-        let conn =
-            Connection::open(path.join("headers.db")).map_err(|_| DatabaseError::LoadError)?;
-        conn.execute(SCHEMA, [])
-            .map_err(|_| DatabaseError::LoadError)?;
+        let conn = Connection::open(path.join("headers.db")).map_err(|_| DatabaseError::Open)?;
+        conn.execute(SCHEMA, []).map_err(|_| DatabaseError::Load)?;
         Ok(Self {
             network,
             conn: Arc::new(Mutex::new(conn)),
@@ -58,23 +56,21 @@ impl HeaderStore for SqliteHeaderDb {
         let mut headers = BTreeMap::<u32, Header>::new();
         let stmt = "SELECT * FROM headers ORDER BY height";
         let write_lock = self.conn.lock().await;
-        let mut query = write_lock
-            .prepare(stmt)
-            .map_err(|_| DatabaseError::LoadError)?;
-        let mut rows = query.query([]).map_err(|_| DatabaseError::LoadError)?;
-        while let Some(row) = rows.next().map_err(|_| DatabaseError::LoadError)? {
-            let height: u32 = row.get(0).map_err(|_| DatabaseError::LoadError)?;
+        let mut query = write_lock.prepare(stmt).map_err(|_| DatabaseError::Load)?;
+        let mut rows = query.query([]).map_err(|_| DatabaseError::Load)?;
+        while let Some(row) = rows.next().map_err(|_| DatabaseError::Load)? {
+            let height: u32 = row.get(0).map_err(|_| DatabaseError::Load)?;
             // The anchor height should not be included in the chain, as the anchor is non-inclusive
             if height.le(&anchor_height) {
                 continue;
             }
-            let hash: String = row.get(1).map_err(|_| DatabaseError::LoadError)?;
-            let version: i32 = row.get(2).map_err(|_| DatabaseError::LoadError)?;
-            let prev_hash: String = row.get(3).map_err(|_| DatabaseError::LoadError)?;
-            let merkle_root: String = row.get(4).map_err(|_| DatabaseError::LoadError)?;
-            let time: u32 = row.get(5).map_err(|_| DatabaseError::LoadError)?;
-            let bits: u32 = row.get(6).map_err(|_| DatabaseError::LoadError)?;
-            let nonce: u32 = row.get(7).map_err(|_| DatabaseError::LoadError)?;
+            let hash: String = row.get(1).map_err(|_| DatabaseError::Load)?;
+            let version: i32 = row.get(2).map_err(|_| DatabaseError::Load)?;
+            let prev_hash: String = row.get(3).map_err(|_| DatabaseError::Load)?;
+            let merkle_root: String = row.get(4).map_err(|_| DatabaseError::Load)?;
+            let time: u32 = row.get(5).map_err(|_| DatabaseError::Load)?;
+            let bits: u32 = row.get(6).map_err(|_| DatabaseError::Load)?;
+            let nonce: u32 = row.get(7).map_err(|_| DatabaseError::Load)?;
 
             let next_header = Header {
                 version: Version::from_consensus(version),
@@ -84,19 +80,16 @@
                 bits: CompactTarget::from_consensus(bits),
                 nonce,
             };
-
-            assert_eq!(
-                BlockHash::from_str(&hash).unwrap(),
-                next_header.block_hash(),
-                "db corruption. incorrect header hash."
-            );
-
+            if BlockHash::from_str(&hash)
+                .unwrap()
+                .ne(&next_header.block_hash())
+            {
+                return Err(DatabaseError::Corruption);
+            }
             if let Some(header) = headers.values().last() {
-                assert_eq!(
-                    header.block_hash(),
-                    next_header.prev_blockhash,
-                    "db corruption. headers do not link."
-                );
+                if header.block_hash().ne(&next_header.prev_blockhash) {
+                    return Err(DatabaseError::Corruption);
+                }
             }
             headers.insert(height, next_header);
         }
@@ -108,12 +101,10 @@
         header_chain: &'a BTreeMap<u32, Header>,
     ) -> Result<(), DatabaseError> {
         let mut write_lock = self.conn.lock().await;
-        let tx = write_lock
-            .transaction()
-            .map_err(|_| DatabaseError::WriteError)?;
+        let tx = write_lock.transaction().map_err(|_| DatabaseError::Load)?;
         let best_height: Option<u32> = tx
             .query_row("SELECT MAX(height) FROM headers", [], |row| row.get(0))
-            .map_err(|_| DatabaseError::WriteError)?;
+            .map_err(|_| DatabaseError::Write)?;
         for (height, header) in header_chain {
             if height.ge(&(best_height.unwrap_or(0))) {
                 let hash: String = header.block_hash().to_string();
@@ -137,10 +128,10 @@
                         nonce
                     ],
                 )
-                .map_err(|_| DatabaseError::WriteError)?;
+                .map_err(|_| DatabaseError::Write)?;
             }
         }
-        tx.commit().map_err(|_| DatabaseError::WriteError)?;
+        tx.commit().map_err(|_| DatabaseError::Write)?;
         Ok(())
     }
 
@@ -150,9 +141,7 @@
         height: u32,
     ) -> Result<(), DatabaseError> {
         let mut write_lock = self.conn.lock().await;
-        let tx = write_lock
-            .transaction()
-            .map_err(|_| DatabaseError::WriteError)?;
+        let tx = write_lock.transaction().map_err(|_| DatabaseError::Write)?;
         for (h, header) in header_chain {
             if h.ge(&height) {
                 let hash: String = header.block_hash().to_string();
@@ -176,10 +165,10 @@
                         nonce
                     ],
                 )
-                .map_err(|_| DatabaseError::WriteError)?;
+                .map_err(|_| DatabaseError::Write)?;
             }
         }
-        tx.commit().map_err(|_| DatabaseError::WriteError)?;
+        tx.commit().map_err(|_| DatabaseError::Write)?;
         Ok(())
     }
 
@@ -191,7 +180,7 @@ impl HeaderStore for SqliteHeaderDb {
         let stmt = "SELECT height FROM headers WHERE block_hash = ?1";
         let row: Option<u32> = write_lock
             .query_row(stmt, params![block_hash.to_string()], |row| row.get(0))
-            .map_err(|_| DatabaseError::LoadError)?;
+            .map_err(|_| DatabaseError::Load)?;
         Ok(row)
     }
 }
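The headers.rs changes above also replace assert_eq! panics with Err(DatabaseError::Corruption), so corrupted rows surface as a recoverable error instead of aborting the node. A generic, hedged sketch of that pattern with a made-up record type and toy checksum (nothing here is from the crate):

#[derive(Debug)]
enum DatabaseError {
    Corruption,
}

struct Record {
    stored_checksum: u32,
    payload: Vec<u8>,
}

// Toy checksum, purely for illustration.
fn checksum(bytes: &[u8]) -> u32 {
    bytes.iter().map(|b| u32::from(*b)).sum()
}

fn validate(record: &Record) -> Result<(), DatabaseError> {
    // Before: an assert_eq! here would panic and take the whole process down.
    // After: the mismatch is reported to the caller as data corruption.
    if checksum(&record.payload) != record.stored_checksum {
        return Err(DatabaseError::Corruption);
    }
    Ok(())
}

fn main() {
    let good = Record { stored_checksum: 6, payload: vec![1, 2, 3] };
    let bad = Record { stored_checksum: 99, payload: vec![1, 2, 3] };
    println!("{:?} {:?}", validate(&good), validate(&bad)); // Ok(()) Err(Corruption)
}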
31 changes: 15 additions & 16 deletions src/db/sqlite/peers.rs
@@ -37,10 +37,9 @@ impl SqlitePeerDb {
         if !path.exists() {
             fs::create_dir_all(&path).unwrap();
         }
-        let conn =
-            Connection::open(path.join("peers.db")).map_err(|_| DatabaseError::WriteError)?;
+        let conn = Connection::open(path.join("peers.db")).map_err(|_| DatabaseError::Open)?;
         conn.execute(PEER_SCHEMA, [])
-            .map_err(|_| DatabaseError::WriteError)?;
+            .map_err(|_| DatabaseError::Write)?;
         Ok(Self {
             conn: Arc::new(Mutex::new(conn)),
         })
Expand All @@ -66,40 +65,40 @@ impl PeerStore for SqlitePeerDb {
peer.banned,
],
)
.map_err(|_| DatabaseError::WriteError)?;
.map_err(|_| DatabaseError::Write)?;
Ok(())
}

async fn random(&mut self) -> Result<PersistedPeer, DatabaseError> {
let lock = self.conn.lock().await;
let mut stmt = lock
.prepare("SELECT * FROM peers WHERE banned = false ORDER BY RANDOM() LIMIT 1")
.map_err(|_| DatabaseError::LoadError)?;
let mut rows = stmt.query([]).map_err(|_| DatabaseError::LoadError)?;
if let Some(row) = rows.next().map_err(|_| DatabaseError::LoadError)? {
let ip_addr: String = row.get(0).map_err(|_| DatabaseError::LoadError)?;
let port: u16 = row.get(1).map_err(|_| DatabaseError::LoadError)?;
let service_flags: u64 = row.get(2).map_err(|_| DatabaseError::LoadError)?;
let tried: bool = row.get(3).map_err(|_| DatabaseError::LoadError)?;
let banned: bool = row.get(4).map_err(|_| DatabaseError::LoadError)?;
.map_err(|_| DatabaseError::Write)?;
let mut rows = stmt.query([]).map_err(|_| DatabaseError::Load)?;
if let Some(row) = rows.next().map_err(|_| DatabaseError::Load)? {
let ip_addr: String = row.get(0).map_err(|_| DatabaseError::Load)?;
let port: u16 = row.get(1).map_err(|_| DatabaseError::Load)?;
let service_flags: u64 = row.get(2).map_err(|_| DatabaseError::Load)?;
let tried: bool = row.get(3).map_err(|_| DatabaseError::Load)?;
let banned: bool = row.get(4).map_err(|_| DatabaseError::Load)?;
let ip = ip_addr
.parse::<IpAddr>()
.map_err(|_| DatabaseError::LoadError)?;
.map_err(|_| DatabaseError::Deserialization)?;
let services: ServiceFlags = ServiceFlags::from(service_flags);
return Ok(PersistedPeer::new(ip, port, services, tried, banned));
} else {
return Err(DatabaseError::LoadError);
return Err(DatabaseError::Load);
}
}

async fn num_unbanned(&mut self) -> Result<u32, DatabaseError> {
let lock = self.conn.lock().await;
let mut stmt = lock
.prepare("SELECT COUNT(*) FROM peers WHERE banned = false")
.map_err(|_| DatabaseError::LoadError)?;
.map_err(|_| DatabaseError::Load)?;
let count: u32 = stmt
.query_row([], |row| row.get(0))
.map_err(|_| DatabaseError::LoadError)?;
.map_err(|_| DatabaseError::Deserialization)?;
Ok(count)
}
}
2 changes: 1 addition & 1 deletion src/db/traits.rs
@@ -78,7 +78,7 @@ impl PeerStore for () {
     }
 
     async fn random(&mut self) -> Result<PersistedPeer, DatabaseError> {
-        Err(DatabaseError::LoadError)
+        Err(DatabaseError::Load)
     }
 
     async fn num_unbanned(&mut self) -> Result<u32, DatabaseError> {