
Drop the uniqueness constraint (#655)
* Drop the uniqueness constraint

* Clippy fixes

* clippy fix

* Remove clippy::all and clippy::pedantic

* Make clippy happy

* Fmt
Dzejkop authored Nov 28, 2023
1 parent b918ad8 commit ce89841
Showing 20 changed files with 72 additions and 22 deletions.
4 changes: 4 additions & 0 deletions schemas/database/011_drop_root_uniqueness.sql
@@ -0,0 +1,4 @@
+ALTER TABLE identities
+DROP CONSTRAINT identities_root_key;
+
+CREATE INDEX identities_root ON identities (root);
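
Why this works: with `identities_root_key` gone, several rows may share one `root` (the new `can_insert_same_root_multiple_times` test below exercises exactly this), so the read queries in `src/database/mod.rs` are updated to resolve a root to its earliest row, and the plain `identities_root` index keeps those lookups fast. The following sketch is not part of the commit; it reuses the bind-parameter style of the queries in this diff, and the serial `id` column is an assumption inferred from the `ORDER BY id` clauses:

-- Illustrative sketch (not part of the commit). With identities_root_key
-- dropped, this INSERT can run twice with the same root ($3) but different
-- leaf_index/commitment values, and both rows are kept:
INSERT INTO identities (leaf_index, commitment, root, status, pending_as_of)
VALUES ($1, $2, $3, $4, CURRENT_TIMESTAMP);

-- Reads disambiguate a duplicated root by taking the earliest row; the new
-- identities_root index serves this lookup in place of the unique constraint:
SELECT id
FROM identities
WHERE root = $1
ORDER BY id ASC
LIMIT 1;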
6 changes: 3 additions & 3 deletions src/app.rs
@@ -228,7 +228,7 @@ impl App {
&initial_leaf_value,
initial_root_hash,
mined_items.clone(),
-&mmap_file_path,
+mmap_file_path,
)
.await?
{
@@ -872,7 +872,7 @@ mod test {
);

// check if the index is correct
-assert_eq!(last_mined_index_in_dense, identities.iter().count());
+assert_eq!(last_mined_index_in_dense, identities.len());
// since there are fewer identities than the dense prefix, the leaves should be an empty
// vector
assert!(leaves.is_empty());
@@ -893,7 +893,7 @@
assert_eq!(last_mined_index_in_dense, (1 << dense_prefix_depth) - 1);

// since there are more identities than the dense prefix, the leaves should be 2
-assert_eq!(leaves.iter().count(), 2);
+assert_eq!(leaves.len(), 2);

// additional check for correctness
assert_eq!(leaves[0], identities[8].element);
2 changes: 1 addition & 1 deletion src/contracts/abi.rs
@@ -25,7 +25,7 @@ impl From<u8> for TreeChangeKind {
0 => Self::Insertion,
1 => Self::Deletion,
2 => Self::Update,
-_ => panic!("Invalid value for TreeChangeKind: {}", value),
+_ => panic!("Invalid value for TreeChangeKind: {value}"),
}
}
}
35 changes: 31 additions & 4 deletions src/database/mod.rs
@@ -145,7 +145,6 @@ impl Database {
r#"
INSERT INTO identities (leaf_index, commitment, root, status, pending_as_of)
VALUES ($1, $2, $3, $4, CURRENT_TIMESTAMP)
-ON CONFLICT (root) DO NOTHING;
"#,
)
.bind(leaf_index as i64)
@@ -166,7 +165,11 @@
) -> Result<Option<usize>, Error> {
let root_index_query = sqlx::query(
r#"
-SELECT id FROM identities WHERE root = $1
+SELECT id
+FROM identities
+WHERE root = $1
+ORDER BY id ASC
+LIMIT 1
"#,
)
.bind(root);
@@ -465,7 +468,9 @@ impl Database {
pending_as_of as pending_valid_as_of,
mined_at as mined_valid_as_of
FROM identities
-WHERE root = $1;
+WHERE root = $1
+ORDER BY id
+LIMIT 1
"#,
)
.bind(root);
@@ -1392,7 +1397,7 @@ mod test {

db.mark_all_as_pending().await?;

-for root in roots.iter() {
+for root in &roots {
let root = db
.get_root_state(root)
.await?
@@ -2042,4 +2047,26 @@ mod test {

Ok(())
}

+#[tokio::test]
+async fn can_insert_same_root_multiple_times() -> anyhow::Result<()> {
+    let (db, _db_container) = setup_db().await?;
+    let identities = mock_identities(2);
+    let roots = mock_roots(2);
+
+    db.insert_pending_identity(0, &identities[0], &roots[0])
+        .await?;
+
+    db.insert_pending_identity(1, &identities[1], &roots[0])
+        .await?;
+
+    let root_state = db
+        .get_root_state(&roots[0])
+        .await?
+        .context("Missing root")?;
+
+    assert_eq!(root_state.status, ProcessedStatus::Pending);
+
+    Ok(())
+}
}
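
The new test resolves a duplicated root through `get_root_state`, whose query (per the hunk at -465 above) now carries the same earliest-row rule. Below is a sketch of what the database holds after the test's two inserts and what the lookup returns; the table rendering and the `status` column in the SELECT are illustrative assumptions, since the hunk only shows the aliased timestamp columns:

-- Table contents after the two inserts (illustrative; values abbreviated):
--   id | leaf_index | commitment    | root     | status
--    1 |          0 | identities[0] | roots[0] | pending
--    2 |          1 | identities[1] | roots[0] | pending

-- get_root_state's query resolves roots[0] to row id = 1:
SELECT status,  -- assumed to be selected; the hunk truncates above this line
       pending_as_of AS pending_valid_as_of,
       mined_at      AS mined_valid_as_of
FROM identities
WHERE root = $1
ORDER BY id
LIMIT 1;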
2 changes: 1 addition & 1 deletion src/identity_tree/status.rs
@@ -141,7 +141,7 @@ mod tests {
let s = s.leak() as &'static str;

// Unwrap from the redundant JSON quotes
-s.trim_start_matches("\"").trim_end_matches("\"")
+s.trim_start_matches('\"').trim_end_matches('\"')
}

#[test_case("pending" => Status::Processed(ProcessedStatus::Pending))]
8 changes: 6 additions & 2 deletions src/lib.rs
@@ -1,6 +1,10 @@
#![doc = include_str!("../Readme.md")]
-#![warn(clippy::all, clippy::pedantic, clippy::cargo)]
-#![allow(clippy::module_name_repetitions, clippy::wildcard_imports)]
+#![warn(clippy::cargo)]
+#![allow(
+    clippy::module_name_repetitions,
+    clippy::wildcard_imports,
+    clippy::too_many_arguments
+)]

pub mod app;
mod contracts;
2 changes: 1 addition & 1 deletion src/main.rs
@@ -1,5 +1,5 @@
#![doc = include_str!("../Readme.md")]
-#![warn(clippy::all, clippy::pedantic, clippy::cargo)]
+#![warn(clippy::cargo)]
#![allow(clippy::module_name_repetitions, clippy::wildcard_imports)]

use cli_batteries::{run, version};
2 changes: 2 additions & 0 deletions src/server/data.rs
@@ -207,10 +207,12 @@ impl ToResponseCode for VerifySemaphoreProofResponse {
}

impl IdentityHistoryEntryKind {
+#[must_use]
pub fn is_insertion(&self) -> bool {
matches!(self, IdentityHistoryEntryKind::Insertion)
}

+#[must_use]
pub fn is_deletion(&self) -> bool {
matches!(self, IdentityHistoryEntryKind::Deletion)
}
2 changes: 1 addition & 1 deletion src/task_monitor/tasks/delete_identities.rs
@@ -91,7 +91,7 @@ async fn delete_identities(
);

// Insert the new items into pending identities
-let items = data.into_iter().zip(leaf_indices.into_iter());
+let items = data.into_iter().zip(leaf_indices);
for ((root, _proof), leaf_index) in items {
database
.insert_pending_identity(leaf_index, &Hash::ZERO, &root)
7 changes: 5 additions & 2 deletions src/task_monitor/tasks/finalize_identities.rs
@@ -200,7 +200,7 @@ async fn finalize_mainnet_roots(
database,
identity_manager,
processed_tree,
-&log,
+log,
max_epoch_duration,
)
.await?;
@@ -311,7 +311,10 @@ async fn update_eligible_recoveries(
.context("Could not fetch deletion indices from tx")?;

let commitments = processed_tree.commitments_by_indices(commitments.iter().copied());
-let commitments: Vec<U256> = commitments.into_iter().map(|c| c.into()).collect();
+let commitments: Vec<U256> = commitments
+    .into_iter()
+    .map(std::convert::Into::into)
+    .collect();

// Check if any deleted commitments correspond with entries in the
// recoveries table and insert the new commitment into the unprocessed
2 changes: 1 addition & 1 deletion src/task_monitor/tasks/insert_identities.rs
@@ -121,7 +121,7 @@ async fn insert_identities(
"Length mismatch when appending identities to tree"
);

-let items = data.into_iter().zip(identities.into_iter());
+let items = data.into_iter().zip(identities);

for ((root, _proof, leaf_index), identity) in items {
database
8 changes: 5 additions & 3 deletions src/task_monitor/tasks/monitor_txs.rs
@@ -36,9 +36,11 @@ async fn monitor_txs_loop(
let mut monitored_txs_receiver = monitored_txs_receiver.lock().await;

while let Some(tx) = monitored_txs_receiver.recv().await {
-if !identity_manager.mine_transaction(tx.clone()).await? {
-    panic!("Failed to mine transaction: {}", tx);
-}
+assert!(
+    (identity_manager.mine_transaction(tx.clone()).await?),
+    "Failed to mine transaction: {}",
+    tx
+);
}

Ok(())
2 changes: 2 additions & 0 deletions src/utils/index_packing.rs
@@ -1,3 +1,4 @@
+#[must_use]
pub fn pack_indices(indices: &[u32]) -> Vec<u8> {
let mut packed = Vec::with_capacity(indices.len() * 4);

@@ -8,6 +9,7 @@ pub fn pack_indices(indices: &[u32]) -> Vec<u8> {
packed
}

+#[must_use]
pub fn unpack_indices(packed: &[u8]) -> Vec<u32> {
let mut indices = Vec::with_capacity(packed.len() / 4);

1 change: 1 addition & 0 deletions src/utils/tree_updates.rs
@@ -1,5 +1,6 @@
use crate::identity_tree::TreeUpdate;

+#[must_use]
pub fn dedup_tree_updates(updates: Vec<TreeUpdate>) -> Vec<TreeUpdate> {
let mut deduped = Vec::new();
let mut temp: Option<TreeUpdate> = None;
2 changes: 1 addition & 1 deletion tests/common/mod.rs
@@ -1,6 +1,6 @@
// We include this module in multiple integration
// test crates - so some code may not be used in some cases
-#![allow(dead_code)]
+#![allow(dead_code, clippy::too_many_arguments)]

pub mod abi;
mod chain_mock;
2 changes: 2 additions & 0 deletions tests/delete_identities.rs
@@ -1,3 +1,5 @@
+#![allow(clippy::needless_range_loop)]
+
mod common;

use common::prelude::*;
2 changes: 2 additions & 0 deletions tests/delete_padded_identity.rs
@@ -1,3 +1,5 @@
+#![allow(clippy::needless_range_loop)]
+
mod common;

use common::prelude::*;
1 change: 0 additions & 1 deletion tests/dynamic_batch_sizes.rs
@@ -4,7 +4,6 @@ use std::str::FromStr;

use common::prelude::*;
use hyper::Uri;
-use tempfile;

use crate::common::{test_add_batch_size, test_remove_batch_size};

2 changes: 1 addition & 1 deletion tests/identity_history.rs
@@ -310,7 +310,7 @@ async fn fetch_identity_history(
) -> anyhow::Result<Option<IdentityHistoryResponse>> {
let uri = format!("{uri}/identityHistory");
let body = IdentityHistoryRequest {
-identity_commitment: identity.clone(),
+identity_commitment: *identity,
};

let req = Request::post(uri)
2 changes: 2 additions & 0 deletions tests/recover_identities.rs
@@ -1,3 +1,5 @@
+#![allow(clippy::needless_range_loop)]
+
mod common;

use common::prelude::*;
