diff --git a/Cargo.lock b/Cargo.lock
index e8e79414..a698f31a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,6 +1,6 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
-version = 3
+version = 4

 [[package]]
 name = "Inflector"
@@ -4827,6 +4827,7 @@ name = "prism-tests"
 version = "0.1.0"
 dependencies = [
  "anyhow",
+ "jmt",
  "keystore-rs",
  "log",
  "pretty_env_logger",
diff --git a/crates/common/src/digest.rs b/crates/common/src/digest.rs
new file mode 100644
index 00000000..f8014237
--- /dev/null
+++ b/crates/common/src/digest.rs
@@ -0,0 +1,107 @@
+use anyhow::{anyhow, Result};
+use bls12_381::Scalar;
+use jmt::RootHash;
+use serde::{Deserialize, Serialize};
+
+use crate::hasher::Hasher;
+
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Copy)]
+pub struct Digest(pub [u8; 32]);
+
+impl Digest {
+    pub fn hash(data: impl AsRef<[u8]>) -> Self {
+        let mut hasher = Hasher::new();
+        hasher.update(data.as_ref());
+        Self(hasher.finalize())
+    }
+
+    pub const fn zero() -> Self {
+        Self([0u8; 32])
+    }
+}
+
+// serializer and deserializer for rocksdb
+// converts from bytearrays into digests
+// padds it with zero if it is too small
+impl<const N: usize> From<[u8; N]> for Digest {
+    fn from(value: [u8; N]) -> Self {
+        assert!(N <= 32, "Input array must not exceed 32 bytes");
+        let mut digest = [0u8; 32];
+        digest[..N].copy_from_slice(&value);
+        Self(digest)
+    }
+}
+
+// implementing it for now to get things to compile, curve choice will be made later
+impl TryFrom<Digest> for Scalar {
+    type Error = anyhow::Error;
+
+    fn try_from(value: Digest) -> Result<Scalar, Self::Error> {
+        let mut byte_array = [0u8; 32];
+        byte_array.copy_from_slice(value.as_ref());
+        byte_array.reverse();
+
+        let val =
+            [
+                u64::from_le_bytes(byte_array[0..8].try_into().map_err(|_| {
+                    anyhow!(format!("slice to array: [0..8] for digest: {value:?}"))
+                })?),
+                u64::from_le_bytes(byte_array[8..16].try_into().map_err(|_| {
+                    anyhow!(format!("slice to array: [8..16] for digest: {value:?}"))
+                })?),
+                u64::from_le_bytes(byte_array[16..24].try_into().map_err(|_| {
+                    anyhow!(format!("slice to array: [16..24] for digest: {value:?}"))
+                })?),
+                u64::from_le_bytes(byte_array[24..32].try_into().map_err(|_| {
+                    anyhow!(format!("slice to array: [24..32] for digest: {value:?}"))
+                })?),
+            ];
+
+        Ok(Scalar::from_raw(val))
+    }
+}
+
+impl From<Digest> for RootHash {
+    fn from(val: Digest) -> RootHash {
+        RootHash::from(val.0)
+    }
+}
+
+impl From<RootHash> for Digest {
+    fn from(val: RootHash) -> Digest {
+        Digest(val.0)
+    }
+}
+
+impl AsRef<[u8]> for Digest {
+    fn as_ref(&self) -> &[u8] {
+        &self.0
+    }
+}
+
+impl std::fmt::Display for Digest {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.to_hex())
+    }
+}
+
+impl Digest {
+    pub const fn new(bytes: [u8; 32]) -> Self {
+        Digest(bytes)
+    }
+
+    pub fn from_hex(hex_str: &str) -> Result<Self> {
+        let mut bytes = [0u8; 32];
+        hex::decode_to_slice(hex_str, &mut bytes)
+            .map_err(|e| anyhow!(format!("Invalid Format: {e}")))?;
+        Ok(Digest(bytes))
+    }
+
+    pub fn to_hex(&self) -> String {
+        hex::encode(self.0)
+    }
+
+    pub fn to_bytes(&self) -> [u8; 32] {
+        self.0
+    }
+}
diff --git a/crates/common/src/hashchain.rs b/crates/common/src/hashchain.rs
index 2b588952..0baa8692 100644
--- a/crates/common/src/hashchain.rs
+++ b/crates/common/src/hashchain.rs
@@ -7,12 +7,12 @@ use std::{
 };

 use crate::{
+    digest::Digest,
+    hasher::Hasher,
     keys::VerifyingKey,
     operation::{
-        AddDataArgs, CreateAccountArgs, KeyOperationArgs, Operation, RegisterServiceArgs,
-        ServiceChallenge, ServiceChallengeInput,
+        CreateAccountArgs, Operation, RegisterServiceArgs, ServiceChallenge, ServiceChallengeInput,
     },
-    tree::{Digest, Hasher},
 };

 #[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
@@ -72,17 +72,19 @@ impl Hashchain {
     pub fn create_account(
         id: String,
         value: VerifyingKey,
-        signature: Vec<u8>,
         service_id: String,
         challenge: ServiceChallengeInput,
+        prev_hash: Digest,
+        signature: Vec<u8>,
     ) -> Result<Hashchain> {
         let mut hc = Hashchain::empty(id.clone());
         let operation = Operation::CreateAccount(CreateAccountArgs {
             id,
-            signature,
             value,
             service_id,
             challenge,
+            prev_hash,
+            signature,
         });
         hc.perform_operation(operation)?;
         Ok(hc)
     }
@@ -93,6 +95,7 @@ impl Hashchain {
         let operation = Operation::RegisterService(RegisterServiceArgs {
             id,
             creation_gate: challenge,
+            prev_hash: Digest::zero(),
         });
         hc.perform_operation(operation)?;
         Ok(hc)
     }
@@ -269,15 +272,16 @@ impl Hashchain {
         &self.entries[idx]
     }

+    pub fn last_hash(&self) -> Digest {
+        self.last().map_or(Digest::zero(), |entry| entry.hash)
+    }
+
     fn push(&mut self, operation: Operation) -> Result<HashchainEntry> {
         if operation.id() != self.id {
             bail!("Operation ID does not match Hashchain ID");
         }

-        let previous_hash = self
-            .entries
-            .last()
-            .map_or(Digest::new([0u8; 32]), |entry| entry.hash);
+        let previous_hash = self.last_hash();

         let entry = HashchainEntry::new(operation, previous_hash);
         self.entries.push(entry.clone());
@@ -293,31 +297,53 @@ impl Hashchain {

     /// Verifies the structure and signature of a new operation
     fn validate_new_operation(&self, operation: &Operation) -> Result<()> {
         match operation {
-            Operation::RegisterService(_) => {
+            Operation::RegisterService(args) => {
                 if !self.entries.is_empty() {
                     bail!("RegisterService operation must be the first entry");
                 }
+
+                if args.prev_hash != Digest::zero() {
+                    bail!("Previous hash for initial operation must be zero")
+                }
+
                 Ok(())
             }
-            Operation::AddKey(KeyOperationArgs { signature, .. })
-            | Operation::RevokeKey(KeyOperationArgs { signature, .. })
-            | Operation::AddData(AddDataArgs {
-                op_signature: signature,
-                ..
-            }) => {
-                let signing_key = self.get_key_at_index(signature.key_idx)?;
+            Operation::AddKey(args) | Operation::RevokeKey(args) => {
+                if args.prev_hash != self.last_hash() {
+                    bail!("Previous hash for key operation must be the last hash")
+                }
+
+                let signing_key = self.get_key_at_index(args.signature.key_idx)?;

                 if self.is_key_revoked(signing_key.clone()) {
                     bail!("The signing key is revoked");
                 }

-                operation.verify_user_signature(signing_key.clone())
+                operation.verify_user_signature(signing_key)
+            }
+            Operation::AddData(args) => {
+                if args.prev_hash != self.last_hash() {
+                    bail!("Previous hash for add-data operation must be the last hash")
+                }
+
+                let signing_key = self.get_key_at_index(args.op_signature.key_idx)?;
+
+                if self.is_key_revoked(signing_key.clone()) {
+                    bail!("The signing key is revoked");
+                }
+
+                operation.verify_user_signature(signing_key)
             }
             Operation::CreateAccount(args) => {
                 if !self.entries.is_empty() {
                     bail!("RegisterService operation must be the first entry");
                 }
-                operation.verify_user_signature(args.value.clone())
+
+                if args.prev_hash != Digest::zero() {
+                    bail!("Previous hash for initial operation must be zero")
+                }
+
+                operation.verify_user_signature(&args.value)
             }
         }
     }
diff --git a/crates/common/src/hasher.rs b/crates/common/src/hasher.rs
new file mode 100644
index 00000000..75dc6b3c
--- /dev/null
+++ b/crates/common/src/hasher.rs
@@ -0,0 +1,68 @@
+use jmt::SimpleHasher;
+use serde::{ser::SerializeTupleStruct, Deserialize, Serialize};
+
+#[derive(Debug, Clone, Default)]
+pub struct Hasher(sha2::Sha256);
+
+impl Hasher {
+    pub fn new() -> Self {
+        Self(sha2::Sha256::new())
+    }
+
+    pub fn update(&mut self, data: &[u8]) {
+        self.0.update(data);
+    }
+
+    pub fn finalize(self) -> [u8; 32] {
+        self.0.finalize()
+    }
+}
+
+impl Serialize for Hasher {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        serializer.serialize_tuple_struct("Sha256Wrapper", 0)?.end()
+    }
+}
+
+impl<'de> Deserialize<'de> for Hasher {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        struct Sha256WrapperVisitor;
+
+        impl<'de> serde::de::Visitor<'de> for Sha256WrapperVisitor {
+            type Value = Hasher;
+
+            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
+                formatter.write_str("a Sha256Wrapper")
+            }
+
+            fn visit_seq<A>(self, _seq: A) -> Result<Self::Value, A::Error>
+            where
+                A: serde::de::SeqAccess<'de>,
+            {
+                Ok(Hasher::default())
+            }
+        }
+
+        deserializer.deserialize_tuple_struct("Sha256Wrapper", 0, Sha256WrapperVisitor)
+    }
+}
+
+impl SimpleHasher for Hasher {
+    fn new() -> Self {
+        Self::new()
+    }
+
+    fn update(&mut self, data: &[u8]) {
+        self.update(data);
+    }
+
+    fn finalize(self) -> [u8; 32] {
+        self.finalize()
+    }
+}
diff --git a/crates/common/src/keys.rs b/crates/common/src/keys.rs
index c31a8c57..77704d92 100644
--- a/crates/common/src/keys.rs
+++ b/crates/common/src/keys.rs
@@ -11,7 +11,7 @@ use secp256k1::{
 use serde::{Deserialize, Serialize};
 use std::{self};

-use crate::tree::Digest;
+use crate::digest::Digest;

 #[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq, Hash)]
 /// Represents a public key supported by the system.
diff --git a/crates/common/src/lib.rs b/crates/common/src/lib.rs
index b3407bbb..18b0a2be 100644
--- a/crates/common/src/lib.rs
+++ b/crates/common/src/lib.rs
@@ -1,4 +1,6 @@
+pub mod digest;
 pub mod hashchain;
+pub mod hasher;
 pub mod keys;
 pub mod operation;
 pub mod tree;
@@ -6,5 +8,7 @@ pub mod tree;
 #[macro_use]
 extern crate log;

+#[cfg(feature = "test_utils")]
+pub mod test_ops;
 #[cfg(feature = "test_utils")]
 pub mod test_utils;
diff --git a/crates/common/src/operation.rs b/crates/common/src/operation.rs
index 232a7e64..7279cbd8 100644
--- a/crates/common/src/operation.rs
+++ b/crates/common/src/operation.rs
@@ -5,7 +5,10 @@ use prism_errors::GeneralError;
 use serde::{Deserialize, Serialize};
 use std::{self, fmt::Display};

-use crate::keys::{SigningKey, VerifyingKey};
+use crate::{
+    digest::Digest,
+    keys::{SigningKey, VerifyingKey},
+};

 #[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
 /// An [`Operation`] represents a state transition in the system.
@@ -65,11 +68,14 @@ pub struct CreateAccountArgs {
     pub id: String,
     /// Public key being added
     pub value: VerifyingKey,
-    pub signature: Vec<u8>,
     /// Associated service ID
     pub service_id: String,
     /// Challenge input for verification
     pub challenge: ServiceChallengeInput,
+    /// The hash of the previous operation
+    pub prev_hash: Digest,
+    /// The signature that signed the operation
+    pub signature: Vec<u8>,
 }

 #[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
@@ -79,6 +85,8 @@ pub struct RegisterServiceArgs {
     pub id: String,
     /// Challenge gate for access control
     pub creation_gate: ServiceChallenge,
+    /// The hash of the previous operation
+    pub prev_hash: Digest,
 }

 #[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
@@ -101,6 +109,8 @@ pub struct AddDataArgs {
     pub value: Vec<u8>,
     /// Optional external signature used to sign the data to be added
     pub value_signature: Option<SignatureBundle>,
+    /// The hash of the previous operation
+    pub prev_hash: Digest,
     /// Signature to authorize the action
     pub op_signature: HashchainSignatureBundle,
 }

@@ -112,6 +122,8 @@ pub struct KeyOperationArgs {
     pub id: String,
     /// Public key being added or revoked
     pub value: VerifyingKey,
+    /// The hash of the previous operation
+    pub prev_hash: Digest,
     /// Signature to authorize the action
     pub signature: HashchainSignatureBundle,
 }

@@ -128,6 +140,7 @@ impl Operation {
             value: signing_key.clone().verifying_key(),
             service_id,
             challenge: ServiceChallengeInput::Signed(Vec::new()),
+            prev_hash: Digest::zero(),
             signature: Vec::new(),
         });

@@ -147,18 +160,24 @@ impl Operation {
     }

     pub fn new_register_service(id: String, creation_gate: ServiceChallenge) -> Self {
-        Operation::RegisterService(RegisterServiceArgs { id, creation_gate })
+        Operation::RegisterService(RegisterServiceArgs {
+            id,
+            creation_gate,
+            prev_hash: Digest::zero(),
+        })
     }

     pub fn new_add_key(
         id: String,
         value: VerifyingKey,
+        prev_hash: Digest,
         signing_key: &SigningKey,
         key_idx: usize,
     ) -> Result<Self> {
         let op_to_sign = Operation::AddKey(KeyOperationArgs {
             id: id.clone(),
             value: value.clone(),
+            prev_hash,
             signature: HashchainSignatureBundle::empty_with_idx(key_idx),
         });

@@ -171,6 +190,7 @@ impl Operation {
         Ok(Operation::AddKey(KeyOperationArgs {
             id,
             value,
+            prev_hash,
             signature,
         }))
     }

@@ -178,12 +198,14 @@ impl Operation {
     pub fn new_revoke_key(
         id: String,
         value: VerifyingKey,
+        prev_hash: Digest,
         signing_key: &SigningKey,
         key_idx: usize,
     ) -> Result<Self> {
         let op_to_sign = Operation::RevokeKey(KeyOperationArgs {
             id: id.clone(),
             value: value.clone(),
+            prev_hash,
             signature: HashchainSignatureBundle::empty_with_idx(key_idx),
         });

@@ -196,6 +218,7 @@ impl Operation {
         Ok(Operation::RevokeKey(KeyOperationArgs {
             id,
             value,
+            prev_hash,
             signature,
         }))
     }
@@ -204,6 +227,7 @@ impl Operation {
         id: String,
         value: Vec<u8>,
         value_signature: Option<SignatureBundle>,
+        prev_hash: Digest,
         signing_key: &SigningKey,
         key_idx: usize,
     ) -> Result<Self> {
@@ -211,6 +235,7 @@ impl Operation {
             id: id.clone(),
             value: value.clone(),
             value_signature: value_signature.clone(),
+            prev_hash,
             op_signature: HashchainSignatureBundle::empty_with_idx(key_idx),
         });

@@ -224,6 +249,7 @@ impl Operation {
             id,
             value,
             value_signature,
+            prev_hash,
             op_signature,
         }))
     }
@@ -264,9 +290,10 @@ impl Operation {
             Operation::CreateAccount(args) => Operation::CreateAccount(CreateAccountArgs {
                 id: args.id.clone(),
                 value: args.value.clone(),
-                signature: args.signature.clone(),
                 service_id: args.service_id.clone(),
                 challenge: ServiceChallengeInput::Signed(Vec::new()),
+                prev_hash: args.prev_hash,
+                signature: args.signature.clone(),
             }),
             _ => self.clone(),
         }
@@ -277,6 +304,7 @@ impl Operation {
             Operation::AddKey(args) => Operation::AddKey(KeyOperationArgs {
                 id: args.id.clone(),
                 value: args.value.clone(),
+                prev_hash: args.prev_hash,
                 signature: HashchainSignatureBundle {
                     key_idx: args.signature.key_idx,
                     signature: Vec::new(),
@@ -285,6 +313,7 @@ impl Operation {
             Operation::RevokeKey(args) => Operation::RevokeKey(KeyOperationArgs {
                 id: args.id.clone(),
                 value: args.value.clone(),
+                prev_hash: args.prev_hash,
                 signature: HashchainSignatureBundle {
                     key_idx: args.signature.key_idx,
                     signature: Vec::new(),
@@ -293,6 +322,7 @@ impl Operation {
             Operation::AddData(args) => Operation::AddData(AddDataArgs {
                 id: args.id.clone(),
                 value: args.value.clone(),
+                prev_hash: args.prev_hash,
                 value_signature: args.value_signature.clone(),
                 op_signature: HashchainSignatureBundle {
                     key_idx: args.op_signature.key_idx,
@@ -302,18 +332,20 @@ impl Operation {
             Operation::CreateAccount(args) => Operation::CreateAccount(CreateAccountArgs {
                 id: args.id.clone(),
                 value: args.value.clone(),
-                signature: Vec::new(),
                 service_id: args.service_id.clone(),
                 challenge: args.challenge.clone(),
+                prev_hash: args.prev_hash,
+                signature: Vec::new(),
             }),
             Operation::RegisterService(args) => Operation::RegisterService(RegisterServiceArgs {
                 id: args.id.clone(),
                 creation_gate: args.creation_gate.clone(),
+                prev_hash: args.prev_hash,
             }),
         }
     }

-    pub fn verify_user_signature(&self, pubkey: VerifyingKey) -> Result<()> {
+    pub fn verify_user_signature(&self, pubkey: &VerifyingKey) -> Result<()> {
         match self {
             Operation::RegisterService(_) => Ok(()),
             Operation::CreateAccount(args) => {
diff --git a/crates/common/src/test_ops.rs b/crates/common/src/test_ops.rs
new file mode 100644
index 00000000..2fe94f27
--- /dev/null
+++ b/crates/common/src/test_ops.rs
@@ -0,0 +1,310 @@
+use std::{collections::HashMap, sync::Arc};
+
+use jmt::{mock::MockTreeStore, KeyHash};
+
+use crate::{
+    digest::Digest,
+    hasher::Hasher,
+    keys::{SigningKey, VerifyingKey},
+    operation::{Operation, SignatureBundle},
+    test_utils::create_mock_signing_key,
+    tree::{HashchainResponse::*, KeyDirectoryTree, SnarkableTree},
+};
+
+enum PostCommitAction {
+    UpdateStorageOnly,
+    RememberServiceKey(String, SigningKey),
+    RememberAccountKey(String, SigningKey),
+}
+
+pub struct UncommittedOperation<'a> {
+    operation: Operation,
+    builder: &'a mut OpsBuilder,
+    post_commit_action: PostCommitAction,
+}
+
+impl UncommittedOperation<'_> {
+    pub fn ex(self) -> Operation {
+        self.builder
+            .tree
+            .process_operation(&self.operation)
.expect("Processing operation should work"); + + match self.post_commit_action { + PostCommitAction::UpdateStorageOnly => (), + PostCommitAction::RememberAccountKey(id, account_key) => { + self.builder.account_keys.insert(id, account_key); + } + PostCommitAction::RememberServiceKey(id, service_key) => { + self.builder.service_keys.insert(id, service_key); + } + } + + self.operation + } + + pub fn op(self) -> Operation { + self.operation + } +} + +pub struct OpsBuilder { + /// Simulated hashchain storage that is mutated when operations are applied + tree: Box, + /// Remembers private keys of services to simulate account creation via an external service + service_keys: HashMap, + /// Remembers private keys of accounts to simulate actions on behalf of these accounts + account_keys: HashMap, +} + +impl Default for OpsBuilder { + fn default() -> Self { + let store = Arc::new(MockTreeStore::default()); + let tree = Box::new(KeyDirectoryTree::new(store)); + let service_keys = HashMap::new(); + let account_keys = HashMap::new(); + + Self { + tree, + service_keys, + account_keys, + } + } +} + +impl OpsBuilder { + pub fn new() -> Self { + Self::default() + } + + pub fn register_service_with_random_key(&mut self, id: &str) -> UncommittedOperation { + let random_service_key = create_mock_signing_key(); + self.register_service(id, random_service_key) + } + + pub fn register_service( + &mut self, + id: &str, + service_signing_key: SigningKey, + ) -> UncommittedOperation { + let op = + Operation::new_register_service(id.to_string(), service_signing_key.clone().into()); + + UncommittedOperation { + operation: op, + builder: self, + post_commit_action: PostCommitAction::RememberServiceKey( + id.to_string(), + service_signing_key, + ), + } + } + + pub fn create_account_with_random_key( + &mut self, + id: &str, + service_id: &str, + ) -> UncommittedOperation { + let random_signing_key = create_mock_signing_key(); + self.create_account(id, service_id, random_signing_key) + } + + pub fn create_account( + &mut self, + id: &str, + service_id: &str, + signing_key: SigningKey, + ) -> UncommittedOperation { + let Some(service_signing_key) = self.service_keys.get(service_id) else { + panic!("No existing service found for {}", service_id) + }; + + let op = Operation::new_create_account( + id.to_string(), + &signing_key, + service_id.to_string(), + service_signing_key, + ) + .expect("Creating account operation should work"); + + UncommittedOperation { + operation: op, + builder: self, + post_commit_action: PostCommitAction::RememberAccountKey(id.to_string(), signing_key), + } + } + + pub fn add_random_key_verified_with_root(&mut self, id: &str) -> UncommittedOperation { + let Some(account_signing_key) = self.account_keys.get(id).cloned() else { + panic!("No existing account key for {}", id) + }; + + self.add_random_key(id, &account_signing_key, 0) + } + + pub fn add_random_key( + &mut self, + id: &str, + signing_key: &SigningKey, + key_idx: usize, + ) -> UncommittedOperation { + let random_key = create_mock_signing_key().verifying_key(); + self.add_key(id, random_key, signing_key, key_idx) + } + + pub fn add_key_verified_with_root( + &mut self, + id: &str, + key: VerifyingKey, + ) -> UncommittedOperation { + let Some(account_signing_key) = self.account_keys.get(id).cloned() else { + panic!("No existing account key for {}", id) + }; + + self.add_key(id, key, &account_signing_key, 0) + } + + pub fn add_key( + &mut self, + id: &str, + key: VerifyingKey, + signing_key: &SigningKey, + key_idx: usize, + ) -> 
+        let hashed_id = Digest::hash(id);
+        let key_hash = KeyHash::with::<Hasher>(hashed_id);
+
+        let Ok(Found(hc, _)) = self.tree.get(key_hash) else {
+            panic!("No existing hashchain found for {}", id)
+        };
+
+        let op = Operation::new_add_key(id.to_string(), key, hc.last_hash(), signing_key, key_idx)
+            .expect("Creating add-key operation should work");
+
+        UncommittedOperation {
+            operation: op,
+            builder: self,
+            post_commit_action: PostCommitAction::UpdateStorageOnly,
+        }
+    }
+
+    pub fn revoke_key_verified_with_root(
+        &mut self,
+        id: &str,
+        key: VerifyingKey,
+    ) -> UncommittedOperation {
+        let Some(account_signing_key) = self.account_keys.get(id).cloned() else {
+            panic!("No existing account key for {}", id)
+        };
+
+        self.revoke_key(id, key, &account_signing_key, 0)
+    }
+
+    pub fn revoke_key(
+        &mut self,
+        id: &str,
+        key: VerifyingKey,
+        signing_key: &SigningKey,
+        key_idx: usize,
+    ) -> UncommittedOperation {
+        let hashed_id = Digest::hash(id);
+        let key_hash = KeyHash::with::<Hasher>(hashed_id);
+
+        let Ok(Found(hc, _)) = self.tree.get(key_hash) else {
+            panic!("No existing hashchain found for {}", id)
+        };
+
+        let op =
+            Operation::new_revoke_key(id.to_string(), key, hc.last_hash(), signing_key, key_idx)
+                .expect("Creating account operation should work");
+
+        UncommittedOperation {
+            operation: op,
+            builder: self,
+            post_commit_action: PostCommitAction::UpdateStorageOnly,
+        }
+    }
+
+    pub fn add_signed_data(
+        &mut self,
+        id: &str,
+        value: Vec<u8>,
+        value_signature: SignatureBundle,
+        signing_key: &SigningKey,
+        key_idx: usize,
+    ) -> UncommittedOperation {
+        self.add_data(id, value, Some(value_signature), signing_key, key_idx)
+    }
+
+    pub fn add_signed_data_verified_with_root(
+        &mut self,
+        id: &str,
+        value: Vec<u8>,
+        value_signature: SignatureBundle,
+    ) -> UncommittedOperation {
+        self.add_data_verified_with_root(id, value, Some(value_signature))
+    }
+
+    pub fn add_unsigned_data(
+        &mut self,
+        id: &str,
+        value: Vec<u8>,
+        signing_key: &SigningKey,
+        key_idx: usize,
+    ) -> UncommittedOperation {
+        self.add_data(id, value, None, signing_key, key_idx)
+    }
+
+    pub fn add_unsigned_data_verified_with_root(
+        &mut self,
+        id: &str,
+        value: Vec<u8>,
+    ) -> UncommittedOperation {
+        self.add_data_verified_with_root(id, value, None)
+    }
+
+    fn add_data_verified_with_root(
+        &mut self,
+        id: &str,
+        value: Vec<u8>,
+        value_signature: Option<SignatureBundle>,
+    ) -> UncommittedOperation {
+        let Some(account_signing_key) = self.account_keys.get(id).cloned() else {
+            panic!("No existing account key for {}", id)
+        };
+
+        self.add_data(id, value, value_signature, &account_signing_key, 0)
+    }
+
+    fn add_data(
+        &mut self,
+        id: &str,
+        value: Vec<u8>,
+        value_signature: Option<SignatureBundle>,
+        signing_key: &SigningKey,
+        key_idx: usize,
+    ) -> UncommittedOperation {
+        let hashed_id = Digest::hash(id);
+        let key_hash = KeyHash::with::<Hasher>(hashed_id);
+
+        let Ok(Found(hc, _)) = self.tree.get(key_hash) else {
+            panic!("No existing hashchain found for {}", id)
+        };
+
+        let op = Operation::new_add_signed_data(
+            id.to_string(),
+            value,
+            value_signature,
+            hc.last_hash(),
+            signing_key,
+            key_idx,
+        )
+        .expect("Creating add-data operation should work");
+
+        UncommittedOperation {
+            operation: op,
+            builder: self,
+            post_commit_action: PostCommitAction::UpdateStorageOnly,
+        }
+    }
+}
diff --git a/crates/common/src/test_utils.rs b/crates/common/src/test_utils.rs
index 626fe722..2ed4831c 100644
--- a/crates/common/src/test_utils.rs
+++ b/crates/common/src/test_utils.rs
@@ -116,6 +116,7 @@ impl TestTreeState {
         let op = Operation::new_add_key(
             account.hashchain.id.clone(),
             key_to_add.clone(),
+            account.hashchain.last_hash(),
             self.signing_keys.get(&account.hashchain.id).unwrap(),
             0,
         )?;
@@ -154,10 +155,13 @@ impl TestTreeState {

         let op_signing_key = self.signing_keys.get(&account.hashchain.id).unwrap();

+        let prev_hash = account.hashchain.last_hash();
+
         let op = Operation::new_add_signed_data(
             account.hashchain.id.clone(),
             data.to_vec(),
             signature_bundle,
+            prev_hash,
             op_signing_key,
             0,
         )?;
@@ -229,8 +233,14 @@ pub fn create_random_update(state: &mut TestTreeState, rng: &mut StdRng) -> Upda
         .ok_or_else(|| anyhow::anyhow!("Signing key not found for hashchain"))
         .unwrap();

-    let operation =
-        Operation::new_add_key(hc.id.clone(), verifying_key.clone(), signer, 0).unwrap();
+    let operation = Operation::new_add_key(
+        hc.id.clone(),
+        verifying_key.clone(),
+        hc.last_hash(),
+        signer,
+        0,
+    )
+    .unwrap();

     hc.perform_operation(operation)
         .expect("Adding to hashchain should succeed");
diff --git a/crates/common/src/tree.rs b/crates/common/src/tree.rs
index e6886b43..83dac118 100644
--- a/crates/common/src/tree.rs
+++ b/crates/common/src/tree.rs
@@ -1,20 +1,18 @@
 use anyhow::{anyhow, bail, Context, Result};
 use bincode;
-use bls12_381::Scalar;
 use jmt::{
     proof::{SparseMerkleProof, UpdateMerkleProof},
     storage::{NodeBatch, TreeReader, TreeUpdateBatch, TreeWriter},
-    JellyfishMerkleTree, KeyHash, RootHash, SimpleHasher,
+    JellyfishMerkleTree, KeyHash, RootHash,
 };
 use prism_errors::DatabaseError;
-use serde::{ser::SerializeTupleStruct, Deserialize, Serialize};
-use std::{
-    convert::{From, Into},
-    sync::Arc,
-};
+use serde::{Deserialize, Serialize};
+use std::{convert::Into, sync::Arc};

 use crate::{
+    digest::Digest,
     hashchain::{Hashchain, HashchainEntry},
+    hasher::Hasher,
     operation::{
         AddDataArgs, CreateAccountArgs, KeyOperationArgs, Operation, RegisterServiceArgs,
         ServiceChallenge, ServiceChallengeInput,
@@ -26,155 +24,6 @@ use HashchainResponse::*;

 pub const SPARSE_MERKLE_PLACEHOLDER_HASH: Digest =
     Digest::new(*b"SPARSE_MERKLE_PLACEHOLDER_HASH__");

-#[derive(Debug, Clone, Default)]
-pub struct Hasher(sha2::Sha256);
-
-impl Serialize for Hasher {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: serde::Serializer,
-    {
-        serializer.serialize_tuple_struct("Sha256Wrapper", 0)?.end()
-    }
-}
-
-impl<'de> Deserialize<'de> for Hasher {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: serde::Deserializer<'de>,
-    {
-        struct Sha256WrapperVisitor;
-
-        impl<'de> serde::de::Visitor<'de> for Sha256WrapperVisitor {
-            type Value = Hasher;
-
-            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
-                formatter.write_str("a Sha256Wrapper")
-            }
-
-            fn visit_seq<A>(self, _seq: A) -> Result<Self::Value, A::Error>
-            where
-                A: serde::de::SeqAccess<'de>,
-            {
-                Ok(Hasher::default())
-            }
-        }
-
-        deserializer.deserialize_tuple_struct("Sha256Wrapper", 0, Sha256WrapperVisitor)
-    }
-}
-
-impl SimpleHasher for Hasher {
-    fn new() -> Self {
-        Self(sha2::Sha256::new())
-    }
-
-    fn update(&mut self, data: &[u8]) {
-        self.0.update(data);
-    }
-
-    fn finalize(self) -> [u8; 32] {
-        self.0.finalize()
-    }
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Copy)]
-pub struct Digest(pub [u8; 32]);
-
-impl Digest {
-    pub fn hash(data: impl AsRef<[u8]>) -> Self {
-        let mut hasher = Hasher::new();
-        hasher.update(data.as_ref());
-        Self(hasher.finalize())
-    }
-}
-
-// serializer and deserializer for rocksdb
-// converts from bytearrays into digests
-// padds it with zero if it is too small
-impl<const N: usize> From<[u8; N]> for Digest {
-    fn from(value: [u8; N]) -> Self {
-        assert!(N <= 32, "Input array must not exceed 32 bytes");
-        let mut digest = [0u8; 32];
-        digest[..N].copy_from_slice(&value);
-        Self(digest)
-    }
-}
-
-// implementing it for now to get things to compile, curve choice will be made later
-impl TryFrom<Digest> for Scalar {
-    type Error = anyhow::Error;
-
-    fn try_from(value: Digest) -> Result<Scalar, Self::Error> {
-        let mut byte_array = [0u8; 32];
-        byte_array.copy_from_slice(value.as_ref());
-        byte_array.reverse();
-
-        let val =
-            [
-                u64::from_le_bytes(byte_array[0..8].try_into().map_err(|_| {
-                    anyhow!(format!("slice to array: [0..8] for digest: {value:?}"))
-                })?),
-                u64::from_le_bytes(byte_array[8..16].try_into().map_err(|_| {
-                    anyhow!(format!("slice to array: [8..16] for digest: {value:?}"))
-                })?),
-                u64::from_le_bytes(byte_array[16..24].try_into().map_err(|_| {
-                    anyhow!(format!("slice to array: [16..24] for digest: {value:?}"))
-                })?),
-                u64::from_le_bytes(byte_array[24..32].try_into().map_err(|_| {
-                    anyhow!(format!("slice to array: [24..32] for digest: {value:?}"))
-                })?),
-            ];
-
-        Ok(Scalar::from_raw(val))
-    }
-}
-
-impl From<Digest> for RootHash {
-    fn from(val: Digest) -> RootHash {
-        RootHash::from(val.0)
-    }
-}
-
-impl From<RootHash> for Digest {
-    fn from(val: RootHash) -> Digest {
-        Digest(val.0)
-    }
-}
-
-impl AsRef<[u8]> for Digest {
-    fn as_ref(&self) -> &[u8] {
-        &self.0
-    }
-}
-
-impl std::fmt::Display for Digest {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self.to_hex())
-    }
-}
-
-impl Digest {
-    pub const fn new(bytes: [u8; 32]) -> Self {
-        Digest(bytes)
-    }
-
-    pub fn from_hex(hex_str: &str) -> Result<Self> {
-        let mut bytes = [0u8; 32];
-        hex::decode_to_slice(hex_str, &mut bytes)
-            .map_err(|e| anyhow!(format!("Invalid Format: {e}")))?;
-        Ok(Digest(bytes))
-    }
-
-    pub fn to_hex(&self) -> String {
-        hex::encode(self.0)
-    }
-
-    pub fn to_bytes(&self) -> [u8; 32] {
-        self.0
-    }
-}
-
 #[derive(Serialize, Deserialize)]
 pub struct Batch {
     pub prev_root: Digest,
@@ -409,9 +258,10 @@ where
             Operation::CreateAccount(CreateAccountArgs {
                 id,
                 value,
-                signature,
                 service_id,
                 challenge,
+                prev_hash,
+                signature,
             }) => {
                 let hashed_id = Digest::hash(id);
                 let account_key_hash = KeyHash::with::<Hasher>(hashed_id);
@@ -446,9 +296,10 @@ where
                 let new_account_chain = Hashchain::create_account(
                     id.clone(),
                     value.clone(),
-                    signature.clone(),
                     service_id.clone(),
                     challenge.clone(),
+                    *prev_hash,
+                    signature.clone(),
                 )?;

                 let new_account_entry = new_account_chain.last().unwrap();
diff --git a/crates/da/src/lib.rs b/crates/da/src/lib.rs
index 2156b1c3..adf79c71 100644
--- a/crates/da/src/lib.rs
+++ b/crates/da/src/lib.rs
@@ -1,7 +1,7 @@
 use anyhow::Result;
 use async_trait::async_trait;
 use ed25519_consensus::{Signature, SigningKey, VerificationKey as VerifyingKey};
-use prism_common::{operation::Operation, tree::Digest};
+use prism_common::{digest::Digest, operation::Operation};
 use serde::{Deserialize, Serialize};
 use sp1_sdk::SP1ProofWithPublicValues;
 use tokio::sync::broadcast;
diff --git a/crates/node_types/lightclient/src/lightclient.rs b/crates/node_types/lightclient/src/lightclient.rs
index 0d199359..c3ab367e 100644
--- a/crates/node_types/lightclient/src/lightclient.rs
+++ b/crates/node_types/lightclient/src/lightclient.rs
@@ -1,6 +1,6 @@
 use anyhow::{Context, Result};
 use ed25519_consensus::VerificationKey as VerifyingKey;
-use prism_common::tree::Digest;
+use prism_common::digest::Digest;
 use prism_da::{celestia::CelestiaConfig, DataAvailabilityLayer};
 use prism_errors::{DataAvailabilityError, GeneralError};
 use sp1_sdk::{ProverClient, SP1VerifyingKey};
diff --git a/crates/node_types/prover/src/prover/mod.rs b/crates/node_types/prover/src/prover/mod.rs
index b6bf14c9..120fafd4 100644
--- a/crates/node_types/prover/src/prover/mod.rs
+++ b/crates/node_types/prover/src/prover/mod.rs
@@ -2,9 +2,14 @@ use anyhow::{anyhow, bail, Context, Result};
 use ed25519_consensus::SigningKey;
 use jmt::KeyHash;
 use keystore_rs::create_signing_key;
-use prism_common::tree::{
-    Batch, Digest, HashchainResponse, HashchainResponse::*, Hasher, KeyDirectoryTree, Proof,
-    SnarkableTree,
+use prism_common::{
+    digest::Digest,
+    hasher::Hasher,
+    tree::{
+        Batch,
+        HashchainResponse::{self, *},
+        KeyDirectoryTree, Proof, SnarkableTree,
+    },
 };
 use prism_errors::DataAvailabilityError;
 use std::{self, collections::VecDeque, sync::Arc};
diff --git a/crates/node_types/prover/src/prover/tests.rs b/crates/node_types/prover/src/prover/tests.rs
index a531a506..b0cc56a7 100644
--- a/crates/node_types/prover/src/prover/tests.rs
+++ b/crates/node_types/prover/src/prover/tests.rs
@@ -1,5 +1,5 @@
 use super::*;
-use prism_common::tree::Proof;
+use prism_common::{test_ops::OpsBuilder, tree::Proof};
 use std::{self, sync::Arc, time::Duration};
 use tokio::spawn;

@@ -17,28 +17,13 @@ async fn create_test_prover() -> Arc<Prover> {
 }

 fn create_mock_operations(service_id: String) -> Vec<Operation> {
-    let signing_key_1 = create_mock_signing_key();
-    let signing_key_2 = create_mock_signing_key();
-    let new_key = create_mock_signing_key().verifying_key();
-    let service_key = create_mock_signing_key();
+    let mut ops_builder = OpsBuilder::new();
+
     vec![
-        Operation::new_register_service(service_id.clone(), service_key.clone().into()),
-        Operation::new_create_account(
-            "user1@example.com".to_string(),
-            &signing_key_1,
-            service_id.clone(),
-            &service_key,
-        )
-        .unwrap(),
-        Operation::new_create_account(
-            "user2@example.com".to_string(),
-            &signing_key_2,
-            service_id.clone(),
-            &service_key,
-        )
-        .unwrap(),
-        Operation::new_add_key("user1@example.com".to_string(), new_key, &signing_key_1, 0)
-            .unwrap(),
+        ops_builder.register_service_with_random_key(&service_id).ex(),
+        ops_builder.create_account_with_random_key("user1@example.com", &service_id).ex(),
+        ops_builder.create_account_with_random_key("user2@example.com", &service_id).ex(),
+        ops_builder.add_random_key_verified_with_root("user1@example.com").ex(),
     ]
 }

@@ -46,8 +31,8 @@ async fn test_validate_and_queue_update() {
     let prover = create_test_prover().await;

-    let service_key = create_mock_signing_key();
-    let op = Operation::new_register_service("service_id".to_string(), service_key.clone().into());
+    let mut ops_builder = OpsBuilder::new();
+    let op = ops_builder.register_service_with_random_key("test_service").ex();

     prover.clone().validate_and_queue_update(&op).await.unwrap();

@@ -61,42 +46,34 @@ async fn test_process_operation() {
     let prover = create_test_prover().await;

-    let signing_key = create_mock_signing_key();
-    let original_pubkey = signing_key.verifying_key();
-    let service_key = create_mock_signing_key();
-
-    let register_service_op =
-        Operation::new_register_service("service_id".to_string(), service_key.clone().into());
-    let create_account_op = Operation::new_create_account(
-        "test@example.com".to_string(),
-        &signing_key,
-        "service_id".to_string(),
-        &service_key,
-    )
-    .unwrap();
-
-    let proof = prover
-        .process_operation(&register_service_op)
-        .await
-        .unwrap();
+    let mut ops_builder = OpsBuilder::new();
+    let register_service_op = ops_builder.register_service_with_random_key("test_service").ex();
+    let create_account_op =
+        ops_builder.create_account_with_random_key("test_account", "test_service").ex();
+
+    let proof = prover.process_operation(&register_service_op).await.unwrap();
     assert!(matches!(proof, Proof::Insert(_)));

     let proof = prover.process_operation(&create_account_op).await.unwrap();
     assert!(matches!(proof, Proof::Insert(_)));

     let new_key = create_mock_signing_key();
-    let pubkey = new_key.verifying_key();

     let add_key_op =
-        Operation::new_add_key("test@example.com".to_string(), pubkey, &signing_key, 0).unwrap();
+        ops_builder.add_key_verified_with_root("test_account", new_key.verifying_key()).ex();

     let proof = prover.process_operation(&add_key_op).await.unwrap();
     assert!(matches!(proof, Proof::Update(_)));

     // Revoke original key
-    let revoke_op =
-        Operation::new_revoke_key("test@example.com".to_string(), original_pubkey, &new_key, 1)
-            .unwrap();
+    let revoke_op = ops_builder
+        .revoke_key(
+            "test_account",
+            create_account_op.get_public_key().cloned().unwrap(),
+            &new_key,
+            1,
+        )
+        .ex();
     let proof = prover.process_operation(&revoke_op).await.unwrap();
     assert!(matches!(proof, Proof::Update(_)));
 }

@@ -105,45 +82,20 @@ async fn test_execute_block_with_invalid_tx() {
     let prover = create_test_prover().await;

-    let signing_key_1 = create_mock_signing_key();
-    let signing_key_2 = create_mock_signing_key();
-    let signing_key_3 = create_mock_signing_key();
-    let service_key = create_mock_signing_key();
+    let mut ops_builder = OpsBuilder::new();
+
+    let new_key_1 = create_mock_signing_key();

     let operations = vec![
-        Operation::new_register_service("service_id".to_string(), service_key.clone().into()),
-        Operation::new_create_account(
-            "user1@example.com".to_string(),
-            &signing_key_1,
-            "service_id".to_string(),
-            &service_key,
-        )
-        .unwrap(),
-        // add signing_key_2, so it will be index = 1
-        Operation::new_add_key(
-            "user1@example.com".to_string(),
-            signing_key_2.verifying_key(),
-            &signing_key_1,
-            0,
-        )
-        .unwrap(),
-        // try revoking signing_key_2
-        Operation::new_revoke_key(
-            "user1@example.com".to_string(),
-            signing_key_2.verifying_key(),
-            &signing_key_1,
-            0,
-        )
-        .unwrap(),
+        ops_builder.register_service_with_random_key("service_id").ex(),
+        ops_builder.create_account_with_random_key("account_id", "service_id").ex(),
+        // add new key, so it will be index = 1
+        ops_builder.add_key_verified_with_root("account_id", new_key_1.verifying_key()).ex(),
+        // revoke new key again
+        ops_builder.revoke_key_verified_with_root("account_id", new_key_1.verifying_key()).ex(),
         // and adding in same block.
         // both of these operations are valid individually, but when processed together it will fail.
-        Operation::new_add_key(
-            "user1@example.com".to_string(),
-            signing_key_3.verifying_key(),
-            &signing_key_2,
-            1,
-        )
-        .unwrap(),
+        ops_builder.add_random_key("account_id", &new_key_1, 1).op(),
     ];

     let proofs = prover.execute_block(operations).await.unwrap();
diff --git a/crates/node_types/prover/src/webserver.rs b/crates/node_types/prover/src/webserver.rs
index 168d9c8b..325d177d 100644
--- a/crates/node_types/prover/src/webserver.rs
+++ b/crates/node_types/prover/src/webserver.rs
@@ -13,9 +13,7 @@ use indexed_merkle_tree::{
 };
 use jmt::proof::SparseMerkleProof;
 use prism_common::{
-    hashchain::Hashchain,
-    operation::Operation,
-    tree::{HashchainResponse, Hasher},
+    hashchain::Hashchain, hasher::Hasher, operation::Operation, tree::HashchainResponse,
 };
 use serde::{Deserialize, Serialize};
 use std::{self, sync::Arc};
diff --git a/crates/storage/src/database.rs b/crates/storage/src/database.rs
index 51bf0108..85f2e6b1 100644
--- a/crates/storage/src/database.rs
+++ b/crates/storage/src/database.rs
@@ -1,7 +1,7 @@
 use anyhow::Result;
 use auto_impl::auto_impl;
 use jmt::storage::{TreeReader, TreeWriter};
-use prism_common::tree::Digest;
+use prism_common::digest::Digest;
 use prism_errors::{DatabaseError, PrismError};

 #[auto_impl(&, Box, Arc)]
diff --git a/crates/storage/src/inmemory.rs b/crates/storage/src/inmemory.rs
index 4990e521..bc7a2e00 100644
--- a/crates/storage/src/inmemory.rs
+++ b/crates/storage/src/inmemory.rs
@@ -3,7 +3,7 @@ use jmt::{
     storage::{LeafNode, Node, NodeBatch, NodeKey, TreeReader, TreeWriter},
     KeyHash, OwnedValue, Version,
 };
-use prism_common::tree::Digest;
+use prism_common::digest::Digest;
 use prism_errors::DatabaseError;
 use std::{
     collections::HashMap,
diff --git a/crates/storage/src/redis.rs b/crates/storage/src/redis.rs
index 910bdff4..1ad6ed1a 100644
--- a/crates/storage/src/redis.rs
+++ b/crates/storage/src/redis.rs
@@ -4,6 +4,7 @@ use jmt::{
     KeyHash, OwnedValue, Version,
 };
 use mockall::predicate::*;
+use prism_common::digest::Digest;
 use redis::{Client, Commands, Connection};
 use serde::{Deserialize, Serialize};
 use std::{
@@ -14,7 +15,6 @@ use std::{
     time::Duration,
 };

-use prism_common::tree::Digest;
 use prism_errors::DatabaseError;

 use crate::database::{convert_to_connection_error, Database};
diff --git a/crates/storage/src/rocksdb.rs b/crates/storage/src/rocksdb.rs
index 5dba41d5..ac426d29 100644
--- a/crates/storage/src/rocksdb.rs
+++ b/crates/storage/src/rocksdb.rs
@@ -4,7 +4,7 @@ use jmt::{
     storage::{LeafNode, Node, NodeBatch, NodeKey, TreeReader, TreeWriter},
     KeyHash, OwnedValue, Version,
 };
-use prism_common::tree::Digest;
+use prism_common::digest::Digest;
 use prism_errors::DatabaseError;
 use rocksdb::{DBWithThreadMode, MultiThreaded, Options, DB};

@@ -109,7 +109,6 @@ impl TreeReader for RocksDBConnection {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use prism_common::tree::Digest;
     use tempfile::TempDir;

     #[test]
diff --git a/crates/tests/Cargo.toml b/crates/tests/Cargo.toml
index 1961c882..4d88f506 100644
--- a/crates/tests/Cargo.toml
+++ b/crates/tests/Cargo.toml
@@ -17,6 +17,7 @@ mock_prover = []
 log = { workspace = true }
 pretty_env_logger = { workspace = true }
 anyhow = { workspace = true }
+jmt = { workspace = true }
 keystore-rs = { workspace = true }
 prism-common = { workspace = true, features = ["test_utils"] }
 prism-storage = { workspace = true }
diff --git a/crates/tests/src/lib.rs b/crates/tests/src/lib.rs
index d9ab4660..7cf13f76 100644
--- a/crates/tests/src/lib.rs
+++ b/crates/tests/src/lib.rs
@@ -3,11 +3,15 @@
 #[macro_use]
 extern crate log;

-use anyhow::Result;
+use anyhow::{bail, Result};
+use jmt::KeyHash;
 use keystore_rs::create_signing_key;
 use prism_common::{
+    digest::Digest,
+    hasher::Hasher,
     operation::{Operation, ServiceChallenge},
     test_utils::create_mock_signing_key,
+    tree::{HashchainResponse::*, SnarkableTree},
 };
 use prism_da::{
     celestia::{CelestiaConfig, CelestiaConnection},
@@ -28,10 +32,16 @@ fn create_random_user(id: &str, state: &mut TestTreeState, service: &Service) ->
 }

 fn add_key(id: &str, state: &mut TestTreeState) -> Result<Operation> {
-    let signing_key = state
-        .signing_keys
-        .get(id)
-        .ok_or_else(|| anyhow::anyhow!("Signing key not found for account {}", id))?;
+    let hashed_id = Digest::hash(id);
+    let key_hash = KeyHash::with::<Hasher>(hashed_id);
+
+    let Found(hc, _) = state.tree.get(key_hash)? else {
+        bail!("Hashchain not found for account {}", id);
+    };
+
+    let Some(signing_key) = state.signing_keys.get(id) else {
+        bail!("Signing key not found for account {}", id);
+    };

     let new_key = create_mock_signing_key();
     let new_public_key = new_key.verifying_key();
@@ -39,6 +49,7 @@ fn add_key(id: &str, state: &mut TestTreeState) -> Result<Operation> {
     let op = Operation::new_add_key(
         id.to_string(),
         new_public_key,
+        hc.last_hash(),
         signing_key,
         0, // Assuming this is the key index, you might need to adjust this
     )?;
diff --git a/crates/zk/sp1/src/main.rs b/crates/zk/sp1/src/main.rs
index c12f82b6..5669ef0b 100644
--- a/crates/zk/sp1/src/main.rs
+++ b/crates/zk/sp1/src/main.rs
@@ -1,7 +1,10 @@
 #![no_main]
 sp1_zkvm::entrypoint!(main);

-use prism_common::tree::{Batch, Digest, Proof};
+use prism_common::{
+    digest::Digest,
+    tree::{Batch, Proof},
+};

 pub fn main() {
     let batch = sp1_zkvm::io::read::<Batch>();
diff --git a/elf/riscv32im-succinct-zkvm-elf b/elf/riscv32im-succinct-zkvm-elf
index be80f892..a3bff7e5 100755
Binary files a/elf/riscv32im-succinct-zkvm-elf and b/elf/riscv32im-succinct-zkvm-elf differ
diff --git a/rustfmt.toml b/rustfmt.toml
index a811a490..7ad6dc85 100644
--- a/rustfmt.toml
+++ b/rustfmt.toml
@@ -1,2 +1,4 @@
 merge_imports = true
 imports_granularity = "Crate"
+max_width = 100
+chain_width = 100