Handle DA errors, progress towards allowing retry (#20)
* feat: start work on separating out celestia and zkp client calls
* feat: move to a recursive worker model so the workers' state-machine progression is clearer (see the sketch below this list)
* feat: handle errors in job progress, add some debug helpers
* feat: add failure retry possibility
* docs: add some rust API docs
* cargo machete
* justfile helper improvements
* feature gate some parts of common and make mods for them
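
A minimal sketch of the recursive worker pattern these changes describe, assuming hypothetical `JobStatus`, `fetch_blob`, and `prove` names; the crate's actual types differ:

```rust
// Illustrative only: `JobStatus`, `fetch_blob`, and `prove` are stand-ins,
// not this repository's actual API.
use std::future::Future;
use std::pin::Pin;

#[derive(Debug)]
enum JobStatus {
    DataAvailabilityPending,  // waiting on the Celestia (DA) client
    ZkProofPending(Vec<u8>),  // blob retrieved; waiting on the ZKP client
    Failed(String),           // error recorded with the job, enabling retry
    Done(Vec<u8>),            // finished proof
}

// Each transition recurses into the worker with the next state, so the control
// flow reads like the state machine. Recursing in async Rust requires boxing.
fn job_worker(job_id: u64, status: JobStatus) -> Pin<Box<dyn Future<Output = JobStatus> + Send>> {
    Box::pin(async move {
        match status {
            JobStatus::DataAvailabilityPending => match fetch_blob(job_id).await {
                Ok(blob) => job_worker(job_id, JobStatus::ZkProofPending(blob)).await,
                // A DA error becomes job state instead of crashing the worker...
                Err(e) => JobStatus::Failed(format!("DA error: {e}")),
            },
            JobStatus::ZkProofPending(blob) => match prove(&blob).await {
                Ok(proof) => JobStatus::Done(proof),
                Err(e) => JobStatus::Failed(format!("prover error: {e}")),
            },
            // ...so re-entering the worker with a Failed job retries from the top.
            JobStatus::Failed(_) => job_worker(job_id, JobStatus::DataAvailabilityPending).await,
            done @ JobStatus::Done(_) => done,
        }
    })
}

// Stubs standing in for the separated Celestia and ZKP client calls.
async fn fetch_blob(_job_id: u64) -> Result<Vec<u8>, String> { Ok(vec![1, 2, 3]) }
async fn prove(_blob: &[u8]) -> Result<Vec<u8>, String> { Ok(vec![4, 5, 6]) }
```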
nuke-web3 authored Jan 31, 2025
1 parent d6f6cdb commit f9b66d4
Showing 23 changed files with 826 additions and 4,162 deletions.
328 changes: 244 additions & 84 deletions Cargo.lock

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -25,6 +25,7 @@ tonic-build = { version = "0.11.0", default-features = false }
env_logger = "0.11"
log = "0.4"
base64 = "0.22"
jsonrpsee = "0.24"
alloy = { version = "0.9", default-features = false }

[patch.crates-io]
9 changes: 8 additions & 1 deletion README.md
@@ -97,15 +97,22 @@ Set the correct info in your `.env` file of choice.
### Launch the Eq Service

```sh
# Bring required vars into scope, or replace $<variable> below
source .env

# Fetching the Keccak inclusion proof for a specific Celestia commitment, namespace, and height
grpcurl -import-path $EQ_PROTO_DIR -proto eqservice.proto \
-d '{height": <block height (integer)>", "namespace": "<your_namespace_hex>", commitment": "<your_commitment_hex>"}'
-plaintext $EQ_SOCKET eqs.Inclusion.GetKeccakInclusion

# Working example using Celestia's mainnet network
# Working examples using Celestia's mainnet network
grpcurl -import-path $EQ_PROTO_DIR -proto eqservice.proto \
-d '{"height": 4214864, "namespace": "3q2+796tvu8=", "commitment":"YcARQRj9KE/7sSXd4090FAONKkPz9ajYKIZq8liv3A0="}' \
-plaintext $EQ_SOCKET eqs.Inclusion.GetKeccakInclusion

grpcurl -import-path $EQ_PROTO_DIR -proto eqservice.proto \
-d '{"height": 4409088, "namespace": "XSUTEfJbE6VJ4A==", "commitment":"DYoAZpU7FrviV7Ui/AjQv0BpxCwexPWaOW/hQVpEl/s="}' \
-plaintext $EQ_SOCKET eqs.Inclusion.GetKeccakInclusion
```
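
Note that grpcurl encodes proto `bytes` fields as base64, not hex; one way to convert, sketched with standard tools:

```sh
# Hex -> base64 for the namespace/commitment fields; e.g. the namespace
# 0xdeadbeefdeadbeef corresponds to the "3q2+796tvu8=" used above.
echo -n "deadbeefdeadbeef" | xxd -r -p | base64
```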

## Development
2 changes: 2 additions & 0 deletions blob-tool/src/main.rs
@@ -1,3 +1,5 @@
#![doc = include_str!("../README.md")]

use base64::Engine;
use celestia_rpc::{BlobClient, Client, HeaderClient};
use celestia_types::blob::Commitment;
15 changes: 10 additions & 5 deletions common/Cargo.toml
@@ -7,13 +7,18 @@ edition = "2021"
celestia-types = {workspace = true}
nmt-rs = {workspace = true}
serde = {workspace = true}
tendermint = {workspace = true}
tendermint-proto = {workspace = true}
tendermint-proto = {workspace = true, optional = true}
sha3 = {workspace = true}
thiserror = {workspace = true}
alloy = {workspace = true, features = ["sol-types"]}
prost = {workspace = true}
tonic = {workspace = true, features = ["codegen", "prost"] }
prost = {workspace = true, optional = true}
tonic = {workspace = true, features = ["codegen", "prost"], optional = true}

[build-dependencies]
tonic-build = {workspace = true, features=["prost"] }
tonic-build = {workspace = true, features=["prost"], optional = true}

[features]
default = ["grpc", "utils"]

utils = ["tendermint-proto"]
grpc = ["tonic", "prost", "tonic-build"]
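
With this split, a downstream crate can skip the gRPC codegen entirely; a hypothetical consumer declaration (path illustrative, assuming the package is named `common`):

```toml
# Take only the proof utilities; disabling default features drops the
# tonic/prost dependencies and the build-time proto codegen.
[dependencies]
common = { path = "../common", default-features = false, features = ["utils"] }
```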
21 changes: 13 additions & 8 deletions common/build.rs
@@ -1,12 +1,17 @@
fn main() -> Result<(), Box<dyn std::error::Error>> {
let code_gen_path = std::path::Path::new("src/generated");
if !code_gen_path.exists() {
std::fs::create_dir_all(code_gen_path)?;
#[cfg(feature = "grpc")]
{
let code_gen_path = std::path::Path::new("src/generated");
if !code_gen_path.exists() {
std::fs::create_dir_all(code_gen_path)?;
}
tonic_build::configure()
.build_server(true)
.build_client(true)
.out_dir(code_gen_path)
.compile(&["proto/eqservice.proto"], &["proto/"])?;
Ok(())
}
tonic_build::configure()
.build_server(true)
.build_client(true)
.out_dir(code_gen_path)
.compile(&["proto/eqservice.proto"], &["proto/"])?;
#[cfg(not(feature = "grpc"))]
Ok(())
}
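
With the gate in place, a build without default features skips the `tonic_build` codegen step entirely (again assuming the package is named `common`):

```sh
# Default build: the grpc feature is on, so proto codegen runs.
cargo build -p common
# Utilities only: build.rs reduces to a no-op Ok(()).
cargo build -p common --no-default-features --features utils
```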
2 changes: 1 addition & 1 deletion common/proto/eqservice.proto
@@ -24,4 +24,4 @@ message GetKeccakInclusionResponse {
string error_message = 4; // Used when status is FAILED
string status_message = 5; // Used when status is WAITING, this is the status message of the prover network
}
}
}
3 changes: 2 additions & 1 deletion common/src/error.rs
@@ -1,6 +1,7 @@
use serde::{Deserialize, Serialize};
use thiserror::Error;

#[derive(Error, Debug)]
#[derive(Clone, Error, Debug, Serialize, Deserialize)]
pub enum InclusionServiceError {
#[error("Blob index not found")]
MissingBlobIndex,
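
Deriving `Clone`, `Serialize`, and `Deserialize` lets a failure be persisted with job state and replayed on later status queries; a minimal sketch, assuming serde_json and a crate named `common` (the service's actual storage may differ):

```rust
use common::InclusionServiceError;

fn main() -> Result<(), serde_json::Error> {
    // Record the failure alongside the job...
    let failure = InclusionServiceError::MissingBlobIndex;
    let stored = serde_json::to_vec(&failure)?;
    // ...and recover it when the job is queried or retried.
    let recalled: InclusionServiceError = serde_json::from_slice(&stored)?;
    assert_eq!(recalled.to_string(), "Blob index not found");
    Ok(())
}
```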
196 changes: 19 additions & 177 deletions common/src/lib.rs
@@ -1,28 +1,24 @@
use celestia_types::{
blob::Blob,
nmt::{Namespace, NamespaceProof, NamespacedHashExt},
ExtendedHeader,
};
use nmt_rs::{
simple_merkle::{
db::MemDb,
proof::Proof,
tree::{MerkleHash, MerkleTree},
},
NamespacedHash, TmSha2Hasher,
};
use serde::{Deserialize, Serialize};
use sha3::{Digest, Keccak256};
use std::cmp::max;
use tendermint::hash::Hash as TmHash;
use tendermint_proto::{
v0_37::{types::BlockId as RawBlockId, version::Consensus as RawConsensusVersion},
Protobuf,
};
use alloy::sol;
use celestia_types::nmt::{Namespace, NamespaceProof};
use nmt_rs::{simple_merkle::proof::Proof, NamespacedHash, TmSha2Hasher};
use serde::{Deserialize, Serialize};

#[cfg(feature = "utils")]
mod error;
#[cfg(feature = "utils")]
pub use error::InclusionServiceError;

#[cfg(feature = "utils")]
pub mod utils;
#[cfg(feature = "utils")]
pub use utils::*;

#[cfg(feature = "grpc")]
/// gRPC generated bindings
pub mod eqs {
include!("generated/eqs.rs");
}

/*
The types of proofs we expect to support:
1. KeccakInclusionToDataRootProof
@@ -43,162 +39,8 @@ pub struct KeccakInclusionToDataRootProofInput {
pub keccak_hash: [u8; 32],
}

/*#[derive(Serialize, Deserialize)]
pub struct KeccakInclusionToDataRootProofOutput {
pub keccak_hash: [u8; 32],
pub data_root: Vec<u8>,
}*/

pub mod eqs {
include!("generated/eqs.rs");
}

/// Expecting bytes:
/// (keccak_hash: [u8; 32], pub data_root: [u8; 32])
pub type KeccakInclusionToDataRootProofOutput = sol! {
tuple(bytes32, bytes32)
};
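
A sketch of producing those bytes via alloy's `SolType` encoding (method names follow alloy-sol-types, but exact signatures vary by version, and the crate name `common` is assumed):

```rust
use alloy::primitives::FixedBytes;
use alloy::sol_types::SolType;
use common::KeccakInclusionToDataRootProofOutput;

fn main() {
    // The output commits to (keccak_hash, data_root) as two bytes32 words.
    let keccak_hash = FixedBytes::<32>::from([0x11u8; 32]);
    let data_root = FixedBytes::<32>::from([0x22u8; 32]);

    // ABI-encode for an on-chain verifier; a static 2-tuple is 64 bytes.
    let encoded = KeccakInclusionToDataRootProofOutput::abi_encode(&(keccak_hash, data_root));
    assert_eq!(encoded.len(), 64);

    // Round-trip decode (the `true` asks alloy to validate while decoding).
    let (h, d) = KeccakInclusionToDataRootProofOutput::abi_decode(&encoded, true).unwrap();
    assert_eq!((h, d), (keccak_hash, data_root));
}
```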

pub fn create_inclusion_proof_input(
blob: &Blob,
header: &ExtendedHeader,
nmt_multiproofs: Vec<NamespaceProof>,
) -> Result<KeccakInclusionToDataRootProofInput, InclusionServiceError> {
let eds_row_roots = header.dah.row_roots();
let eds_column_roots = header.dah.column_roots();

// Compute these values needed for proving inclusion
let eds_size: u64 = eds_row_roots.len().try_into().unwrap();
let ods_size = eds_size / 2;

let blob_index = blob.index.ok_or(InclusionServiceError::MissingBlobIndex)?;
let blob_size: u64 = max(
1,
blob.to_shares()
.map_err(|e| InclusionServiceError::ShareConversionError(e.to_string()))?
.len() as u64,
);
let first_row_index: u64 = blob_index.div_ceil(eds_size) - 1;
let ods_index = blob_index - (first_row_index * ods_size);

let last_row_index: u64 = (ods_index + blob_size).div_ceil(ods_size) - 1;

let hasher = TmSha2Hasher {};
let mut row_root_tree: MerkleTree<MemDb<[u8; 32]>, TmSha2Hasher> =
MerkleTree::with_hasher(hasher);

let leaves = eds_row_roots
.iter()
.chain(eds_column_roots.iter())
.map(|root| root.to_array())
.collect::<Vec<[u8; 90]>>();

for root in &leaves {
row_root_tree.push_raw_leaf(root);
}

// assert that the row root tree equals the data hash
assert_eq!(
row_root_tree.root(),
header.header.data_hash.unwrap().as_bytes()
);
// Get range proof of the row roots spanned by the blob
// +1 is so we include the last row root
let row_root_multiproof =
row_root_tree.build_range_proof(first_row_index as usize..(last_row_index + 1) as usize);
// Sanity check, verify the row root range proof
let hasher = TmSha2Hasher {};
let leaves_hashed = leaves
.iter()
.map(|leaf| hasher.hash_leaf(leaf))
.collect::<Vec<[u8; 32]>>();
row_root_multiproof
.verify_range(
header
.header
.data_hash
.unwrap()
.as_bytes()
.try_into()
.unwrap(),
&leaves_hashed[first_row_index as usize..(last_row_index + 1) as usize],
)
.map_err(|_| InclusionServiceError::RowRootVerificationFailed)?;

let mut hasher = Keccak256::new();
hasher.update(&blob.data);
let hash: [u8; 32] = hasher
.finalize()
.try_into()
.map_err(|_| InclusionServiceError::KeccakHashConversion)?;

Ok(KeccakInclusionToDataRootProofInput {
blob_data: blob.data.clone(),
blob_index: blob.index.unwrap(),
blob_namespace: blob.namespace,
keccak_hash: hash,
nmt_multiproofs,
row_root_multiproof,
row_roots: eds_row_roots[first_row_index as usize..=last_row_index as usize].to_vec(),
data_root: header.header.data_hash.unwrap().encode_vec(),
})
}
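
To make the index arithmetic in `create_inclusion_proof_input` concrete, a worked example with illustrative numbers:

```rust
fn main() {
    // A 4x4 extended data square (eds_size = 4, ods_size = 2) holding a blob
    // that starts at share index 5 and spans 2 shares.
    let (eds_size, ods_size): (u64, u64) = (4, 2);
    let (blob_index, blob_size): (u64, u64) = (5, 2);

    let first_row_index = blob_index.div_ceil(eds_size) - 1; // ceil(5/4) - 1 = 1
    let ods_index = blob_index - first_row_index * ods_size; // 5 - 1*2 = 3
    let last_row_index = (ods_index + blob_size).div_ceil(ods_size) - 1; // ceil(5/2) - 1 = 2

    // The blob spans row roots 1..=2: exactly the range the multiproof covers.
    assert_eq!((first_row_index, last_row_index), (1, 2));
}
```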

pub fn create_header_field_tree(
header: &ExtendedHeader,
) -> (
MerkleTree<MemDb<[u8; 32]>, TmSha2Hasher>,
Proof<TmSha2Hasher>,
) {
let hasher = TmSha2Hasher {};
let mut header_field_tree: MerkleTree<MemDb<[u8; 32]>, TmSha2Hasher> =
MerkleTree::with_hasher(hasher);

let field_bytes = vec![
Protobuf::<RawConsensusVersion>::encode_vec(header.header.version),
header.header.chain_id.clone().encode_vec(),
header.header.height.encode_vec(),
header.header.time.encode_vec(),
Protobuf::<RawBlockId>::encode_vec(header.header.last_block_id.unwrap_or_default()),
header
.header
.last_commit_hash
.unwrap_or_default()
.encode_vec(),
header.header.data_hash.unwrap_or_default().encode_vec(),
header.header.validators_hash.encode_vec(),
header.header.next_validators_hash.encode_vec(),
header.header.consensus_hash.encode_vec(),
header.header.app_hash.clone().encode_vec(),
header
.header
.last_results_hash
.unwrap_or_default()
.encode_vec(),
header.header.evidence_hash.unwrap_or_default().encode_vec(),
header.header.proposer_address.encode_vec(),
];

for leaf in field_bytes {
header_field_tree.push_raw_leaf(&leaf);
}

let (data_hash_bytes_from_tree, data_hash_proof) = header_field_tree.get_index_with_proof(6);

// Verify the data hash
let data_hash_from_tree = TmHash::decode_vec(&data_hash_bytes_from_tree).unwrap();
assert_eq!(
data_hash_from_tree.as_bytes(),
header.header.data_hash.unwrap().as_bytes()
);
assert_eq!(header.hash().as_ref(), header_field_tree.root());

// Verify the proof
let hasher = TmSha2Hasher {};
data_hash_proof
.verify_range(
&header_field_tree.root(),
&[hasher.hash_leaf(&data_hash_bytes_from_tree)],
)
.unwrap();

(header_field_tree, data_hash_proof)
}