ci: add packets verification to Github Actions #5335

Merged (1 commit) on Mar 18, 2025
44 changes: 44 additions & 0 deletions .github/workflows/verify-packets.yml
@@ -0,0 +1,44 @@
name: Verify Packets

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master
    paths:
      - .github/workflows/verify-packets.yml
Comment on lines +4 to +11
Member (Author):
The pipeline will be triggered when:

  • pushing to master
  • a PR is based on master and modifies .github/workflows/verify-packets.yml

Reply:
This sounds good; no need to poke this on every CI run.


concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  gossip:
    timeout-minutes: 30
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install required packages
        run: |
          sudo apt update
          sudo apt install -y \
            libclang-dev \
            libprotobuf-dev \
            libssl-dev \
            libudev-dev \
            pkg-config \
            zlib1g-dev \
            llvm \
            clang \
            cmake \
            make \
            protobuf-compiler \
            git-lfs

      - name: Run packet verify
        run: |
          ./ci/test-verify-packets-gossip.sh
46 changes: 44 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default.

13 changes: 13 additions & 0 deletions ci/test-verify-packets-gossip.sh
@@ -0,0 +1,13 @@
#!/usr/bin/env bash

set -e
here=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)

if ! git lfs --version &>/dev/null; then
echo "Git LFS is not installed. Please install Git LFS to proceed."
exit 1
fi

rm -rf "$here"/solana-packets
git clone https://github.com/anza-xyz/solana-packets.git "$here"/solana-packets
GOSSIP_WIRE_FORMAT_PACKETS="$here/solana-packets/GOSSIP_PACKETS" cargo test --package solana-gossip -- wire_format_tests::tests::test_gossip_wire_format --exact --show-output
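
For local reproduction without re-cloning the corpus on every run, the same test can be pointed at an existing checkout. A minimal sketch, taken from the script above except for the checkout location, which is hypothetical:

# Assumes solana-packets was already cloned to ~/solana-packets (hypothetical path).
GOSSIP_WIRE_FORMAT_PACKETS="$HOME/solana-packets/GOSSIP_PACKETS" \
  cargo test --package solana-gossip -- \
  wire_format_tests::tests::test_gossip_wire_format --exact --show-output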
2 changes: 2 additions & 0 deletions gossip/Cargo.toml
@@ -76,12 +76,14 @@ static_assertions = { workspace = true }
thiserror = { workspace = true }

[dev-dependencies]
anyhow = { workspace = true }
bs58 = { workspace = true }
criterion = { workspace = true }
num_cpus = { workspace = true }
rand0-7 = { workspace = true }
rand_chacha0-2 = { workspace = true }
serial_test = { workspace = true }
solana-net-utils = { workspace = true, features = ["dev-context-only-utils"] }
solana-perf = { workspace = true, features = ["dev-context-only-utils"] }
solana-runtime = { workspace = true, features = ["dev-context-only-utils"] }
solana-sdk = { workspace = true }
2 changes: 2 additions & 0 deletions gossip/src/lib.rs
@@ -46,3 +46,5 @@ extern crate solana_frozen_abi_macro;

#[macro_use]
extern crate solana_metrics;

mod wire_format_tests;
65 changes: 65 additions & 0 deletions gossip/src/wire_format_tests.rs
@@ -0,0 +1,65 @@
#![allow(clippy::arithmetic_side_effects)]

#[cfg(test)]
mod tests {

    use {
        crate::protocol::Protocol,
        serde::Serialize,
        solana_net_utils::tooling_for_tests::{hexdump, validate_packet_format},
        solana_sanitize::Sanitize,
        std::path::PathBuf,
    };

    fn parse_gossip(bytes: &[u8]) -> anyhow::Result<Protocol> {
        let pkt: Protocol = solana_perf::packet::deserialize_from_with_limit(bytes)?;
        pkt.sanitize()?;
        Ok(pkt)
    }

    fn serialize<T: Serialize>(pkt: T) -> Vec<u8> {
        bincode::serialize(&pkt).unwrap()
    }

    fn find_differences(a: &[u8], b: &[u8]) -> Option<usize> {
        if a.len() != b.len() {
            return Some(a.len().min(b.len()));
        }
        for (idx, (e1, e2)) in a.iter().zip(b).enumerate() {
            if e1 != e2 {
                return Some(idx);
            }
        }
        None
    }

    /// Test the ability of gossip parsers to understand and re-serialize a corpus of
    /// packets captured from mainnet.
    ///
    /// This test requires external files and is not run by default.
    /// Export the "GOSSIP_WIRE_FORMAT_PACKETS" variable to run this test
    #[test]
    fn test_gossip_wire_format() {
        solana_logger::setup();
        let path_base = match std::env::var_os("GOSSIP_WIRE_FORMAT_PACKETS") {
            Some(p) => PathBuf::from(p),
            None => {
                eprintln!("Test requires GOSSIP_WIRE_FORMAT_PACKETS env variable, skipping!");
                return;
            }
        };
        for entry in
            std::fs::read_dir(path_base).expect("Expecting env var to point to a directory")
        {
            let entry = entry.expect("Expecting a readable file");
            validate_packet_format(
                &entry.path(),
                parse_gossip,
                serialize,
                hexdump,
                find_differences,
            )
            .unwrap();
        }
    }
}
3 changes: 3 additions & 0 deletions ledger/Cargo.toml
@@ -10,6 +10,7 @@ license = { workspace = true }
edition = { workspace = true }

[dependencies]
anyhow = { workspace = true }
assert_matches = { workspace = true }
bincode = { workspace = true }
bitflags = { workspace = true, features = ["serde"] }
@@ -55,6 +56,7 @@ solana-frozen-abi-macro = { workspace = true, optional = true, features = [
] }
solana-measure = { workspace = true }
solana-metrics = { workspace = true }
solana-net-utils = { workspace = true }
solana-perf = { workspace = true }
solana-program-runtime = { workspace = true, features = ["metrics"] }
solana-pubkey = { workspace = true }
@@ -96,6 +98,7 @@ bs58 = { workspace = true }
criterion = { workspace = true }
solana-account-decoder = { workspace = true }
solana-logger = { workspace = true }
solana-net-utils = { workspace = true, features = ["dev-context-only-utils"] }
solana-runtime = { workspace = true, features = ["dev-context-only-utils"] }
spl-pod = { workspace = true }
test-case = { workspace = true }
2 changes: 2 additions & 0 deletions ledger/src/lib.rs
@@ -51,3 +51,5 @@ extern crate solana_frozen_abi_macro;
pub mod macro_reexports {
pub use solana_accounts_db::hardened_unpack::MAX_GENESIS_ARCHIVE_UNPACKED_SIZE;
}

mod wire_format_tests;
93 changes: 93 additions & 0 deletions ledger/src/wire_format_tests.rs
@@ -0,0 +1,93 @@
#![allow(clippy::arithmetic_side_effects)]

#[cfg(test)]
mod tests {
    use {
        crate::shred::Shred,
        solana_net_utils::tooling_for_tests::{hexdump, validate_packet_format},
        std::path::PathBuf,
    };

    fn parse_turbine(bytes: &[u8]) -> anyhow::Result<Shred> {
        let shred = Shred::new_from_serialized_shred(bytes.to_owned())
            .map_err(|_e| anyhow::anyhow!("Can not deserialize"))?;
        shred
            .sanitize()
            .map_err(|_e| anyhow::anyhow!("Failed sanitize"))?;
        Ok(shred)
    }

    fn serialize(pkt: Shred) -> Vec<u8> {
        pkt.payload().to_vec()
    }

    fn find_differences(a: &[u8], b: &[u8]) -> Option<usize> {
        if a.len() != b.len() {
            return Some(a.len());
        }
        for (idx, (e1, e2)) in a.iter().zip(b).enumerate() {
            if e1 != e2 {
                return Some(idx);
            }
        }
        None
    }

    fn show_packet(bytes: &[u8]) -> anyhow::Result<()> {
        let shred = parse_turbine(bytes)?;
        let merkle_root = shred.merkle_root();
        let chained_merkle_root = shred.chained_merkle_root();
        let rtx_sign = shred.retransmitter_signature();

        println!("=== {} bytes ===", bytes.len());
        println!(
            "Shred ID={ID:?} ErasureSetID={ESI:?}",
            ID = shred.id(),
            ESI = shred.erasure_set()
        );
        println!(
            "Shred merkle root {:X?}, chained root {:X?}, rtx_sign {:X?}",
            merkle_root.map(|v| v.as_ref().to_vec()),
            chained_merkle_root.map(|v| v.as_ref().to_vec()),
            rtx_sign.map(|v| v.as_ref().to_vec())
        );
        println!(
            "Data shreds: {:?}, Coding shreds: {:?}",
            shred.num_data_shreds(),
            shred.num_coding_shreds()
        );
        hexdump(bytes)?;
        println!("===");
        Ok(())
    }

    /// Test the ability of turbine parser to understand and re-serialize a corpus of
    /// packets captured from mainnet.
    ///
    /// This test requires external files and is not run by default.
    /// Export the "TURBINE_WIRE_FORMAT_PACKETS" env variable to run this test.
    #[test]
    fn test_turbine_wire_format() {
        solana_logger::setup();
        let path_base = match std::env::var_os("TURBINE_WIRE_FORMAT_PACKETS") {
            Some(p) => PathBuf::from(p),
            None => {
                eprintln!("Test requires TURBINE_WIRE_FORMAT_PACKETS env variable, skipping!");
                return;
            }
        };
        for entry in
            std::fs::read_dir(path_base).expect("Expecting env var to point to a directory")
        {
            let entry = entry.expect("Expecting a readable file");
            validate_packet_format(
                &entry.path(),
                parse_turbine,
                serialize,
                show_packet,
                find_differences,
            )
            .unwrap();
        }
    }
}
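
The turbine test follows the same pattern but this PR adds no CI script for it. A sketch of a local invocation, assuming the ledger crate is published as solana-ledger and that a directory of captured shreds is available (the path below is hypothetical; the env var and test name come from the test above):

# Hypothetical capture directory; only the env var name and test path are taken from the code above.
TURBINE_WIRE_FORMAT_PACKETS="$HOME/turbine-captures" \
  cargo test --package solana-ledger -- \
  wire_format_tests::tests::test_turbine_wire_format --exact --show-output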