
Commit 6b33525

Merge pull request #442 from input-output-hk/lowhung/381-header-nonces
feat: add header and nonces modules for snapshot bootstrapper
2 parents 30d1e20 + 415bb4d commit 6b33525

10 files changed: 362 additions, 0 deletions

Cargo.lock

Lines changed: 2 additions & 0 deletions
Generated file; diff not rendered by default.

common/src/types.rs

Lines changed: 25 additions & 0 deletions
@@ -735,6 +735,31 @@ pub enum Point {
     },
 }

+impl Point {
+    pub fn slot(&self) -> Slot {
+        match self {
+            Self::Origin => 0,
+            Self::Specific { slot, .. } => *slot,
+        }
+    }
+
+    pub fn hash(&self) -> Option<&BlockHash> {
+        match self {
+            Self::Origin => None,
+            Self::Specific { hash, .. } => Some(hash),
+        }
+    }
+}
+
+impl Display for Point {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::Origin => write!(f, "origin"),
+            Self::Specific { hash, slot } => write!(f, "{}@{}", hash, slot),
+        }
+    }
+}
+
 /// Amount of Ada, in Lovelace
 pub type Lovelace = u64;
 pub type LovelaceDelta = i64;
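
The new accessors give Point a slot/hash view, and the Display impl renders it as "{hash}@{slot}". A minimal usage sketch, assuming BlockHash parses from a 64-character hex string as the module's tests do:

use acropolis_common::Point;

// Hypothetical helper: a log-friendly label built only from the
// accessors added in this commit.
fn describe(point: &Point) -> String {
    match point.hash() {
        // Specific points render via the new Display impl.
        Some(_) => format!("chain point {} (slot {})", point, point.slot()),
        // Origin has no hash; slot() falls back to 0.
        None => "chain point origin".to_string(),
    }
}

fn main() {
    let origin = Point::Origin;
    assert_eq!(origin.slot(), 0);
    assert!(origin.hash().is_none());

    let tip = Point::Specific {
        slot: 42,
        hash: "0000000000000000000000000000000000000000000000000000000000000000"
            .parse()
            .expect("valid 32-byte hex hash"),
    };
    // Prints "chain point <64-hex hash>@42 (slot 42)".
    println!("{}", describe(&tip));
}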

modules/snapshot_bootstrapper/Cargo.toml

Lines changed: 2 additions & 0 deletions
@@ -14,7 +14,9 @@ acropolis_common = { path = "../../common" }
 caryatid_sdk = { workspace = true }

 anyhow = { workspace = true }
+pallas-primitives = { workspace = true }
 config = { workspace = true }
+minicbor = { version = "0.25.1", features = ["std", "half", "derive"] }
 tokio = { workspace = true }
 tracing = { workspace = true }
 serde = { workspace = true, features = ["rc"] }

Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
[
  "134524816.82dbc35b32bcbbad4e14cda9b1bfb1ceeee4d2fb8d2f299caf2008cfe072bd54",
  "134956789.6558deef007ba372a414466e49214368c17c1f8428093193fc187d1c4587053c"
]

Lines changed: 7 additions & 0 deletions
@@ -0,0 +1,7 @@
{
  "at": "134956789.6558deef007ba372a414466e49214368c17c1f8428093193fc187d1c4587053c",
  "active": "0b9e320e63bf995b81287ce7a624b6735d98b083cc1a0e2ae8b08b680c79c983",
  "candidate": "6cc4dafecbe0d593ca0dee64518542f5faa741538791ac7fc2d5008f32d5c4d5",
  "evolving": "f5589f01dd0efd0add0c58e8b27dc73ba3fcd662d9026b3fedbf06c648adb313",
  "tail": "29011cc1320d03b3da0121236dc66e6bc391feef4bb1d506a7fb20e769d6a494"
}

modules/snapshot_bootstrapper/src/bootstrapper.rs

Lines changed: 2 additions & 0 deletions
@@ -1,5 +1,7 @@
 mod configuration;
 mod downloader;
+mod header;
+mod nonces;
 mod progress_reader;
 mod publisher;

modules/snapshot_bootstrapper/src/header.rs (new file)

Lines changed: 173 additions & 0 deletions
@@ -0,0 +1,173 @@
#![allow(dead_code, unused)]
use acropolis_common::hash::Hash;
use acropolis_common::Point;
use pallas_primitives::babbage::MintedHeader;
use pallas_primitives::conway::Header as ConwayHeader;
use std::fs;
use std::path::{Path, PathBuf};
use thiserror::Error;

#[derive(Debug, Error)]
pub enum HeaderContextError {
    #[error("Failed to read header file {0}: {1}")]
    ReadFile(PathBuf, std::io::Error),

    #[error("Failed to decode header at slot {0}: {1}")]
    Decode(u64, String),

    #[error("Origin point has no hash")]
    OriginPoint,

    #[error("Failed to convert hash: {0}")]
    HashConversion(String),
}

#[derive(Debug)]
pub struct HeaderContext {
    pub point: Point,
    pub block_number: u64,
}

impl HeaderContext {
    /// Generate the path for a header file.
    /// Returns an error if the point is Origin (has no hash).
    pub fn path(network_dir: &Path, point: &Point) -> Result<PathBuf, HeaderContextError> {
        let hash = point.hash().ok_or(HeaderContextError::OriginPoint)?;
        let filename = format!("header.{}.{}.cbor", point.slot(), hash);
        Ok(network_dir.join("headers").join(filename))
    }

    /// Convert raw hash bytes to our Hash type.
    pub fn convert_hash(block_body_hash: &[u8]) -> Result<Hash<32>, HeaderContextError> {
        block_body_hash
            .try_into()
            .map_err(|_| HeaderContextError::HashConversion(format!("{:02x?}", block_body_hash)))
    }

    /// Load and decode header from `headers/header.{slot}.{hash}.cbor`
    pub fn load(network_dir: &Path, point: &Point) -> Result<Self, HeaderContextError> {
        let path = Self::path(network_dir, point)?;
        let cbor = fs::read(&path).map_err(|e| HeaderContextError::ReadFile(path, e))?;

        let minted: MintedHeader<'_> = minicbor::decode(&cbor)
            .map_err(|e| HeaderContextError::Decode(point.slot(), e.to_string()))?;
        let header = ConwayHeader::from(minted);
        Ok(Self {
            point: point.clone(),
            block_number: header.header_body.block_number,
        })
    }
}

#[cfg(test)]
mod header_tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    fn specific_point(slot: u64, hash_str: &str) -> Point {
        Point::Specific {
            slot,
            hash: hash_str.parse().expect("valid hash"),
        }
    }

    fn setup_headers_dir() -> TempDir {
        let temp_dir = TempDir::new().unwrap();
        fs::create_dir_all(temp_dir.path().join("headers")).unwrap();
        temp_dir
    }

    const ZERO_HASH: &str = "0000000000000000000000000000000000000000000000000000000000000000";

    #[test]
    fn path_fails_for_origin_point() {
        let result = HeaderContext::path(Path::new("/test"), &Point::Origin);

        let err = result.unwrap_err();
        assert!(matches!(err, HeaderContextError::OriginPoint));
        assert_eq!(err.to_string(), "Origin point has no hash");
    }

    #[test]
    fn path_succeeds_for_specific_point() {
        let point = specific_point(42, ZERO_HASH);

        let path = HeaderContext::path(Path::new("/test"), &point).unwrap();

        assert!(path.ends_with(format!("headers/header.42.{}.cbor", ZERO_HASH)));
    }

    #[test]
    fn convert_hash_fails_for_wrong_length() {
        // Too short
        assert!(matches!(
            HeaderContext::convert_hash(&[0u8; 16]),
            Err(HeaderContextError::HashConversion(_))
        ));

        // Too long
        assert!(matches!(
            HeaderContext::convert_hash(&[0u8; 64]),
            Err(HeaderContextError::HashConversion(_))
        ));
    }

    #[test]
    fn convert_hash_succeeds_for_32_bytes() {
        let bytes = [0xab; 32];
        assert!(HeaderContext::convert_hash(&bytes).is_ok());
    }

    #[test]
    fn hash_conversion_error_includes_hex_representation() {
        let err = HeaderContext::convert_hash(&[0xde, 0xad, 0xbe, 0xef]).unwrap_err();
        let msg = err.to_string().to_lowercase();

        assert!(msg.contains("de") && msg.contains("ad"));
    }

    #[test]
    fn load_fails_for_origin_point() {
        let temp_dir = setup_headers_dir();

        let err = HeaderContext::load(temp_dir.path(), &Point::Origin).unwrap_err();

        assert!(matches!(err, HeaderContextError::OriginPoint));
    }

    #[test]
    fn load_fails_when_file_missing() {
        let temp_dir = setup_headers_dir();
        let point = specific_point(12345, ZERO_HASH);

        let err = HeaderContext::load(temp_dir.path(), &point).unwrap_err();

        assert!(matches!(err, HeaderContextError::ReadFile(_, _)));
        assert!(err.to_string().contains("header.12345"));
    }

    #[test]
    fn load_fails_for_invalid_cbor() {
        let temp_dir = setup_headers_dir();
        let point = specific_point(12345, ZERO_HASH);
        let path = HeaderContext::path(temp_dir.path(), &point).unwrap();
        fs::write(&path, b"not valid cbor").unwrap();

        let err = HeaderContext::load(temp_dir.path(), &point).unwrap_err();

        assert!(matches!(err, HeaderContextError::Decode(12345, _)));
    }

    #[test]
    fn load_fails_for_wrong_cbor_structure() {
        let temp_dir = setup_headers_dir();
        let point = specific_point(555, ZERO_HASH);
        let path = HeaderContext::path(temp_dir.path(), &point).unwrap();
        fs::write(&path, minicbor::to_vec(42u64).unwrap()).unwrap();

        let err = HeaderContext::load(temp_dir.path(), &point).unwrap_err();

        assert!(matches!(err, HeaderContextError::Decode(555, _)));
    }
}
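
A rough sketch of how the bootstrapper might consume HeaderContext once a snapshot point is known. The network_dir argument and the wrapping function are illustrative assumptions, not part of this commit:

use std::path::Path;

use acropolis_common::Point;
// Assumed layout: HeaderContext is exposed by the new header module.
use crate::header::{HeaderContext, HeaderContextError};

/// Hypothetical caller: resolve the on-disk header for the snapshot tip
/// and report the block number it carries.
fn block_number_at(network_dir: &Path, tip: &Point) -> Result<u64, HeaderContextError> {
    // Reads headers/header.{slot}.{hash}.cbor under network_dir and decodes
    // it as a Conway-era header; Origin points are rejected with OriginPoint.
    let ctx = HeaderContext::load(network_dir, tip)?;
    Ok(ctx.block_number)
}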

modules/snapshot_bootstrapper/src/nonces.rs (new file)

Lines changed: 147 additions & 0 deletions
@@ -0,0 +1,147 @@
#![allow(dead_code, unused)]
use acropolis_common::protocol_params::{Nonce, Nonces};
use acropolis_common::{BlockHash, Point};
use serde::{Deserialize, Deserializer};
use std::fs;
use std::path::{Path, PathBuf};
use thiserror::Error;

#[derive(Debug, Error)]
pub enum NonceContextError {
    #[error("Failed to read {0}: {1}")]
    ReadFile(PathBuf, std::io::Error),

    #[error("Failed to parse {0}: {1}")]
    Parse(PathBuf, serde_json::Error),
}

fn deserialize_nonce<'de, D>(deserializer: D) -> Result<Nonce, D::Error>
where
    D: Deserializer<'de>,
{
    let hash: BlockHash = Deserialize::deserialize(deserializer)?;
    Ok(Nonce::from(hash))
}

fn deserialize_point<'de, D>(deserializer: D) -> Result<Point, D::Error>
where
    D: Deserializer<'de>,
{
    let s = String::deserialize(deserializer)?;
    s.split_once('.')
        .and_then(|(slot_str, hash_str)| {
            Some(Point::Specific {
                slot: slot_str.parse().ok()?,
                hash: hash_str.parse().ok()?,
            })
        })
        .ok_or_else(|| serde::de::Error::custom("invalid point format"))
}

#[derive(Debug, Deserialize)]
pub struct NonceContext {
    #[serde(deserialize_with = "deserialize_point")]
    pub at: Point,
    #[serde(deserialize_with = "deserialize_nonce")]
    pub active: Nonce,
    #[serde(deserialize_with = "deserialize_nonce")]
    pub candidate: Nonce,
    #[serde(deserialize_with = "deserialize_nonce")]
    pub evolving: Nonce,
    #[serde(deserialize_with = "deserialize_nonce")]
    pub tail: Nonce,
}

impl NonceContext {
    pub fn path(network_dir: &Path) -> PathBuf {
        network_dir.join("nonces.json")
    }

    pub fn load(network_dir: &Path) -> Result<Self, NonceContextError> {
        let path = Self::path(network_dir);
        let content =
            fs::read_to_string(&path).map_err(|e| NonceContextError::ReadFile(path.clone(), e))?;
        serde_json::from_str(&content).map_err(|e| NonceContextError::Parse(path, e))
    }

    pub fn into_nonces(self, epoch: u64, lab_hash: BlockHash) -> Nonces {
        Nonces {
            epoch,
            active: self.active,
            evolving: self.evolving,
            candidate: self.candidate,
            lab: Nonce::from(lab_hash),
            prev_lab: self.tail,
        }
    }
}

#[cfg(test)]
mod nonces_tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    const ZERO_HASH: &str = "0000000000000000000000000000000000000000000000000000000000000000";

    fn valid_json_with_point(point: &str) -> String {
        format!(
            r#"{{
                "at": "{point}",
                "active": "{ZERO_HASH}",
                "candidate": "{ZERO_HASH}",
                "evolving": "{ZERO_HASH}",
                "tail": "{ZERO_HASH}"
            }}"#
        )
    }

    #[test]
    fn load_fails_when_file_missing() {
        let temp_dir = TempDir::new().unwrap();

        let err = NonceContext::load(temp_dir.path()).unwrap_err();

        assert!(matches!(err, NonceContextError::ReadFile(_, _)));
        assert!(err.to_string().contains("nonces.json"));
    }

    #[test]
    fn load_fails_for_invalid_json() {
        let temp_dir = TempDir::new().unwrap();
        fs::write(NonceContext::path(temp_dir.path()), "not valid json {{{").unwrap();

        let err = NonceContext::load(temp_dir.path()).unwrap_err();

        assert!(matches!(err, NonceContextError::Parse(_, _)));
    }

    #[test]
    fn load_fails_when_missing_required_fields() {
        let temp_dir = TempDir::new().unwrap();
        fs::write(NonceContext::path(temp_dir.path()), r#"{"at": "123.abc"}"#).unwrap();

        let err = NonceContext::load(temp_dir.path()).unwrap_err();

        assert!(matches!(err, NonceContextError::Parse(_, _)));
    }

    #[test]
    fn load_fails_for_invalid_point_format() {
        let temp_dir = TempDir::new().unwrap();

        let bad_case = format!("not_a_number.{ZERO_HASH}").clone();
        let cases = ["no_dot_separator", bad_case.as_str()];

        for invalid_point in cases {
            fs::write(
                NonceContext::path(temp_dir.path()),
                valid_json_with_point(invalid_point),
            )
            .unwrap();

            let err = NonceContext::load(temp_dir.path()).unwrap_err();
            assert!(matches!(err, NonceContextError::Parse(_, _)));
        }
    }
}
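
A hedged sketch of how NonceContext might be turned into the protocol Nonces record at bootstrap time. The epoch and last-applied-block hash are placeholders supplied by the caller here, not values this module computes:

use std::path::Path;

use acropolis_common::protocol_params::Nonces;
use acropolis_common::BlockHash;
// Assumed layout: NonceContext is exposed by the new nonces module.
use crate::nonces::{NonceContext, NonceContextError};

/// Hypothetical caller: read nonces.json from the network directory and
/// combine it with values taken from the snapshot header.
fn bootstrap_nonces(
    network_dir: &Path,
    epoch: u64,
    lab_hash: BlockHash,
) -> Result<Nonces, NonceContextError> {
    let ctx = NonceContext::load(network_dir)?;
    // `tail` becomes prev_lab; lab is derived from the supplied block hash.
    Ok(ctx.into_nonces(epoch, lab_hash))
}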
