Skip to content

Commit d882e39

Browse files
authored
Merge pull request #26 from alephium/verify-token-metadata
Verify token metadata
2 parents bad9e26 + c2621b1 commit d882e39

27 files changed: +1878 additions, −221 deletions

.github/workflows/misspellings_checks.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,4 +27,4 @@ jobs:
2727
builtin: clear,rare
2828
check_filenames: true
2929
ignore_words_list: crate,Crate,alph,ALPH
30-
exclude_file: js/package-lock.json
30+
exclude_file: js/package-lock.json,js/merkle-tree/token.json

Makefile

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,9 @@ check:
2828
cargo fmt --all -- --check && \
2929
echo 'Cargo clippy' && \
3030
cargo +nightly-2023-11-10 clippy --target=nanos && \
31-
cargo +nightly-2023-11-10 clippy --target=stax \
31+
cargo +nightly-2023-11-10 clippy --target=stax && \
32+
cargo install cargo-audit && cargo audit && \
33+
cargo install --locked cargo-deny && cargo +nightly-2023-11-10 deny check \
3234
"
3335

3436
_run-speculos:

app/Cargo.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ name = "alephium"
33
version = "0.4.0"
44
authors = ["alephium devs"]
55
edition = "2021"
6+
license = "MIT"
67

78
[dependencies]
89
ledger_device_sdk = "=1.12.0"

app/deny.toml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
[bans]
2+
multiple-versions = "allow"
3+
4+
[licenses]
5+
allow = [ "MIT", "Apache-2.0", "Unicode-DFS-2016", "ISC", "BSD-3-Clause" ]

app/src/blake2b_hasher.rs

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@ use crate::error_code::ErrorCode;
22
use ledger_secure_sdk_sys::*;
33

44
pub const BLAKE2B_HASH_SIZE: usize = 32;
5+
pub type Blake2bHash = [u8; BLAKE2B_HASH_SIZE];
56
pub struct Blake2bHasher(cx_blake2b_s);
67

78
// A wrapper around the Ledger SDK's blake2b implementation
@@ -12,7 +13,7 @@ impl Blake2bHasher {
1213
Self(v)
1314
}
1415

15-
pub fn hash(input: &[u8]) -> Result<[u8; BLAKE2B_HASH_SIZE], ErrorCode> {
16+
pub fn hash(input: &[u8]) -> Result<Blake2bHash, ErrorCode> {
1617
let mut hasher = Blake2bHasher::new();
1718
hasher.update(input)?;
1819
hasher.finalize()
@@ -37,7 +38,7 @@ impl Blake2bHasher {
3738
}
3839
}
3940

40-
pub fn finalize(&mut self) -> Result<[u8; BLAKE2B_HASH_SIZE], ErrorCode> {
41+
pub fn finalize(&mut self) -> Result<Blake2bHash, ErrorCode> {
4142
let mut result = [0u8; BLAKE2B_HASH_SIZE];
4243
let rc = unsafe {
4344
cx_hash_final(

app/src/error_code.rs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,8 @@ pub enum ErrorCode {
1818
DerivingPublicKeyFailed = 0xE005,
1919
InvalidTokenSize = 0xE006,
2020
InvalidMetadataVersion = 0xE007,
21+
InvalidTokenProofSize = 0xE008,
22+
InvalidTokenMetadata = 0xE009,
2123
InternalError = 0xEF00,
2224
}
2325

app/src/handler.rs

Lines changed: 54 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -6,19 +6,16 @@ use crate::{
66
error_code::ErrorCode,
77
public_key::derive_pub_key,
88
sign_tx_context::{check_blind_signing, SignTxContext},
9-
ui::{
10-
review_address, sign_hash_ui,
11-
tx_reviewer::{TxReviewer, TOKEN_METADATA_SIZE},
12-
},
9+
ui::{review_address, sign_hash_ui, tx_reviewer::TxReviewer},
1310
};
1411

1512
const MAX_TOKEN_SIZE: u8 = 5;
1613
const PATH_LENGTH: usize = 20;
1714
const HASH_LENGTH: usize = 32;
1815
const PATH_HEX_LENGTH: usize = PATH_LENGTH * 2;
19-
const FIRST_FRAME_PREFIX_LENGTH: usize = PATH_LENGTH + 1; // path + 1 byte token size
2016
const CALL_CONTRACT_FLAG: u8 = 0x01;
2117
const SCRIPT_OFFSET: usize = 3; // the encoded script offset in the tx
18+
pub const TOKEN_METADATA_SIZE: usize = 46;
2219

2320
#[repr(u8)]
2421
pub enum Ins {
@@ -110,7 +107,13 @@ pub fn handle_apdu(
110107
}
111108
}
112109
Ins::SignTx => {
113-
let data = comm.get_data()?;
110+
let data = match comm.get_data() {
111+
Ok(data) => data,
112+
Err(code) => {
113+
reset(sign_tx_context, tx_reviewer);
114+
return Err(code.into());
115+
}
116+
};
114117
match handle_sign_tx(apdu_header, data, sign_tx_context, tx_reviewer) {
115118
Ok(()) if !sign_tx_context.is_complete() => {
116119
return Ok(());
@@ -128,9 +131,11 @@ pub fn handle_apdu(
128131
}
129132
Err(code) => Err(code.into()),
130133
};
134+
reset(sign_tx_context, tx_reviewer);
131135
return result;
132136
}
133137
Err(code) => {
138+
reset(sign_tx_context, tx_reviewer);
134139
return Err(code.into());
135140
}
136141
}
@@ -139,55 +144,66 @@ pub fn handle_apdu(
139144
Ok(())
140145
}
141146

142-
// The transaction is split into multiple APDU commands
143-
// The first APDU command contains the path and token metadata
144-
// The subsequent APDU commands contain the transaction data
145-
// The transaction data is processed in chunks
147+
// The transaction is split into multiple APDU commands, consisting of token metadata APDU and tx APDU commands
148+
// We use `p1` and `p2` to distinguish between APDUs:
149+
// * `p1` = 0 and `p2` = 0 indicates the first token metadata APDU frame
150+
// * `p1` = 0 and `p2` = 1 indicates a new token metadata APDU frame
151+
// * `p1` = 0 and `p2` = 2 indicates the remaining token proof APDU frame
152+
// * `p1` = 1 and `p2` = 0 indicates the first tx APDU frame
153+
// * `p1` = 1 and `p2` = 1 indicates subsequent tx APDU frames
146154
fn handle_sign_tx(
147155
apdu_header: &ApduHeader,
148156
data: &[u8],
149157
sign_tx_context: &mut SignTxContext,
150158
tx_reviewer: &mut TxReviewer,
151159
) -> Result<(), ErrorCode> {
152-
match apdu_header.p1 {
153-
0 if data.len() < FIRST_FRAME_PREFIX_LENGTH => Err(ErrorCode::BadLen),
154-
0 => {
155-
// handle the path
156-
sign_tx_context.init(&data[..PATH_LENGTH])?;
157-
158-
// handle the token metadata
159-
let token_size = data[FIRST_FRAME_PREFIX_LENGTH - 1];
160-
if token_size > MAX_TOKEN_SIZE {
161-
return Err(ErrorCode::InvalidTokenSize);
160+
match (apdu_header.p1, apdu_header.p2) {
161+
(0, 0) => {
162+
// the first frame
163+
if data.is_empty() {
164+
return Err(ErrorCode::BadLen);
165+
}
166+
let token_size = data[0]; // the first byte is the token size
167+
check_token_size(token_size)?;
168+
tx_reviewer.init(token_size)?;
169+
if token_size == 0 {
170+
return Ok(());
162171
}
163-
let tx_data_index: usize =
164-
FIRST_FRAME_PREFIX_LENGTH + TOKEN_METADATA_SIZE * (token_size as usize);
165-
if data.len() < tx_data_index + SCRIPT_OFFSET {
172+
tx_reviewer.handle_token_metadata(&data[1..])
173+
}
174+
(0, 1) => tx_reviewer.handle_token_metadata(data), // token metadata and proof frame
175+
(0, 2) => tx_reviewer.handle_token_proof(data), // the following token proof frame
176+
(1, 0) => {
177+
// the first unsigned tx frame
178+
if data.len() < PATH_LENGTH + SCRIPT_OFFSET {
166179
return Err(ErrorCode::BadLen);
167180
}
168-
let tx_data = &data[tx_data_index..];
181+
let tx_data = &data[PATH_LENGTH..];
169182
let is_tx_execute_script = tx_data[SCRIPT_OFFSET - 1] == CALL_CONTRACT_FLAG;
170183
if is_tx_execute_script {
171184
check_blind_signing()?;
172185
}
173-
let token_metadata = &data[FIRST_FRAME_PREFIX_LENGTH..tx_data_index];
174-
check_token_metadata(token_size, token_metadata)?;
175-
tx_reviewer.init(is_tx_execute_script, token_metadata)?;
176-
sign_tx_context.handle_data(apdu_header, tx_data, tx_reviewer)
186+
tx_reviewer.set_tx_execute_script(is_tx_execute_script);
187+
188+
sign_tx_context.init(&data[..PATH_LENGTH])?;
189+
sign_tx_context.handle_tx_data(apdu_header, tx_data, tx_reviewer)
177190
}
178-
1 => sign_tx_context.handle_data(apdu_header, data, tx_reviewer),
191+
(1, 1) => sign_tx_context.handle_tx_data(apdu_header, data, tx_reviewer), // the following unsigned tx frame
179192
_ => Err(ErrorCode::BadP1P2),
180193
}
181194
}
182195

183-
// Check the token metadata version
184-
// The token metadata version should be 0 for now
185-
fn check_token_metadata(token_size: u8, token_metadata: &[u8]) -> Result<(), ErrorCode> {
186-
for i in 0..token_size {
187-
let version_index = (i as usize) * TOKEN_METADATA_SIZE;
188-
if token_metadata[version_index] != 0 {
189-
return Err(ErrorCode::InvalidMetadataVersion);
190-
}
196+
#[inline]
197+
fn check_token_size(size: u8) -> Result<(), ErrorCode> {
198+
if size > MAX_TOKEN_SIZE {
199+
Err(ErrorCode::InvalidTokenSize)
200+
} else {
201+
Ok(())
191202
}
192-
Ok(())
203+
}
204+
205+
#[inline]
206+
fn reset(sign_tx_context: &mut SignTxContext, tx_reviewer: &mut TxReviewer) {
207+
sign_tx_context.reset();
208+
tx_reviewer.reset();
193209
}

app/src/main.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ mod ledger_sdk_stub;
2222
mod public_key;
2323
mod settings;
2424
mod sign_tx_context;
25+
mod token_verifier;
2526
mod ui;
2627

2728
ledger_device_sdk::set_panic!(ledger_device_sdk::exiting_panic);

app/src/sign_tx_context.rs

Lines changed: 14 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ enum DecodeStep {
2929
// It keeps track of the current step, the transaction decoder, the path, and the device address
3030
// A streaming decoder is used to decode the transaction in chunks so that it can handle large transactions
3131
pub struct SignTxContext {
32-
pub path: [u32; 5],
32+
pub path: [u32; PATH_LENGTH],
3333
tx_decoder: StreamingDecoder<UnsignedTx>,
3434
current_step: DecodeStep,
3535
hasher: Blake2bHasher,
@@ -49,10 +49,9 @@ impl SignTxContext {
4949
}
5050
}
5151

52-
// Initialize the context with the path
52+
// Initialize the context
5353
pub fn init(&mut self, data: &[u8]) -> Result<(), ErrorCode> {
5454
deserialize_path(data, &mut self.path, ErrorCode::HDPathDecodingFailed)?;
55-
5655
self.tx_decoder.reset();
5756
self.current_step = DecodeStep::Init;
5857
self.hasher.reset();
@@ -61,6 +60,15 @@ impl SignTxContext {
6160
Ok(())
6261
}
6362

63+
pub fn reset(&mut self) {
64+
self.path = [0; PATH_LENGTH];
65+
self.tx_decoder.reset();
66+
self.current_step = DecodeStep::Init;
67+
self.hasher.reset();
68+
self.temp_data.reset(0);
69+
self.device_address = None;
70+
}
71+
6472
pub fn is_complete(&self) -> bool {
6573
self.current_step == DecodeStep::Complete
6674
}
@@ -124,7 +132,7 @@ impl SignTxContext {
124132
}
125133

126134
// Handle a transaction data chunk
127-
pub fn handle_data(
135+
pub fn handle_tx_data(
128136
&mut self,
129137
apdu_header: &ApduHeader,
130138
tx_data_chunk: &[u8],
@@ -134,7 +142,7 @@ impl SignTxContext {
134142
DecodeStep::Complete => Err(ErrorCode::InternalError),
135143
DecodeStep::Init => {
136144
// The first chunk of the transaction
137-
if apdu_header.p1 == 0 {
145+
if apdu_header.p1 == 1 && apdu_header.p2 == 0 {
138146
self.current_step = DecodeStep::DecodingTx;
139147
self.decode_tx(tx_data_chunk, tx_reviewer)
140148
} else {
@@ -143,7 +151,7 @@ impl SignTxContext {
143151
}
144152
DecodeStep::DecodingTx => {
145153
// The subsequent chunks of the transaction
146-
if apdu_header.p1 == 1 {
154+
if apdu_header.p1 == 1 && apdu_header.p2 == 1 {
147155
self.decode_tx(tx_data_chunk, tx_reviewer)
148156
} else {
149157
Err(ErrorCode::BadP1P2)

app/src/token_verifier.rs

Lines changed: 90 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,90 @@
1+
use crate::{
2+
blake2b_hasher::{Blake2bHash, Blake2bHasher, BLAKE2B_HASH_SIZE},
3+
error_code::ErrorCode,
4+
handler::TOKEN_METADATA_SIZE,
5+
};
6+
7+
// b3380866c595544781e9da0ccd79399de8878abfb0bf40545b57a287387d419d
8+
const TOKEN_MERKLE_ROOT: Blake2bHash = [
9+
0xb3, 0x38, 0x08, 0x66, 0xc5, 0x95, 0x54, 0x47, 0x81, 0xe9, 0xda, 0x0c, 0xcd, 0x79, 0x39, 0x9d,
10+
0xe8, 0x87, 0x8a, 0xbf, 0xb0, 0xbf, 0x40, 0x54, 0x5b, 0x57, 0xa2, 0x87, 0x38, 0x7d, 0x41, 0x9d,
11+
];
12+
const PROOF_PREFIX_LENGTH: usize = 2;
13+
14+
// `TokenVerifier` is a streaming token proof verifier that receives proof data and calculates the hash
15+
// After receiving all the proof data, it compares the hash with the `TOKEN_MERKLE_ROOT` to verify if the token is valid
16+
#[derive(Default, Copy, Clone)]
17+
pub struct TokenVerifier {
18+
remaining_proof_size: usize,
19+
hash: Blake2bHash,
20+
}
21+
22+
pub fn hash_pair(a: &[u8], b: &[u8]) -> Result<Blake2bHash, ErrorCode> {
23+
assert!(a.len() == BLAKE2B_HASH_SIZE && b.len() == BLAKE2B_HASH_SIZE);
24+
let mut hasher = Blake2bHasher::new();
25+
if a < b {
26+
hasher.update(a)?;
27+
hasher.update(b)?;
28+
} else {
29+
hasher.update(b)?;
30+
hasher.update(a)?;
31+
}
32+
hasher.finalize()
33+
}
34+
35+
impl TokenVerifier {
36+
pub fn new(data: &[u8]) -> Result<TokenVerifier, ErrorCode> {
37+
let prefix_length = TOKEN_METADATA_SIZE + PROOF_PREFIX_LENGTH;
38+
if data.len() < prefix_length {
39+
return Err(ErrorCode::BadLen);
40+
}
41+
42+
let encoded_token = &data[..TOKEN_METADATA_SIZE];
43+
let proof_size =
44+
((data[TOKEN_METADATA_SIZE] as usize) << 8) | (data[TOKEN_METADATA_SIZE + 1] as usize);
45+
check_proof_size(proof_size)?;
46+
47+
let mut verifier = TokenVerifier {
48+
remaining_proof_size: proof_size,
49+
hash: Blake2bHasher::hash(encoded_token)?,
50+
};
51+
let proof = &data[prefix_length..];
52+
verifier.on_proof(proof)?;
53+
Ok(verifier)
54+
}
55+
56+
// update the hash when receiving token proof data
57+
pub fn on_proof(&mut self, proof: &[u8]) -> Result<(), ErrorCode> {
58+
check_proof_size(proof.len())?;
59+
if self.remaining_proof_size < proof.len() {
60+
return Err(ErrorCode::InvalidTokenProofSize);
61+
}
62+
63+
let mut index: usize = 0;
64+
while index < proof.len() {
65+
let sibling = &proof[index..(index + BLAKE2B_HASH_SIZE)];
66+
self.hash = hash_pair(&self.hash, sibling)?;
67+
index += BLAKE2B_HASH_SIZE
68+
}
69+
self.remaining_proof_size -= proof.len();
70+
Ok(())
71+
}
72+
73+
pub fn is_complete(&self) -> bool {
74+
self.remaining_proof_size == 0
75+
}
76+
77+
#[inline]
78+
pub fn is_token_valid(&self) -> bool {
79+
assert!(self.is_complete());
80+
self.hash == TOKEN_MERKLE_ROOT
81+
}
82+
}
83+
84+
fn check_proof_size(size: usize) -> Result<(), ErrorCode> {
85+
if size % BLAKE2B_HASH_SIZE != 0 {
86+
Err(ErrorCode::InvalidTokenProofSize)
87+
} else {
88+
Ok(())
89+
}
90+
}

0 commit comments

Comments (0)