Commit 57da450

Merge pull request #22 from alephium/token-metadata

Display the token amount and token symbol

2 parents 7d258ce + 5b885e8

18 files changed: +572 −231 lines changed

README.md

Lines changed: 2 additions & 1 deletion

````diff
@@ -21,7 +21,8 @@ make run-speculos-<device>
 Run the tests:
 
 ```shell
-cd js && npm run speculos-test
+cd js/docker && docker compose up -d && cd ..
+npm install && MODEL=<device> npm run speculos-test
 ```
 
 ### Test with a Ledger Device
````

app/src/error_code.rs

Lines changed: 2 additions & 0 deletions

```diff
@@ -16,6 +16,8 @@ pub enum ErrorCode {
     HDPathDecodingFailed = 0xE003,
     BlindSigningDisabled = 0xE004,
     DerivingPublicKeyFailed = 0xE005,
+    InvalidTokenSize = 0xE006,
+    InvalidMetadataVersion = 0xE007,
     InternalError = 0xEF00,
 }
 
```

app/src/handler.rs

Lines changed: 29 additions & 4 deletions

```diff
@@ -6,7 +6,10 @@ use crate::{
     error_code::ErrorCode,
     public_key::derive_pub_key,
     sign_tx_context::{check_blind_signing, SignTxContext},
-    ui::{review_address, sign_hash_ui, tx_reviewer::TxReviewer},
+    ui::{
+        review_address, sign_hash_ui,
+        tx_reviewer::{TxReviewer, TOKEN_METADATA_SIZE},
+    },
 };
 
 #[repr(u8)]
@@ -120,25 +123,47 @@
     Ok(())
 }
 
+const MAX_TOKEN_SIZE: u8 = 5;
+
 fn handle_sign_tx(
     apdu_header: &ApduHeader,
     data: &[u8],
     sign_tx_context: &mut SignTxContext,
     tx_reviewer: &mut TxReviewer,
 ) -> Result<(), ErrorCode> {
     match apdu_header.p1 {
-        0 if data.len() < 23 => Err(ErrorCode::BadLen),
+        0 if data.len() < 21 => Err(ErrorCode::BadLen), // 20 bytes path + 1 byte token size
         0 => {
             sign_tx_context.init(data)?;
-            let tx_data = &data[20..];
+            let token_size = data[20];
+            if token_size > MAX_TOKEN_SIZE {
+                return Err(ErrorCode::InvalidTokenSize);
+            }
+            let tx_data_index: usize = 21 + TOKEN_METADATA_SIZE * (token_size as usize);
+            if data.len() < tx_data_index + 3 {
+                return Err(ErrorCode::BadLen);
+            }
+            let tx_data = &data[tx_data_index..];
             let is_tx_execute_script = tx_data[2] == 0x01;
             if is_tx_execute_script {
                 check_blind_signing()?;
             }
-            tx_reviewer.init(is_tx_execute_script);
+            let token_metadata = &data[21..tx_data_index];
+            check_token_metadata(token_size, token_metadata)?;
+            tx_reviewer.init(is_tx_execute_script, token_metadata)?;
             sign_tx_context.handle_data(apdu_header, tx_data, tx_reviewer)
         }
         1 => sign_tx_context.handle_data(apdu_header, data, tx_reviewer),
         _ => Err(ErrorCode::BadP1P2),
     }
 }
+
+fn check_token_metadata(token_size: u8, token_metadata: &[u8]) -> Result<(), ErrorCode> {
+    for i in 0..token_size {
+        let version_index = (i as usize) * TOKEN_METADATA_SIZE;
+        if token_metadata[version_index] != 0 {
+            return Err(ErrorCode::InvalidMetadataVersion);
+        }
+    }
+    Ok(())
+}
```
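With this change, the first SIGN_TX chunk (p1 = 0) is laid out as: a 20-byte derivation path, one byte holding the token count (at most MAX_TOKEN_SIZE = 5), then `token_count * TOKEN_METADATA_SIZE` bytes of token metadata, followed by the encoded transaction. The sketch below shows how a host-side test might assemble such a payload; `build_sign_tx_payload` and the example values are illustrative only, not part of this PR.

```rust
// Hypothetical host-side helper mirroring the layout parsed by handle_sign_tx
// when p1 == 0: 20-byte path, 1-byte token count, 46 bytes per metadata entry,
// then the transaction bytes.
const TOKEN_METADATA_SIZE: usize = 46;

fn build_sign_tx_payload(
    path: &[u8; 20],
    token_metadata: &[[u8; TOKEN_METADATA_SIZE]],
    tx_bytes: &[u8],
) -> Vec<u8> {
    assert!(token_metadata.len() <= 5, "the app rejects more than 5 tokens");
    let mut payload =
        Vec::with_capacity(21 + token_metadata.len() * TOKEN_METADATA_SIZE + tx_bytes.len());
    payload.extend_from_slice(path);          // 20-byte derivation path
    payload.push(token_metadata.len() as u8); // 1-byte token count
    for entry in token_metadata {
        payload.extend_from_slice(entry);     // 46-byte metadata entries, version byte first
    }
    payload.extend_from_slice(tx_bytes);      // encoded unsigned transaction (tx_data)
    payload
}

fn main() {
    let path = [0u8; 20];
    let tx_bytes = [0u8; 64]; // placeholder; a real encoded tx goes here
    let payload = build_sign_tx_payload(&path, &[[0u8; TOKEN_METADATA_SIZE]], &tx_bytes);
    assert_eq!(payload.len(), 20 + 1 + 46 + 64);
}
```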

app/src/ledger_sdk_stub/multi_field_review.rs

Lines changed: 2 additions & 1 deletion

```diff
@@ -171,7 +171,8 @@ impl<'a> Field<'a> {
     pub fn event_loop(&self, incoming_direction: ButtonEvent, is_first_field: bool) -> ButtonEvent {
         let mut buttons = ButtonsState::new();
         let chunk_max_lines = layout::MAX_LINES - 1;
-        let page_count = 1 + self.value.len() / (chunk_max_lines * MAX_CHAR_PER_LINE);
+        let max_size_per_page = chunk_max_lines * MAX_CHAR_PER_LINE;
+        let page_count = (self.value.len() + max_size_per_page - 1) / max_size_per_page;
 
         let mut cur_page = match incoming_direction {
             ButtonEvent::LeftButtonRelease => page_count - 1,
```
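The new formula is a standard ceiling division; the old `1 + len / max` produced a trailing empty page whenever the value length was an exact multiple of the page size. A quick standalone check (the page size of 48 is only an illustration; on device it is `(layout::MAX_LINES - 1) * MAX_CHAR_PER_LINE`):

```rust
fn main() {
    let max_size_per_page = 48; // illustrative page size
    for len in [47usize, 48, 49] {
        let old = 1 + len / max_size_per_page;                       // previous formula
        let new = (len + max_size_per_page - 1) / max_size_per_page; // ceiling division
        println!("len = {len}: old page_count = {old}, new page_count = {new}");
    }
    // len = 48 is the interesting case: old = 2 (one page would be empty), new = 1.
}
```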

app/src/ledger_sdk_stub/swapping_buffer.rs

Lines changed: 2 additions & 2 deletions

```diff
@@ -131,8 +131,8 @@ impl<'a, const RAM: usize, const FLASH: usize> SwappingBuffer<'a, RAM, FLASH> {
         }
     }
 
-    pub fn reset(&mut self) {
-        self.state = BufferState::default();
+    pub fn reset(&mut self, from_index: usize) {
+        self.state = BufferState::WritingToRam(from_index);
     }
 }
 
```
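`reset` now takes the index to resume writing from instead of always clearing back to the start, which is what lets the reviewer keep the token metadata parked at the front of the buffer across per-output resets. A simplified stand-in (not the real `SwappingBuffer`, just an illustration of the prefix-preserving reset):

```rust
// Toy buffer illustrating why reset takes a from_index: data written before
// from_index survives the reset, data after it is overwritten.
struct PrefixBuffer {
    data: Vec<u8>,
    index: usize,
}

impl PrefixBuffer {
    fn write(&mut self, bytes: &[u8]) -> usize {
        self.data[self.index..self.index + bytes.len()].copy_from_slice(bytes);
        self.index += bytes.len();
        self.index
    }
    // Resume writing at `from_index`, keeping everything before it intact.
    fn reset(&mut self, from_index: usize) {
        self.index = from_index;
    }
}

fn main() {
    let mut buf = PrefixBuffer { data: vec![0u8; 64], index: 0 };
    let metadata_len = buf.write(b"metadata"); // token metadata stays at the front
    buf.write(b" output #1 fields");           // scratch data for the first output
    buf.reset(metadata_len);                   // discard the scratch, keep the metadata
    assert_eq!(&buf.data[..metadata_len], b"metadata");
}
```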

app/src/sign_tx_context.rs

Lines changed: 2 additions & 2 deletions

```diff
@@ -48,7 +48,7 @@ impl SignTxContext {
         self.tx_decoder.reset();
         self.current_step = DecodeStep::Init;
         self.hasher.reset();
-        self.temp_data.reset();
+        self.temp_data.reset(0);
         self.device_address = Some(Address::from_path(&self.path)?);
         Ok(())
     }
@@ -80,7 +80,7 @@
             self.device_address.as_ref().unwrap(),
             &self.temp_data,
         )?;
-        self.temp_data.reset();
+        self.temp_data.reset(0);
         if self.tx_decoder.inner.is_complete() {
             self.current_step = DecodeStep::Complete;
             return Ok(());
```

app/src/ui/tx_reviewer.rs

Lines changed: 126 additions & 29 deletions

```diff
@@ -20,20 +20,27 @@ use ledger_device_sdk::nbgl::{Field, TagValueList};
 use ledger_device_sdk::ui::bitmaps::{CHECKMARK, CROSS, EYE, WARNING};
 use utils::{
     base58::ALPHABET,
-    types::{AssetOutput, Byte32, LockupScript, TxInput, UnlockScript, UnsignedTx, I32, U256},
+    types::{
+        AssetOutput, Byte32, Hash, LockupScript, Token, TxInput, UnlockScript, UnsignedTx, I32,
+        U256,
+    },
 };
 
 #[link_section = ".nvm_data"]
 static mut DATA: NVMData<NVM<NVM_DATA_SIZE>> = NVMData::new(NVM::zeroed());
 
 const FIRST_OUTPUT_INDEX: u16 = 1;
+pub const TOKEN_METADATA_SIZE: usize = 46;
+const MAX_TOKEN_SYMBOL_LENGTH: usize = 12;
+type TokenSymbol = [u8; MAX_TOKEN_SYMBOL_LENGTH];
 
 pub struct TxReviewer {
     buffer: SwappingBuffer<'static, RAM_SIZE, NVM_DATA_SIZE>,
     has_external_inputs: bool,
     next_output_index: u16,
     tx_fee: Option<U256>,
     is_tx_execute_script: bool,
+    token_metadata_length: usize,
 }
 
 impl TxReviewer {
@@ -44,21 +51,29 @@ impl TxReviewer {
             next_output_index: FIRST_OUTPUT_INDEX, // display output from index 1, similar to BTC
             tx_fee: None,
             is_tx_execute_script: false,
+            token_metadata_length: 0,
         }
     }
 
     #[inline]
-    fn reset_buffer(&mut self) {
-        self.buffer.reset();
+    fn reset_buffer(&mut self, from_index: usize) {
+        self.buffer.reset(from_index);
     }
 
     #[inline]
-    pub fn init(&mut self, is_tx_execute_script: bool) {
-        self.reset_buffer();
+    pub fn init(
+        &mut self,
+        is_tx_execute_script: bool,
+        token_metadata: &[u8],
+    ) -> Result<(), ErrorCode> {
+        self.reset_buffer(0);
+        self.buffer.write(token_metadata)?;
         self.has_external_inputs = false;
         self.next_output_index = FIRST_OUTPUT_INDEX;
         self.tx_fee = None;
         self.is_tx_execute_script = is_tx_execute_script;
+        self.token_metadata_length = token_metadata.len();
+        Ok(())
     }
 
     fn write_alph_amount(&mut self, u256: &U256) -> Result<usize, ErrorCode> {
@@ -67,12 +82,31 @@ impl TxReviewer {
         self.buffer.write(amount_str)
     }
 
-    fn write_token_amount(&mut self, u256: &U256) -> Result<usize, ErrorCode> {
+    fn write_token_raw_amount(&mut self, u256: &U256) -> Result<usize, ErrorCode> {
         let mut amount_output = [0u8; 78]; // u256 max
         let amount_str = u256.to_str(&mut amount_output).unwrap();
         self.buffer.write(amount_str)
     }
 
+    fn write_token_amount(
+        &mut self,
+        u256: &U256,
+        symbol: TokenSymbol,
+        decimals: usize,
+    ) -> Result<usize, ErrorCode> {
+        let mut amount_output = [0u8; 86]; // u256 max
+        let symbol_bytes = get_token_symbol_bytes(&symbol[..]);
+        amount_output[..symbol_bytes.len()].copy_from_slice(symbol_bytes);
+        amount_output[symbol_bytes.len()] = b' ';
+        let prefix_length = symbol_bytes.len() + 1;
+        let amount_str = u256.to_str_with_decimals(&mut amount_output[prefix_length..], decimals);
+        if amount_str.is_none() {
+            return Err(ErrorCode::Overflow);
+        }
+        let total_length = prefix_length + amount_str.unwrap().len();
+        self.buffer.write(&amount_output[..total_length])
+    }
+
     fn write_token_id(&mut self, token_id: &Byte32) -> Result<usize, ErrorCode> {
         let hex_str: [u8; 64] = utils::to_hex(&token_id.0).unwrap();
         self.buffer.write(&hex_str)
@@ -188,6 +222,25 @@
         self.buffer.write(str_bytes)
     }
 
+    fn get_token_metadata(&self, token_id: &Hash) -> Option<(TokenSymbol, u8)> {
+        let token_size = self.token_metadata_length / TOKEN_METADATA_SIZE;
+        if token_size == 0 {
+            return None;
+        }
+        for i in 0..token_size {
+            let from_index = i * TOKEN_METADATA_SIZE;
+            let to_index = from_index + TOKEN_METADATA_SIZE;
+            let token_metadata_bytes = self.buffer.read(from_index, to_index);
+            if token_metadata_bytes[1..33] == token_id.0 {
+                let last_index = TOKEN_METADATA_SIZE - 1;
+                let token_symbol = token_metadata_bytes[33..last_index].try_into().unwrap();
+                let token_decimals = token_metadata_bytes[last_index];
+                return Some((token_symbol, token_decimals));
+            }
+        }
+        None
+    }
+
     fn prepare_output(
         &mut self,
         output: &AssetOutput,
@@ -228,21 +281,39 @@
 
         // Asset output has at most one token
         let token = output.tokens.get_current_item().unwrap();
-        let token_id_from_index = self.buffer.get_index();
-        let token_id_to_index = self.write_token_id(&token.id)?;
-
-        let token_amount_from_index = self.buffer.get_index();
-        let token_amount_to_index = self.write_token_amount(&token.amount)?;
-
+        let token_indexes = self.prepare_token(token)?;
         Ok(Some(OutputIndexes {
-            token: Some(TokenIndexes {
-                token_id: (token_id_from_index, token_id_to_index),
-                token_amount: (token_amount_from_index, token_amount_to_index),
-            }),
+            token: Some(token_indexes),
             ..output_indexes
         }))
     }
 
+    fn prepare_token(&mut self, token: &Token) -> Result<TokenIndexes, ErrorCode> {
+        let token_id_from_index = self.buffer.get_index();
+        let token_id_to_index = self.write_token_id(&token.id)?;
+        match self.get_token_metadata(&token.id) {
+            Some((token_symbol, token_decimals)) => {
+                let token_amount_from_index = self.buffer.get_index();
+                let token_amount_to_index =
+                    self.write_token_amount(&token.amount, token_symbol, token_decimals as usize)?;
+                Ok(TokenIndexes {
+                    has_token_metadata: true,
+                    token_id: (token_id_from_index, token_id_to_index),
+                    token_amount: (token_amount_from_index, token_amount_to_index),
+                })
+            }
+            None => {
+                let token_amount_from_index = self.buffer.get_index();
+                let token_amount_to_index = self.write_token_raw_amount(&token.amount)?;
+                Ok(TokenIndexes {
+                    has_token_metadata: false,
+                    token_id: (token_id_from_index, token_id_to_index),
+                    token_amount: (token_amount_from_index, token_amount_to_index),
+                })
+            }
+        }
+    }
+
     fn get_str_from_range(&self, range: (usize, usize)) -> Result<&str, ErrorCode> {
         let bytes = self.buffer.read(range.0, range.1);
         bytes_to_string(bytes)
@@ -310,23 +381,39 @@
         }
 
         let TokenIndexes {
+            has_token_metadata,
             token_id,
             token_amount,
         } = token.unwrap();
         let token_id = self.get_str_from_range(token_id)?;
         let token_amount = self.get_str_from_range(token_amount)?;
-        let fields = [
-            alph_amount_field,
-            Field {
-                name: "Token ID",
-                value: token_id,
-            },
-            Field {
-                name: "Raw Amount",
-                value: token_amount,
-            },
-            address_field,
-        ];
+        let fields = if has_token_metadata {
+            [
+                Field {
+                    name: "Token ID",
+                    value: token_id,
+                },
+                Field {
+                    name: "Token Amount",
+                    value: token_amount,
+                },
+                alph_amount_field,
+                address_field,
+            ]
+        } else {
+            [
+                Field {
+                    name: "Token ID",
+                    value: token_id,
+                },
+                Field {
+                    name: "Raw Amount",
+                    value: token_amount,
+                },
+                alph_amount_field,
+                address_field,
+            ]
+        };
         review(&fields, review_message)
     }
 
@@ -362,7 +449,7 @@
         if let Some(current_output) = outputs.get_current_item() {
             let result =
                 self.review_output(current_output, device_address, temp_data.read_all());
-            self.reset_buffer();
+            self.reset_buffer(self.token_metadata_length);
             result
         } else {
             Ok(())
@@ -497,6 +584,7 @@ pub struct OutputIndexes {
 }
 
 pub struct TokenIndexes {
+    pub has_token_metadata: bool,
     pub token_id: (usize, usize),
     pub token_amount: (usize, usize),
 }
@@ -506,6 +594,15 @@ fn bytes_to_string(bytes: &[u8]) -> Result<&str, ErrorCode> {
     from_utf8(bytes).map_err(|_| ErrorCode::InternalError)
 }
 
+#[inline]
+fn get_token_symbol_bytes(bytes: &[u8]) -> &[u8] {
+    let mut index = 0;
+    while index < bytes.len() && bytes[index] != 0 {
+        index += 1;
+    }
+    &bytes[..index]
+}
+
 fn review<'a>(fields: &'a [Field<'a>], review_message: &str) -> Result<(), ErrorCode> {
     #[cfg(not(any(target_os = "stax", target_os = "flex")))]
     {
```
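Putting the pieces together, each 46-byte token metadata entry is sliced by `check_token_metadata` and `get_token_metadata` as: 1 version byte (must be 0), a 32-byte token ID, a 12-byte zero-padded symbol, and 1 decimals byte. The standalone sketch below restates that layout; the `TokenMetadata` struct and the example values are illustrative only, since the app keeps the raw bytes in its swapping buffer rather than a struct.

```rust
const TOKEN_METADATA_SIZE: usize = 46;
const MAX_TOKEN_SYMBOL_LENGTH: usize = 12;

// Illustrative representation of one metadata entry.
struct TokenMetadata {
    version: u8,
    token_id: [u8; 32],
    symbol: [u8; MAX_TOKEN_SYMBOL_LENGTH], // zero-padded, trimmed at the first 0 byte for display
    decimals: u8,
}

fn parse_token_metadata(bytes: &[u8; TOKEN_METADATA_SIZE]) -> TokenMetadata {
    TokenMetadata {
        version: bytes[0],                          // non-zero triggers InvalidMetadataVersion
        token_id: bytes[1..33].try_into().unwrap(), // matched against the token in each output
        symbol: bytes[33..45].try_into().unwrap(),  // prefix of the displayed "Token Amount"
        decimals: bytes[45],                        // passed to to_str_with_decimals
    }
}

fn main() {
    let mut raw = [0u8; TOKEN_METADATA_SIZE];
    raw[33..37].copy_from_slice(b"USDT"); // example symbol, zero-padded to 12 bytes
    raw[45] = 6;                          // example decimals
    let md = parse_token_metadata(&raw);
    assert_eq!(md.version, 0);
    assert_eq!(&md.symbol[..4], b"USDT");
    // With metadata present the reviewer labels the field "Token Amount" and prefixes the
    // symbol; without it, the integer value is shown as "Raw Amount".
}
```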
