Skip to content

Commit 0575fbe

Browse files
author
Pierre Avital
committed
Allow plugins to fail at startup, and Zenohd to react to that failure
1 parent 02e5f70 commit 0575fbe

File tree

3 files changed

+50
-25
lines changed

3 files changed

+50
-25
lines changed

io/zenoh-transport/src/unicast/link.rs

Lines changed: 17 additions & 20 deletions
Original file line number | Diff line number | Diff line change
@@ -227,7 +227,7 @@ async fn tx_task(
227227
let (batch_size, _) = tx_compressed(
228228
is_compressed,
229229
link.is_streamed(),
230-
&bytes,
230+
bytes,
231231
&mut compression_aux_buff,
232232
)?;
233233
bytes = &compression_aux_buff[..batch_size];
@@ -471,7 +471,7 @@ fn rx_decompress(
471471
end_pos: &mut usize,
472472
) -> ZResult<()> {
473473
let is_compressed: bool = buffer[COMPRESSION_BYTE_INDEX] == COMPRESSION_ENABLED;
474-
Ok(if is_compressed {
474+
if is_compressed {
475475
let mut aux_buff = pool.try_take().unwrap_or_else(|| pool.alloc());
476476
*end_pos = lz4_flex::block::decompress_into(
477477
&buffer[BATCH_PAYLOAD_START_INDEX..read_bytes],
@@ -482,7 +482,8 @@ fn rx_decompress(
482482
} else {
483483
*start_pos = BATCH_PAYLOAD_START_INDEX;
484484
*end_pos = read_bytes;
485-
})
485+
};
486+
Ok(())
486487
}
487488

488489
#[cfg(all(feature = "unstable", feature = "transport_compression"))]
@@ -589,14 +590,11 @@ fn set_uncompressed_batch_header(
589590
if is_streamed {
590591
let mut header = [0_u8, 0_u8];
591592
header[..HEADER_BYTES_SIZE].copy_from_slice(&bytes[..HEADER_BYTES_SIZE]);
592-
let mut batch_size = u16::from_le_bytes(header);
593-
batch_size += 1;
594-
let batch_size: u16 = batch_size.try_into().map_err(|e| {
595-
zerror!(
596-
"Compression error: unable to convert compression size into u16: {}",
597-
e
598-
)
599-
})?;
593+
let batch_size = if let Some(size) = u16::from_le_bytes(header).checked_add(1) {
594+
size
595+
} else {
596+
bail!("Compression error: unable to convert compression size into u16",)
597+
};
600598
buff[0..HEADER_BYTES_SIZE].copy_from_slice(&batch_size.to_le_bytes());
601599
buff[COMPRESSION_BYTE_INDEX_STREAMED] = COMPRESSION_DISABLED;
602600
let batch_size: usize = batch_size.into();
@@ -612,7 +610,7 @@ fn set_uncompressed_batch_header(
612610
// May happen when the payload size is itself the MTU and adding the header exceeds it.
613611
Err(zerror!("Failed to send uncompressed batch, batch size ({}) exceeds the maximum batch size of {}.", final_batch_size, MAX_BATCH_SIZE))?;
614612
}
615-
return Ok(final_batch_size);
613+
Ok(final_batch_size)
616614
}
617615

618616
#[cfg(all(feature = "transport_compression", feature = "unstable"))]
@@ -626,47 +624,46 @@ fn tx_compression_test() {
626624
// Compression done for the sake of comparing the result.
627625
let payload_compression_size = lz4_flex::block::compress_into(&payload, &mut buff).unwrap();
628626

629-
fn get_header_value(buff: &Box<[u8]>) -> u16 {
627+
fn get_header_value(buff: &[u8]) -> u16 {
630628
let mut header = [0_u8, 0_u8];
631629
header[..HEADER_BYTES_SIZE].copy_from_slice(&buff[..HEADER_BYTES_SIZE]);
632-
let batch_size = u16::from_le_bytes(header);
633-
batch_size
630+
u16::from_le_bytes(header)
634631
}
635632

636633
// Streamed with compression enabled
637634
let batch = [16, 0, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4];
638635
let (batch_size, was_compressed) = tx_compressed(true, true, &batch, &mut buff).unwrap();
639636
let header = get_header_value(&buff);
640-
assert_eq!(was_compressed, true);
637+
assert!(was_compressed);
641638
assert_eq!(header as usize, payload_compression_size + COMPRESSION_BYTE);
642639
assert!(batch_size < batch.len() + COMPRESSION_BYTE);
643640
assert_eq!(batch_size, payload_compression_size + 3);
644641

645642
// Not streamed with compression enabled
646643
let batch = payload;
647644
let (batch_size, was_compressed) = tx_compressed(true, false, &batch, &mut buff).unwrap();
648-
assert_eq!(was_compressed, true);
645+
assert!(was_compressed);
649646
assert!(batch_size < batch.len() + COMPRESSION_BYTE);
650647
assert_eq!(batch_size, payload_compression_size + COMPRESSION_BYTE);
651648

652649
// Streamed with compression disabled
653650
let batch = [16, 0, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4];
654651
let (batch_size, was_compressed) = tx_compressed(false, true, &batch, &mut buff).unwrap();
655652
let header = get_header_value(&buff);
656-
assert_eq!(was_compressed, false);
653+
assert!(!was_compressed);
657654
assert_eq!(header as usize, payload.len() + COMPRESSION_BYTE);
658655
assert_eq!(batch_size, batch.len() + COMPRESSION_BYTE);
659656

660657
// Not streamed and compression disabled
661658
let batch = payload;
662659
let (batch_size, was_compressed) = tx_compressed(false, false, &batch, &mut buff).unwrap();
663-
assert_eq!(was_compressed, false);
660+
assert!(!was_compressed);
664661
assert_eq!(batch_size, payload.len() + COMPRESSION_BYTE);
665662

666663
// Verify that if the compression result is bigger than the original payload size, then the non compressed payload is returned.
667664
let batch = [16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]; // a non compressable payload with no repetitions
668665
let (batch_size, was_compressed) = tx_compressed(true, true, &batch, &mut buff).unwrap();
669-
assert_eq!(was_compressed, false);
666+
assert!(!was_compressed);
670667
assert_eq!(batch_size, batch.len() + COMPRESSION_BYTE);
671668
}
672669

plugins/zenoh-plugin-rest/src/lib.rs

Lines changed: 8 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -198,7 +198,11 @@ impl Plugin for RestPlugin {
198198

199199
let conf: Config = serde_json::from_value(plugin_conf.clone())
200200
.map_err(|e| zerror!("Plugin `{}` configuration error: {}", name, e))?;
201-
async_std::task::spawn(run(runtime.clone(), conf.clone()));
201+
let task = async_std::task::spawn(run(runtime.clone(), conf.clone()));
202+
let task = async_std::task::block_on(task.timeout(std::time::Duration::from_millis(1)));
203+
if let Ok(Err(e)) = task {
204+
bail!("REST server failed within 1ms: {e}")
205+
}
202206
Ok(Box::new(RunningPlugin(conf)))
203207
}
204208
}
@@ -435,7 +439,7 @@ async fn write(mut req: Request<(Arc<Session>, String)>) -> tide::Result<Respons
435439
}
436440
}
437441

438-
pub async fn run(runtime: Runtime, conf: Config) {
442+
pub async fn run(runtime: Runtime, conf: Config) -> ZResult<()> {
439443
// Try to initiate login.
440444
// Required in case of dynamic lib, otherwise no logs.
441445
// But cannot be done twice in case of static link.
@@ -461,7 +465,9 @@ pub async fn run(runtime: Runtime, conf: Config) {
461465

462466
if let Err(e) = app.listen(conf.http_port).await {
463467
log::error!("Unable to start http server for REST: {:?}", e);
468+
return Err(e.into());
464469
}
470+
Ok(())
465471
}
466472

467473
fn path_to_key_expr<'a>(path: &'a str, zid: &str) -> ZResult<KeyExpr<'a>> {

zenohd/src/main.rs

Lines changed: 25 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -1,3 +1,5 @@
1+
use std::collections::HashSet;
2+
13
//
24
// Copyright (c) 2023 ZettaScale Technology
35
//
@@ -78,22 +80,30 @@ clap::Arg::new("adminspace-permissions").long("adminspace-permissions").value_na
7880

7981
let mut plugins = PluginsManager::dynamic(config.libloader());
8082
// Static plugins are to be added here, with `.add_static::<PluginType>()`
83+
let mut required_plugins = HashSet::new();
8184
for plugin_load in config.plugins().load_requests() {
8285
let PluginLoad {
8386
name,
8487
paths,
8588
required,
8689
} = plugin_load;
90+
log::info!(
91+
"Loading {req} plugin \"{name}\"",
92+
req = if required { "required" } else { "" }
93+
);
8794
if let Err(e) = match paths {
88-
None => plugins.load_plugin_by_name(name),
89-
Some(paths) => plugins.load_plugin_by_paths(name, &paths),
95+
None => plugins.load_plugin_by_name(name.clone()),
96+
Some(paths) => plugins.load_plugin_by_paths(name.clone(), &paths),
9097
} {
9198
if required {
9299
panic!("Plugin load failure: {}", e)
93100
} else {
94101
log::error!("Plugin load failure: {}", e)
95102
}
96103
}
104+
if required {
105+
required_plugins.insert(name);
106+
}
97107
}
98108

99109
let runtime = match Runtime::new(config).await {
@@ -105,6 +115,11 @@ clap::Arg::new("adminspace-permissions").long("adminspace-permissions").value_na
105115
};
106116

107117
for (name, path, start_result) in plugins.start_all(&runtime) {
118+
let required = required_plugins.contains(name);
119+
log::info!(
120+
"Starting {req} plugin \"{name}\"",
121+
req = if required { "required" } else { "" }
122+
);
108123
match start_result {
109124
Ok(Some(_)) => log::info!("Successfully started plugin {} from {:?}", name, path),
110125
Ok(None) => log::warn!("Plugin {} from {:?} wasn't loaded, as an other plugin by the same name is already running", name, path),
@@ -113,7 +128,11 @@ clap::Arg::new("adminspace-permissions").long("adminspace-permissions").value_na
113128
Ok(s) => s,
114129
Err(_) => panic!("Formatting the error from plugin {} ({:?}) failed, this is likely due to ABI unstability.\r\nMake sure your plugin was built with the same version of cargo as zenohd", name, path),
115130
};
116-
log::error!("Plugin start failure: {}", if report.is_empty() {"no details provided"} else {report.as_str()});
131+
if required {
132+
panic!("Plugin \"{name}\" failed to start: {}", if report.is_empty() {"no details provided"} else {report.as_str()});
133+
}else {
134+
log::error!("Required plugin \"{name}\" failed to start: {}", if report.is_empty() {"no details provided"} else {report.as_str()});
135+
}
117136
}
118137
}
119138
}
@@ -157,6 +176,9 @@ fn config_from_args(args: &ArgMatches) -> Config {
157176
config
158177
.insert_json5("plugins/rest/http_port", &format!(r#""{value}""#))
159178
.unwrap();
179+
config
180+
.insert_json5("plugins/rest/__required__", "true")
181+
.unwrap();
160182
}
161183
}
162184
if let Some(plugins_search_dirs) = args.values_of("plugin-search-dir") {

0 commit comments

Comments
 (0)