@@ -25,7 +25,6 @@ use fluence_libp2p::PeerId;
25
25
use thiserror:: Error ;
26
26
use tracing:: instrument;
27
27
28
- use crate :: DataStoreError :: SerializeAnomaly ;
29
28
use now_millis:: now_ms;
30
29
use particle_execution:: { ParticleVault , VaultError } ;
31
30
@@ -130,7 +129,7 @@ impl ParticleDataStore {
130
129
async fn cleanup_data ( & self , particle_id : & str , current_peer_id : & str ) -> Result < ( ) > {
131
130
tracing:: debug!( target: "particle_reap" , particle_id = particle_id, "Cleaning up particle data for particle" ) ;
132
131
let path = self . data_file ( particle_id, current_peer_id) ;
133
- match tokio:: fs:: remove_dir_all ( & path) . await {
132
+ match tokio:: fs:: remove_file ( & path) . await {
134
133
Ok ( _) => Ok ( ( ) ) ,
135
134
// ignore NotFound
136
135
Err ( err) if err. kind ( ) == ErrorKind :: NotFound => Ok ( ( ) ) ,
@@ -172,9 +171,12 @@ impl ParticleDataStore {
172
171
)
173
172
. await ?;
174
173
175
- let ser_particle = serde_json:: to_vec ( particle_parameters) . map_err ( SerializeAnomaly ) ?;
176
- let ser_call_results = serde_json:: to_vec ( call_results) . map_err ( SerializeAnomaly ) ?;
177
- let ser_avm_outcome = serde_json:: to_vec ( outcome) . map_err ( SerializeAnomaly ) ?;
174
+ let ser_particle =
175
+ serde_json:: to_vec ( particle_parameters) . map_err ( DataStoreError :: SerializeAnomaly ) ?;
176
+ let ser_call_results =
177
+ serde_json:: to_vec ( call_results) . map_err ( DataStoreError :: SerializeAnomaly ) ?;
178
+ let ser_avm_outcome =
179
+ serde_json:: to_vec ( outcome) . map_err ( DataStoreError :: SerializeAnomaly ) ?;
178
180
179
181
let anomaly_data = AnomalyData {
180
182
air_script : Cow :: Borrowed ( air_script) ,
@@ -210,7 +212,7 @@ impl ParticleDataStore {
210
212
let data = serde_json:: to_vec ( & anomaly_data) . map_err ( DataStoreError :: SerializeAnomaly ) ?;
211
213
tokio:: fs:: write ( & file, data)
212
214
. await
213
- . map_err ( |err| DataStoreError :: ReadData ( err, file) ) ?;
215
+ . map_err ( |err| DataStoreError :: WriteAnomaly ( err, file) ) ?;
214
216
215
217
Ok ( ( ) )
216
218
}
@@ -239,3 +241,141 @@ pub enum DataStoreError {
239
241
/// Builds the storage key for a particle's data: the particle id combined with
/// the local peer id, so the same particle handled by different peers never
/// maps to the same entry.
fn store_key_from_components(particle_id: &str, current_peer_id: &str) -> String {
    // Pre-size the buffer to avoid reallocation while appending the four parts.
    let mut key = String::with_capacity(
        "particle_".len() + particle_id.len() + "-peer_".len() + current_peer_id.len(),
    );
    key.push_str("particle_");
    key.push_str(particle_id);
    key.push_str("-peer_");
    key.push_str(current_peer_id);
    key
}
244
+
245
#[cfg(test)]
mod tests {
    use crate::ParticleDataStore;
    use avm_server::avm_runner::RawAVMOutcome;
    use avm_server::CallRequests;
    use std::path::PathBuf;
    use std::time::Duration;

    /// Builds a store rooted under `base`, with the same directory layout
    /// every test uses: `particle_data_store` / `vault` / `anomaly_data_store`.
    fn store_under(base: &std::path::Path) -> ParticleDataStore {
        ParticleDataStore::new(
            base.join("particle_data_store"),
            base.join("vault"),
            base.join("anomaly_data_store"),
        )
    }

    #[tokio::test]
    async fn test_initialize() {
        let temp_dir = tempfile::tempdir().expect("Failed to create temp dir");
        let data_dir = temp_dir.path().join("particle_data_store");

        let store = store_under(temp_dir.path());
        let result = store.initialize().await;

        // `initialize` must succeed and create the particle data directory on disk.
        assert!(result.is_ok());
        assert!(data_dir.exists());
    }

    #[tokio::test]
    async fn test_store_and_read_data() {
        let temp_dir = tempfile::tempdir().expect("Failed to create temp dir");
        let store = store_under(temp_dir.path());
        store.initialize().await.expect("Failed to initialize");

        let particle_id = "test_particle";
        let current_peer_id = "test_peer";
        let data = b"test_data";

        store
            .store_data(data, particle_id, current_peer_id)
            .await
            .expect("Failed to store data");

        // Reading back must yield exactly the bytes that were stored.
        let read_result = store.read_data(particle_id, current_peer_id).await;
        assert!(read_result.is_ok());
        assert_eq!(read_result.unwrap(), data);
    }

    #[tokio::test]
    async fn test_detect_anomaly() {
        // `detect_anomaly` never touches the filesystem here, so dummy paths suffice.
        let store = ParticleDataStore::new(
            PathBuf::from("dummy"),
            PathBuf::from("dummy"),
            PathBuf::from("dummy"),
        );

        let fast_execution = Duration::from_millis(400);
        let slow_execution = Duration::from_millis(600);
        let small_memory_delta = 5 * bytesize::MB as usize;
        let large_memory_delta = 15 * bytesize::MB as usize;

        let outcome_success = RawAVMOutcome {
            ret_code: 0,
            error_message: "".to_string(),
            data: vec![],
            call_requests: CallRequests::new(),
            next_peer_pks: vec![],
        };
        let outcome_failure = RawAVMOutcome {
            ret_code: 1,
            error_message: "".to_string(),
            data: vec![],
            call_requests: CallRequests::new(),
            next_peer_pks: vec![],
        };

        // Fast, small-footprint, successful execution is not an anomaly.
        assert!(!store.detect_anomaly(fast_execution, small_memory_delta, &outcome_success));
        // Slow, memory-heavy, failed execution is an anomaly.
        assert!(store.detect_anomaly(slow_execution, large_memory_delta, &outcome_failure));
    }

    #[tokio::test]
    async fn test_cleanup_data() {
        let temp_dir = tempfile::tempdir().expect("Failed to create temp dir");
        let store = store_under(temp_dir.path());
        store.initialize().await.expect("Failed to initialize");

        let particle_id = "test_particle";
        let current_peer_id = "test_peer";
        let data = b"test_data";

        store
            .store_data(data, particle_id, current_peer_id)
            .await
            .expect("Failed to store data");

        // Create the particle's vault directory by hand so cleanup has both
        // artifacts — the data file and the vault dir — to remove.
        let data_file_path = store.data_file(particle_id, current_peer_id);
        let vault_path = temp_dir.path().join("vault").join(particle_id);
        tokio::fs::create_dir_all(&vault_path)
            .await
            .expect("Failed to create vault dir");
        assert!(data_file_path.exists());
        assert!(vault_path.exists());

        let cleanup_result = store.cleanup_data(particle_id, current_peer_id).await;

        // Cleanup must succeed and leave neither artifact behind.
        assert!(cleanup_result.is_ok());
        assert!(!data_file_path.exists());
        assert!(!vault_path.exists())
    }
}
0 commit comments