@@ -4,9 +4,9 @@ use std::{
     ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign},
 };
 
-use arith::{Field, FieldSerde, FieldSerdeResult, SimdField};
+use arith::{Field, FieldSerde, FieldSerdeResult};
 
-use crate::{GF2x64, GF2};
+use crate::GF2;
 
 #[derive(Clone, Copy, Debug)]
 pub struct NeonGF2x128 {
@@ -49,7 +49,7 @@ impl FieldSerde for NeonGF2x128 {
 }
 
 impl Field for NeonGF2x128 {
-    const NAME: &'static str = "Galios Field 2 SIMD 128";
+    const NAME: &'static str = "Neon Galois Field 2 SIMD 128";
 
     const SIZE: usize = 128 / 8;
 
@@ -317,41 +317,3 @@ impl From<GF2> for NeonGF2x128 {
         }
     }
 }
-
-impl SimdField for NeonGF2x128 {
-    type Scalar = GF2;
-
-    const PACK_SIZE: usize = 128;
-
-    #[inline(always)]
-    fn scale(&self, challenge: &Self::Scalar) -> Self {
-        if challenge.v == 0 {
-            Self::ZERO
-        } else {
-            *self
-        }
-    }
-
-    #[inline(always)]
-    fn pack(base_vec: &[Self::Scalar]) -> Self {
-        assert_eq!(base_vec.len(), Self::PACK_SIZE);
-        let mut packed_to_gf2x64 = [GF2x64::ZERO; Self::PACK_SIZE / GF2x64::PACK_SIZE];
-        packed_to_gf2x64
-            .iter_mut()
-            .zip(base_vec.chunks(GF2x64::PACK_SIZE))
-            .for_each(|(gf2x64, pack)| *gf2x64 = GF2x64::pack(pack));
-
-        unsafe { transmute(packed_to_gf2x64) }
-    }
-
-    #[inline(always)]
-    fn unpack(&self) -> Vec<Self::Scalar> {
-        let packed_to_gf2x64: [GF2x64; Self::PACK_SIZE / GF2x64::PACK_SIZE] =
-            unsafe { transmute(*self) };
-
-        packed_to_gf2x64
-            .iter()
-            .flat_map(|packed| packed.unpack())
-            .collect()
-    }
-}
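For readers of this diff: the `SimdField` impl removed above packed 128 `GF2` scalars into a single 128-bit NEON lane via two `GF2x64` words, and `scale` by a `GF2` challenge either kept or zeroed the lane. The snippet below is a minimal usage sketch, not code from this commit; it assumes the pre-removal trait surface (`SimdField::{pack, scale, unpack}` and `PACK_SIZE`) exactly as shown in the removed lines, and `demo_simd_round_trip` is a hypothetical helper.

```rust
// Illustrative sketch only; assumes the SimdField impl for NeonGF2x128
// that this commit removes, with the trait surface shown in the diff above.
use arith::{Field, SimdField};

use crate::{GF2, NeonGF2x128};

// Hypothetical helper showing the pack / scale / unpack round trip.
fn demo_simd_round_trip() {
    // PACK_SIZE = 128: one NeonGF2x128 carries 128 GF2 bits.
    let scalars = vec![GF2::ZERO; NeonGF2x128::PACK_SIZE];

    // pack() groups the scalars into two GF2x64 words and transmutes
    // them into the 128-bit NEON register.
    let packed = NeonGF2x128::pack(&scalars);

    // scale() by a zero challenge collapses the whole lane to zero.
    let zeroed = packed.scale(&GF2::ZERO);

    // unpack() reverses pack(); the zero lane unpacks to 128 zero scalars.
    assert!(zeroed.unpack().iter().all(|s| *s == GF2::ZERO));
}
```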