Skip to content

Commit 5e4cee7

Browse files
committed
Add a normalize method to str
1 parent d01eb4d commit 5e4cee7

File tree

2 files changed

+31
-3
lines changed

2 files changed

+31
-3
lines changed

charabia/src/lib.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,7 @@ mod token;
5151
mod tokenizer;
5252

5353
pub use detection::{Language, Script};
54+
pub use normalizer::Normalize;
5455
pub use segmenter::Segment;
5556
pub use token::{SeparatorKind, Token, TokenKind};
5657

charabia/src/normalizer/mod.rs

Lines changed: 30 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -224,11 +224,18 @@ impl<'o, 'tb> SegmentedTokenIter<'o, 'tb> {
224224
}
225225
}
226226

227-
impl Token<'_> {
227+
pub trait Normalize {
228+
type Item;
229+
fn normalize(self, options: &NormalizerOption) -> Self::Item;
230+
}
231+
232+
impl Normalize for Token<'_> {
233+
type Item = Self;
234+
228235
/// Normalize [`Token`] using all the compatible Normalizers.
229236
///
230237
/// A Latin `Token` would not be normalized the same as a Chinese `Token`.
231-
pub fn normalize(mut self, options: &NormalizerOption) -> Self {
238+
fn normalize(mut self, options: &NormalizerOption) -> Self::Item {
232239
for normalizer in NORMALIZERS.iter() {
233240
if normalizer.should_normalize(&self) {
234241
self = normalizer.normalize(self, options);
@@ -247,12 +254,32 @@ impl Token<'_> {
247254
}
248255
}
249256

257+
impl<'o> Normalize for &'o str {
258+
type Item = Cow<'o, str>;
259+
260+
/// Normalize a str.
261+
fn normalize(self, options: &NormalizerOption) -> Self::Item {
262+
let mut normalized = Token { lemma: Cow::Borrowed(self), ..Default::default() };
263+
for normalizer in NORMALIZERS.iter() {
264+
normalized = normalizer.normalize(normalized, options);
265+
}
266+
267+
if options.lossy {
268+
for normalizer in LOSSY_NORMALIZERS.iter() {
269+
normalized = normalizer.normalize(normalized, options);
270+
}
271+
}
272+
273+
normalized.lemma
274+
}
275+
}
276+
250277
#[cfg(test)]
251278
mod test {
252279
macro_rules! test_normalizer {
253280
($normalizer:expr, $tokens:expr, $normalizer_result:expr, $global_result:expr) => {
254281
use super::*;
255-
use crate::Token;
282+
use crate::{Token, Normalize};
256283

257284
const TEST_NORMALIZER_OPTIONS: NormalizerOption = NormalizerOption {
258285
create_char_map: true,

0 commit comments

Comments
 (0)