
Commit
update test, they pass
ArthurZucker committed Aug 6, 2024
1 parent bfb6222 commit bca9bde
Showing 2 changed files with 8 additions and 7 deletions.
1 change: 0 additions & 1 deletion tokenizers/src/normalizers/byte_level.rs
@@ -1,6 +1,5 @@
use crate::processors::byte_level::bytes_char;
use crate::tokenizer::{NormalizedString, Normalizer, Result};
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
use crate::utils::macro_rules_attribute;

14 changes: 8 additions & 6 deletions tokenizers/src/normalizers/mod.rs
@@ -88,12 +88,14 @@ mod tests {

         let json =
             r#"{"sep":["</s>",2], "cls":["<s>",0], "trim_offsets":true, "add_prefix_space":true}"#;
-        let reconstructed = serde_json::from_str::<NormalizerWrapper>(json).unwrap();
-        println!("{:?}", reconstructed);
-        assert!(matches!(
-            reconstructed,
-            NormalizerWrapper::Sequence(_)
-        ));
+        let reconstructed = serde_json::from_str::<NormalizerWrapper>(json);
+        match reconstructed {
+            Err(err) => assert_eq!(
+                err.to_string(),
+                "data did not match any variant of untagged enum NormalizerWrapper"
+            ),
+            _ => panic!("Expected an error here"),
+        }

         let json = r#"{"type":"RobertaProcessing", "sep":["</s>",2] }"#;
         let reconstructed = serde_json::from_str::<NormalizerWrapper>(json);
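For context on the new assertion: NormalizerWrapper is deserialized as an #[serde(untagged)] enum, so a JSON payload that matches none of its variants now surfaces serde's generic error, "data did not match any variant of untagged enum NormalizerWrapper", rather than quietly deserializing into a Sequence. The sketch below is not part of the commit; ExampleWrapper and its variants are hypothetical stand-ins (requiring the serde and serde_json crates) used only to show where that error string comes from.

use serde::Deserialize;

// Hypothetical stand-in for NormalizerWrapper: an untagged enum whose variants
// are tried in order until one matches the shape of the JSON.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum ExampleWrapper {
    Lowercase { lowercase: bool },
    Prepend { prepend: String },
}

fn main() {
    // Neither variant has the fields "sep"/"cls", so every variant is rejected
    // and serde falls back to its generic untagged-enum error message.
    let json = r#"{"sep":["</s>",2], "cls":["<s>",0]}"#;
    let err = serde_json::from_str::<ExampleWrapper>(json).unwrap_err();
    assert_eq!(
        err.to_string(),
        "data did not match any variant of untagged enum ExampleWrapper"
    );
}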
