From 024ccc2bf37390136ee3e743af4e82996e3acc8e Mon Sep 17 00:00:00 2001
From: Arthur Zucker <arthur.zucker@gmail.com>
Date: Mon, 5 Aug 2024 17:10:54 +0200
Subject: [PATCH] remove prints

---
 tokenizers/src/models/bpe/model.rs | 7 -------
 1 file changed, 7 deletions(-)

diff --git a/tokenizers/src/models/bpe/model.rs b/tokenizers/src/models/bpe/model.rs
index 5ddee240f..c66186804 100644
--- a/tokenizers/src/models/bpe/model.rs
+++ b/tokenizers/src/models/bpe/model.rs
@@ -282,21 +282,14 @@ pub(crate) fn convert_merges_to_hashmap<I: Iterator<Item = String>>(
     iter: I,
     _vocab: &Vocab,
 ) -> Result<Merges> {
     let mut merges = vec![];
-
-    println!(" Parsing merges!");
     let lines = iter.filter(|l| !l.starts_with("#version"));
     for (rank, line) in lines.enumerate() {
         let parts = line.split(' ').collect::<Vec<_>>();
         if parts.len() != 2 {
-            println!("bad merges parts = {:?}", parts);
-
             return Err(Error::BadMerges(rank + 1).into());
         }
-
-        println!("parts = {:?}", parts);
         merges.push((parts[0].to_string(), parts[1].to_string()));
     }
-    println!(" Finished parsing");
     Ok(merges)
 }