From 70c4f6b1fd111f5478f5e13d34cdf18e38d7efd8 Mon Sep 17 00:00:00 2001
From: Nicolas Patry
Date: Fri, 8 Nov 2024 07:52:48 +0800
Subject: [PATCH] Clippy.

---
 tokenizers/src/tokenizer/mod.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tokenizers/src/tokenizer/mod.rs b/tokenizers/src/tokenizer/mod.rs
index 5f542f2a5..cc095c1f8 100644
--- a/tokenizers/src/tokenizer/mod.rs
+++ b/tokenizers/src/tokenizer/mod.rs
@@ -1070,7 +1070,7 @@ where
     /// See [`DecodeStream`]
     pub fn step(&mut self, id: u32) -> Result<Option<String>> {
         step_decode_stream(
-            &self.tokenizer,
+            self.tokenizer,
             id,
             self.skip_special_tokens,
             &mut self.ids,
@@ -1107,7 +1107,7 @@ where
         let new_text = &string[prefix.len()..].to_string();
         let new_prefix_index = ids.len() - *prefix_index;
         *ids = ids.drain(*read_index..).collect();
-        *prefix = tokenizer.decode(&ids, skip_special_tokens)?;
+        *prefix = tokenizer.decode(ids, skip_special_tokens)?;
         *read_index = *prefix_index;
         *prefix_index = new_prefix_index;
         Ok(Some(new_text.to_string()))
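
Context: the patch silences Clippy (most likely the needless_borrow lint) by dropping redundant `&` borrows where the value is already a reference, as with `self.tokenizer` inside `DecodeStream::step` and `ids` passed to `decode`. Below is a minimal usage sketch of the streaming decoder touched here; `step` and its `Result<Option<String>>` return type come from the hunks above, while the `decode_stream` constructor, the model name, and the token ids are assumptions for illustration.

use tokenizers::Tokenizer;

fn main() -> tokenizers::Result<()> {
    // Illustrative model identifier; `from_pretrained` requires the crate's
    // `http` feature to download the tokenizer definition.
    let tokenizer = Tokenizer::from_pretrained("gpt2", None)?;

    // Assumed constructor for the streaming decoder whose `step` method this
    // patch touches; `false` keeps special tokens in the output.
    let mut stream = tokenizer.decode_stream(false);

    // Placeholder token ids. `step` returns Ok(Some(text)) once a stable chunk
    // of decoded text can be emitted, and Ok(None) while more ids are needed.
    for id in [9246u32, 318, 257, 4572] {
        if let Some(chunk) = stream.step(id)? {
            print!("{chunk}");
        }
    }
    println!();
    Ok(())
}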