From 4ad446ce0207d68eb2fa9f4e50007d0ac61cc11f Mon Sep 17 00:00:00 2001
From: Dimitris Iliopoulos
Date: Wed, 20 Nov 2024 17:04:26 -0500
Subject: [PATCH] Fix clippy

---
 bindings/python/src/tokenizer.rs | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/bindings/python/src/tokenizer.rs b/bindings/python/src/tokenizer.rs
index 918a012e6..2fcdb2282 100644
--- a/bindings/python/src/tokenizer.rs
+++ b/bindings/python/src/tokenizer.rs
@@ -1037,8 +1037,7 @@ impl PyTokenizer {
         add_special_tokens: bool,
     ) -> PyResult<Vec<PyEncoding>> {
         let mut items = Vec::<tk::EncodeInput>::with_capacity(input.len());
-        for i in 0..input.len() {
-            let item = &input[i];
+        for item in &input {
             let item: tk::EncodeInput = if is_pretokenized {
                 item.extract::<PreTokenizedEncodeInput>()?.into()
             } else {
@@ -1097,8 +1096,7 @@ impl PyTokenizer {
         add_special_tokens: bool,
     ) -> PyResult<Vec<PyEncoding>> {
         let mut items = Vec::<tk::EncodeInput>::with_capacity(input.len());
-        for i in 0..input.len() {
-            let item = &input[i];
+        for item in &input {
             let item: tk::EncodeInput = if is_pretokenized {
                 item.extract::<PreTokenizedEncodeInput>()?.into()
             } else {