diff --git a/bindings/python/src/tokenizer.rs b/bindings/python/src/tokenizer.rs
index 918a012e6..2fcdb2282 100644
--- a/bindings/python/src/tokenizer.rs
+++ b/bindings/python/src/tokenizer.rs
@@ -1037,8 +1037,7 @@ impl PyTokenizer {
         add_special_tokens: bool,
     ) -> PyResult<Vec<PyEncoding>> {
         let mut items = Vec::<tk::EncodeInput>::with_capacity(input.len());
-        for i in 0..input.len() {
-            let item = &input[i];
+        for item in &input {
             let item: tk::EncodeInput = if is_pretokenized {
                 item.extract::<PreTokenizedEncodeInput>()?.into()
             } else {
@@ -1097,8 +1096,7 @@ impl PyTokenizer {
         add_special_tokens: bool,
     ) -> PyResult<Vec<PyEncoding>> {
         let mut items = Vec::<tk::EncodeInput>::with_capacity(input.len());
-        for i in 0..input.len() {
-            let item = &input[i];
+        for item in &input {
             let item: tk::EncodeInput = if is_pretokenized {
                 item.extract::<PreTokenizedEncodeInput>()?.into()
             } else {
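
Note: a minimal standalone sketch of the same refactor, using a hypothetical function over a plain Vec<String> for illustration (none of the names below are part of the tokenizers API). Iterating over `&input` borrows each element directly, which is what clippy's `needless_range_loop` lint recommends over indexing with `input[i]`:

    // Hypothetical example of the refactor applied in both hunks above:
    // replace an index-based loop with direct iteration by reference.
    fn collect_lengths(input: Vec<String>) -> Vec<usize> {
        let mut items = Vec::with_capacity(input.len());
        // Before: for i in 0..input.len() { let item = &input[i]; ... }
        // After: borrow each element directly; no index arithmetic,
        // no bounds checks at the call site.
        for item in &input {
            items.push(item.len());
        }
        items
    }

    fn main() {
        let lengths = collect_lengths(vec!["hello".into(), "tokenizer".into()]);
        assert_eq!(lengths, vec![5, 9]);
        println!("{lengths:?}");
    }

The behavior is unchanged; the iterator form simply removes the manual indexing, which is why the hunks shrink from 8 lines to 7.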