Commit 6056e0c

fix with capacity

ArthurZucker committed Nov 5, 2024
1 parent e1169ee

Showing 1 changed file with 2 additions and 2 deletions.
bindings/python/src/tokenizer.rs
@@ -1036,7 +1036,7 @@ impl PyTokenizer {
         is_pretokenized: bool,
         add_special_tokens: bool,
     ) -> PyResult<Vec<PyEncoding>> {
-        let mut items = Vec::<tk::EncodeInput>::with_capacity(input.len());
+        let mut items = Vec::<tk::EncodeInput>::with_capacity(input.len()?);
         for i in 0..input.len()? {
             let item = input.get_item(i)?;
             let item: tk::EncodeInput = if is_pretokenized {
@@ -1096,7 +1096,7 @@ impl PyTokenizer {
         is_pretokenized: bool,
         add_special_tokens: bool,
     ) -> PyResult<Vec<PyEncoding>> {
-        let mut items = Vec::<tk::EncodeInput>::new();
+        let mut items = Vec::<tk::EncodeInput>::with_capacity(input.len()?);
         for i in 0..input.len()? {
             let item = input.get_item(i)?;
             let item: tk::EncodeInput = if is_pretokenized {
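
Note on the change: judging from the surrounding loop (for i in 0..input.len()?), input.len() returns a PyResult here, so the added ? is what lets the length be passed to Vec::with_capacity. Pre-sizing the vector reserves its buffer once instead of letting a Vec::new() grow and reallocate repeatedly as encodings are pushed. Below is a minimal standalone sketch of that pattern (hypothetical names and a plain usize length, not code from tokenizer.rs):

// Standalone sketch: pre-allocating with `with_capacity` when the final
// length is known up front avoids the repeated reallocations that a
// `Vec::new()` would perform while growing.
fn collect_squares(n: usize) -> Vec<u64> {
    // Reserve room for all `n` items at once.
    let mut items = Vec::with_capacity(n);
    for i in 0..n as u64 {
        // No reallocation happens during these pushes.
        items.push(i * i);
    }
    items
}

fn main() {
    let squares = collect_squares(8);
    assert!(squares.capacity() >= 8);
    println!("{:?}", squares);
}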
