Skip to content

Commit

Permalink
Putting back the imports.
Browse files Browse the repository at this point in the history
  • Loading branch information
Narsil committed Mar 12, 2024
1 parent 0abe84f commit 20d74e5
Show file tree
Hide file tree
Showing 14 changed files with 19 additions and 5 deletions.
2 changes: 1 addition & 1 deletion bindings/python/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ check_dirs := examples py_src/tokenizers tests
style:
python stub.py
ruff check $(check_dirs) --fix
ruff format $(check_dirs)
	ruff format $(check_dirs)

# Check the source code is formatted correctly
check-style:
Expand Down
2 changes: 2 additions & 0 deletions bindings/python/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -67,4 +67,6 @@ lint.ignore = [
"E721",
# Import order
"E402",
# Fixtures unused import
"F811",
]
2 changes: 2 additions & 0 deletions bindings/python/tests/bindings/test_encoding.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@

from tokenizers import BertWordPieceTokenizer

from ..utils import bert_files, data_dir


class TestEncoding:
@pytest.fixture(scope="class")
Expand Down
1 change: 1 addition & 0 deletions bindings/python/tests/bindings/test_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import pytest

from tokenizers.models import BPE, Model, WordLevel, WordPiece
from ..utils import bert_files, data_dir, roberta_files


class TestBPE:
Expand Down
2 changes: 2 additions & 0 deletions bindings/python/tests/bindings/test_processors.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@
TemplateProcessing,
)

from ..utils import data_dir, roberta_files


class TestBertProcessing:
def test_instantiate(self):
Expand Down
2 changes: 1 addition & 1 deletion bindings/python/tests/bindings/test_tokenizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from tokenizers.pre_tokenizers import ByteLevel
from tokenizers.processors import RobertaProcessing

from ..utils import multiprocessing_with_parallelism
from ..utils import bert_files, data_dir, multiprocessing_with_parallelism, roberta_files


class TestAddedToken:
Expand Down
2 changes: 2 additions & 0 deletions bindings/python/tests/bindings/test_trainers.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@
trainers,
)

from ..utils import data_dir, train_files


class TestBpeTrainer:
def test_can_modify(self):
Expand Down
1 change: 1 addition & 0 deletions bindings/python/tests/documentation/test_pipeline.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from tokenizers import Tokenizer

from ..utils import data_dir, doc_pipeline_bert_tokenizer, doc_wiki_tokenizer

disable_printing = True
original_print = print
Expand Down
1 change: 1 addition & 0 deletions bindings/python/tests/documentation/test_quicktour.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from tokenizers import Tokenizer
from ..utils import data_dir, doc_wiki_tokenizer


disable_printing = True
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
import datasets
import pytest

from ..utils import data_dir, train_files


class TestTrainFromIterators:
@staticmethod
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from tokenizers import BertWordPieceTokenizer

from ..utils import multiprocessing_with_parallelism
from ..utils import bert_files, data_dir, multiprocessing_with_parallelism


class TestBertWordPieceTokenizer:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from tokenizers import ByteLevelBPETokenizer

from ..utils import multiprocessing_with_parallelism
from ..utils import data_dir, multiprocessing_with_parallelism, roberta_files


class TestByteLevelBPE:
Expand Down
2 changes: 1 addition & 1 deletion bindings/python/tests/implementations/test_char_bpe.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from tokenizers import CharBPETokenizer

from ..utils import multiprocessing_with_parallelism
from ..utils import data_dir, multiprocessing_with_parallelism, openai_files


class TestCharBPETokenizer:
Expand Down
1 change: 1 addition & 0 deletions bindings/python/tests/test_serialization.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

from huggingface_hub import HfApi, cached_download, hf_hub_url
from tokenizers import Tokenizer
from .utils import albert_base, data_dir


class TestSerialization:
Expand Down

0 comments on commit 20d74e5

Please sign in to comment.