diff --git a/bindings/python/py_src/tokenizers/implementations/byte_level_bpe.py b/bindings/python/py_src/tokenizers/implementations/byte_level_bpe.py
index c7e3dbc46..f65f05e1d 100644
--- a/bindings/python/py_src/tokenizers/implementations/byte_level_bpe.py
+++ b/bindings/python/py_src/tokenizers/implementations/byte_level_bpe.py
@@ -16,7 +16,7 @@ class ByteLevelBPETokenizer(BaseTokenizer):
     def __init__(
         self,
         vocab: Optional[Union[str, Dict[str, int]]] = None,
-        merges: Optional[Union[str, Dict[Tuple[int, int], Tuple[int, int]]]] = None,
+        merges: Optional[Union[str, List[Tuple[str, str]]]] = None,
         add_prefix_space: bool = False,
         lowercase: bool = False,
         dropout: Optional[float] = None,
diff --git a/bindings/python/py_src/tokenizers/implementations/char_level_bpe.py b/bindings/python/py_src/tokenizers/implementations/char_level_bpe.py
index 29ca5977d..62b5bcdf0 100644
--- a/bindings/python/py_src/tokenizers/implementations/char_level_bpe.py
+++ b/bindings/python/py_src/tokenizers/implementations/char_level_bpe.py
@@ -25,7 +25,7 @@ class CharBPETokenizer(BaseTokenizer):
     def __init__(
         self,
         vocab: Optional[Union[str, Dict[str, int]]] = None,
-        merges: Optional[Union[str, Dict[Tuple[int, int], Tuple[int, int]]]] = None,
+        merges: Optional[Union[str, List[Tuple[str, str]]]] = None,
         unk_token: Union[str, AddedToken] = "<unk>",
         suffix: str = "</w>",
         dropout: Optional[float] = None,
diff --git a/bindings/python/py_src/tokenizers/implementations/sentencepiece_bpe.py b/bindings/python/py_src/tokenizers/implementations/sentencepiece_bpe.py
index cd550b410..26200489a 100644
--- a/bindings/python/py_src/tokenizers/implementations/sentencepiece_bpe.py
+++ b/bindings/python/py_src/tokenizers/implementations/sentencepiece_bpe.py
@@ -16,7 +16,7 @@ class SentencePieceBPETokenizer(BaseTokenizer):
     def __init__(
         self,
         vocab: Optional[Union[str, Dict[str, int]]] = None,
-        merges: Optional[Union[str, Dict[Tuple[int, int], Tuple[int, int]]]] = None,
+        merges: Optional[Union[str, List[Tuple[str, str]]]] = None,
         unk_token: Union[str, AddedToken] = "<unk>",
         replacement: str = "▁",
         add_prefix_space: bool = True,
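
The corrected annotation reflects how `merges` is actually consumed by these constructors: either a path to a merges file or an in-memory list of string pairs, not a dict keyed by id tuples. A minimal sketch of the in-memory form, using a hypothetical toy vocab and merge list purely to illustrate the annotated types:

```python
from tokenizers import ByteLevelBPETokenizer

# Toy in-memory vocab and merges (hypothetical values, for illustration only):
# vocab maps token string -> id; merges is a List[Tuple[str, str]].
vocab = {"a": 0, "b": 1, "ab": 2}
merges = [("a", "b")]

tokenizer = ByteLevelBPETokenizer(vocab=vocab, merges=merges)
print(tokenizer.encode("ab").tokens)  # expected: ["ab"]

# The `str` variant of the Union still covers loading from files, e.g.
# ByteLevelBPETokenizer(vocab="vocab.json", merges="merges.txt").
```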