diff --git a/bindings/python/py_src/tokenizers/__init__.pyi b/bindings/python/py_src/tokenizers/__init__.pyi
index 0ad96fc8a..6c731ff0a 100644
--- a/bindings/python/py_src/tokenizers/__init__.pyi
+++ b/bindings/python/py_src/tokenizers/__init__.pyi
@@ -971,7 +971,7 @@ class Tokenizer:
         pass
 
     @staticmethod
-    def from_pretrained(identifier, revision="main", auth_token=None):
+    def from_pretrained(identifier, revision="main", token=None):
         """
         Instantiate a new :class:`~tokenizers.Tokenizer` from an existing file on the
         Hugging Face Hub.
@@ -982,7 +982,7 @@ class Tokenizer:
                 a tokenizer.json file
             revision (:obj:`str`, defaults to `main`):
                 A branch or commit id
-            auth_token (:obj:`str`, `optional`, defaults to `None`):
+            token (:obj:`str`, `optional`, defaults to `None`):
                 An optional auth token used to access private repositories on the
                 Hugging Face Hub
 
diff --git a/bindings/python/src/tokenizer.rs b/bindings/python/src/tokenizer.rs
index 24a68c6bb..00dacf417 100644
--- a/bindings/python/src/tokenizer.rs
+++ b/bindings/python/src/tokenizer.rs
@@ -578,19 +578,19 @@ impl PyTokenizer {
     ///     a tokenizer.json file
     /// revision (:obj:`str`, defaults to `main`):
     ///     A branch or commit id
-    /// auth_token (:obj:`str`, `optional`, defaults to `None`):
+    /// token (:obj:`str`, `optional`, defaults to `None`):
     ///     An optional auth token used to access private repositories on the
     ///     Hugging Face Hub
     ///
     /// Returns:
     ///     :class:`~tokenizers.Tokenizer`: The new tokenizer
     #[staticmethod]
-    #[pyo3(signature = (identifier, revision = String::from("main"), auth_token = None))]
-    #[pyo3(text_signature = "(identifier, revision=\"main\", auth_token=None)")]
+    #[pyo3(signature = (identifier, revision = String::from("main"), token = None))]
+    #[pyo3(text_signature = "(identifier, revision=\"main\", token=None)")]
     fn from_pretrained(
         identifier: &str,
         revision: String,
-        auth_token: Option<String>,
+        token: Option<String>,
     ) -> PyResult<Self> {
         let path = Python::with_gil(|py| -> PyResult<String> {
             let huggingface_hub = PyModule::import_bound(py, intern!(py, "huggingface_hub"))?;
@@ -601,8 +601,8 @@ impl PyTokenizer {
                 (intern!(py, "revision"), &revision),
             ]
             .into_py_dict_bound(py);
-            if let Some(auth_token) = auth_token {
-                kwargs.set_item(intern!(py, "token"), auth_token)?;
+            if let Some(token) = token {
+                kwargs.set_item(intern!(py, "token"), token)?;
             }
             let path: String = hf_hub_download.call((), Some(&kwargs))?.extract()?;
             Ok(path)
diff --git a/tokenizers/src/utils/from_pretrained.rs b/tokenizers/src/utils/from_pretrained.rs
index 0cc2215aa..dac5d8263 100644
--- a/tokenizers/src/utils/from_pretrained.rs
+++ b/tokenizers/src/utils/from_pretrained.rs
@@ -8,7 +8,7 @@ use std::path::PathBuf;
 pub struct FromPretrainedParameters {
     pub revision: String,
     pub user_agent: HashMap<String, String>,
-    pub auth_token: Option<String>,
+    pub token: Option<String>,
 }
 
 impl Default for FromPretrainedParameters {
@@ -16,7 +16,7 @@ impl Default for FromPretrainedParameters {
         Self {
             revision: "main".into(),
             user_agent: HashMap::new(),
-            auth_token: None,
+            token: None,
         }
     }
 }
@@ -60,7 +60,7 @@ pub fn from_pretrained<S: AsRef<str>>(
     }
 
     let mut builder = ApiBuilder::new();
-    if let Some(token) = params.auth_token {
+    if let Some(token) = params.token {
        builder = builder.with_token(Some(token));
     }
     let api = builder.build()?;
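On the Python side, the user-visible effect of this rename is that `Tokenizer.from_pretrained` now accepts `token` instead of `auth_token`. A minimal usage sketch (the repository names and the token value below are placeholders, not part of this change):

```python
from tokenizers import Tokenizer

# Public repository: no token required.
tokenizer = Tokenizer.from_pretrained("bert-base-uncased")

# Private repository: pass the renamed `token` argument (formerly `auth_token`).
# "your-org/private-tokenizer" and "hf_xxx" are placeholders.
private_tokenizer = Tokenizer.from_pretrained(
    "your-org/private-tokenizer",
    revision="main",
    token="hf_xxx",
)
```

Users of the Rust crate's `from_pretrained` are affected the same way: `FromPretrainedParameters` now exposes a `token` field in place of `auth_token`.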