Skip to content

Commit

Permalink
👋 Remove deprecated tokenizer argument in BCO, GKD, Iterative SFT, Nash MD and XPO (#2349)
Browse files Browse the repository at this point in the history
  • Loading branch information
qgallouedec authored Nov 12, 2024
1 parent 2d24d35 commit 6239631
Show file tree
Hide file tree
Showing 5 changed files with 0 additions and 10 deletions.
2 changes: 0 additions & 2 deletions trl/trainer/bco_trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,6 @@
from transformers.trainer_callback import TrainerCallback
from transformers.trainer_utils import EvalLoopOutput, has_length
from transformers.utils import is_peft_available
from transformers.utils.deprecation import deprecate_kwarg

from ..data_utils import maybe_apply_chat_template
from ..models import PreTrainedModelWrapper, create_reference_model
Expand Down Expand Up @@ -320,7 +319,6 @@ class BCOTrainer(Trainer):

_tag_names = ["trl", "bco"]

@deprecate_kwarg("tokenizer", new_name="processing_class", version="0.13.0", raise_if_both_names=True)
def __init__(
self,
model: Union[PreTrainedModel, nn.Module, str] = None,
Expand Down
2 changes: 0 additions & 2 deletions trl/trainer/gkd_trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,6 @@
from transformers.trainer_callback import TrainerCallback
from transformers.trainer_utils import EvalPrediction
from transformers.utils import is_liger_kernel_available, is_peft_available
from transformers.utils.deprecation import deprecate_kwarg

from ..models import PreTrainedModelWrapper
from ..models.utils import unwrap_model_for_generation
Expand All @@ -62,7 +61,6 @@
class GKDTrainer(SFTTrainer):
_tag_names = ["trl", "gkd"]

@deprecate_kwarg("tokenizer", new_name="processing_class", version="0.13.0", raise_if_both_names=True)
def __init__(
self,
model: Optional[Union[PreTrainedModel, nn.Module, str]] = None,
Expand Down
2 changes: 0 additions & 2 deletions trl/trainer/iterative_sft_trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@
)
from transformers.trainer_utils import EvalLoopOutput
from transformers.utils import is_peft_available
from transformers.utils.deprecation import deprecate_kwarg

from ..core import PPODecorators
from .utils import generate_model_card
Expand Down Expand Up @@ -81,7 +80,6 @@ class IterativeSFTTrainer(Trainer):

_tag_names = ["trl", "iterative-sft"]

@deprecate_kwarg("tokenizer", new_name="processing_class", version="0.13.0", raise_if_both_names=True)
def __init__(
self,
model: Optional[PreTrainedModel] = None,
Expand Down
2 changes: 0 additions & 2 deletions trl/trainer/nash_md_trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@
from transformers.trainer_utils import EvalPrediction
from transformers.training_args import OptimizerNames
from transformers.utils import is_apex_available
from transformers.utils.deprecation import deprecate_kwarg

from ..data_utils import is_conversational, maybe_apply_chat_template
from ..models.modeling_base import GeometricMixtureWrapper
Expand Down Expand Up @@ -94,7 +93,6 @@ class NashMDTrainer(OnlineDPOTrainer):

_tag_names = ["trl", "nash-md"]

@deprecate_kwarg("tokenizer", new_name="processing_class", version="0.13.0", raise_if_both_names=True)
def __init__(
self,
model: Union[PreTrainedModel, nn.Module] = None,
Expand Down
2 changes: 0 additions & 2 deletions trl/trainer/xpo_trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@
)
from transformers.trainer_utils import EvalPrediction
from transformers.training_args import OptimizerNames
from transformers.utils.deprecation import deprecate_kwarg

from ..data_utils import is_conversational, maybe_apply_chat_template
from ..models.utils import unwrap_model_for_generation
Expand Down Expand Up @@ -93,7 +92,6 @@ class XPOTrainer(OnlineDPOTrainer):

_tag_names = ["trl", "xpo"]

@deprecate_kwarg("tokenizer", new_name="processing_class", version="0.13.0", raise_if_both_names=True)
def __init__(
self,
model: Union[PreTrainedModel, nn.Module] = None,
Expand Down

0 comments on commit 6239631

Please sign in to comment.