From edb170238febf7fc3e3278ed5b9ca0b2c40c70e3 Mon Sep 17 00:00:00 2001
From: amyeroberts <22614925+amyeroberts@users.noreply.github.com>
Date: Mon, 15 Jan 2024 19:36:59 +0000
Subject: [PATCH] SiLU activation wrapper for safe importing (#28509)

Add back in wrapper for safe importing
---
 src/transformers/activations.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/src/transformers/activations.py b/src/transformers/activations.py
index 2355fb5fed678d..22f5fe9b1bc2f4 100644
--- a/src/transformers/activations.py
+++ b/src/transformers/activations.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import math
+import warnings
 from collections import OrderedDict
 
 import torch
@@ -137,6 +138,14 @@ def forward(self, input: Tensor) -> Tensor:
         return 0.5 * input * (1 + torch.tanh(self.precomputed_constant * (input + 0.044715 * torch.pow(input, 3))))
 
 
+class SiLUActivation(nn.SiLU):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(
+            "The SiLUActivation class has been deprecated and will be removed in v4.39. Please use nn.SiLU instead.",
+        )
+        super().__init__(*args, **kwargs)
+
+
 class MishActivation(nn.Module):
     """
     See Mish: A Self-Regularized Non-Monotonic Activation Function (Misra., https://arxiv.org/abs/1908.08681). Also
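
For context (not part of the patch itself): a minimal sketch of what the restored wrapper means for downstream code. Importing SiLUActivation from transformers.activations keeps working, instantiating it emits the deprecation warning, and the object behaves identically to nn.SiLU, which is the recommended replacement. The snippet assumes a transformers version that still ships the wrapper (per the warning, it is slated for removal in v4.39).

    import warnings

    import torch
    from torch import nn

    # Still importable because the wrapper was added back; this import breaks
    # once the class is removed (v4.39 per the deprecation message).
    from transformers.activations import SiLUActivation

    # Instantiating the wrapper triggers the warnings.warn(...) call in __init__.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        act = SiLUActivation()
    print(caught[0].message if caught else "no warning raised")

    # Functionally identical to the recommended replacement, nn.SiLU.
    x = torch.randn(4)
    assert torch.allclose(act(x), nn.SiLU()(x))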