SiLU activation wrapper for safe importing (#28509)
Add back the `SiLUActivation` wrapper so that existing imports of the class do not break; it now warns that it is deprecated in favor of `nn.SiLU`.
amyeroberts authored Jan 15, 2024
1 parent ff86bc3 · commit edb1702
1 changed file, 9 additions and 0 deletions: src/transformers/activations.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import math
+import warnings
 from collections import OrderedDict
 
 import torch
@@ -137,6 +138,14 @@ def forward(self, input: Tensor) -> Tensor:
         return 0.5 * input * (1 + torch.tanh(self.precomputed_constant * (input + 0.044715 * torch.pow(input, 3))))
 
 
+class SiLUActivation(nn.SiLU):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(
+            "The SiLUActivation class has been deprecated and will be removed in v4.39. Please use nn.SiLU instead.",
+        )
+        super().__init__(*args, **kwargs)
+
+
 class MishActivation(nn.Module):
     """
     See Mish: A Self-Regularized Non-Monotonic Activation Function (Misra., https://arxiv.org/abs/1908.08681). Also
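For context, a minimal sketch of the behavior this wrapper restores: on a transformers version that still ships the class (i.e. before its scheduled removal in v4.39), importing and instantiating `SiLUActivation` keeps working but emits the deprecation warning added here, while the module itself behaves identically to `nn.SiLU`. The snippet below assumes such a version is installed.

```python
import warnings

import torch
from transformers.activations import SiLUActivation  # deprecated wrapper around nn.SiLU

# Instantiating the wrapper triggers the warning added in this commit.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    act = SiLUActivation()

print(caught[0].message)
# The SiLUActivation class has been deprecated and will be removed in v4.39. Please use nn.SiLU instead.

# Functionally it is still SiLU: silu(x) = x * sigmoid(x)
x = torch.randn(3)
assert torch.equal(act(x), torch.nn.functional.silu(x))
```

Because the class subclasses `nn.SiLU` rather than reimplementing the activation, downstream code that imports it keeps its exact numerical behavior and only gains the warning.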
