Skip to content

Commit

Permalink
Remove safeglobals add — not worth having, given that all pretrained weights are on the hub and there are numpy 1 vs 2 issues
Browse files Browse the repository at this point in the history
  • Loading branch information
rwightman committed Oct 24, 2024
1 parent 84f7d2f commit 1b01224
Showing 1 changed file with 0 additions and 16 deletions.
16 changes: 0 additions & 16 deletions src/open_clip/factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,11 @@

import torch

from .constants import OPENAI_DATASET_MEAN, OPENAI_DATASET_STD
from .convert import convert_state_dict
from .model import CLIP, CustomTextCLIP, convert_weights_to_lp, convert_to_custom_text_state_dict,\
resize_pos_embed, get_cast_dtype, resize_text_pos_embed, set_model_preprocess_cfg
from .coca_model import CoCa
from .loss import ClipLoss, DistillClipLoss, CoCaLoss, SigLipLoss
from .openai import load_openai_model
from .pretrained import is_pretrained_cfg, get_pretrained_cfg, download_pretrained,\
list_pretrained_tags_by_model, download_pretrained_from_hf
from .transform import image_transform_v2, AugmentationCfg, PreprocessCfg, merge_preprocess_dict, merge_preprocess_kwargs
Expand All @@ -27,20 +25,6 @@
_MODEL_CONFIGS = {} # directory (model_name: config) of model architecture configs


# Best-effort registration of extra "safe globals" so torch.load(..., weights_only=True)
# can unpickle MetaCLIP checkpoints that embed numpy scalars/dtypes.
# NOTE(review): wrapped in a blanket try/except on purpose — add_safe_globals only
# exists on newer PyTorch, and np.dtypes / np.core layout differs between numpy 1 and 2,
# so any of these attribute lookups may raise; failure here must not break import.
try:
    import _codecs
    import numpy as np
    # add safe globals that are known to be needed for metaclip weights loading in weights_only=True mode
    torch.serialization.add_safe_globals([
        _codecs.encode,  # this one not needed for PyTorch >= 2.5.0
        np.core.multiarray.scalar,
        np.dtype,
        np.dtypes.Float64DType,
    ])
except Exception:
    # Silently skip: loading still works for most checkpoints / torch versions.
    pass


def _natural_key(string_):
return [int(s) if s.isdigit() else s for s in re.split(r'(\d+)', string_.lower())]

Expand Down

0 comments on commit 1b01224

Please sign in to comment.