Use weights_only for load (#127)
kit1980 authored Sep 13, 2024
1 parent b27b84d commit e6aadeb
Showing 2 changed files with 2 additions and 2 deletions.
2 changes: 1 addition & 1 deletion experiments/eval_combo.py
@@ -365,7 +365,7 @@ def run(
         weights_path = Path(f"static_quant_scalars/{sam_model_type}_{batch_size}_static_quant_weights.ptk")
         if weights_path.exists() and weights_path.is_file():
             print("Loading static quantization weights")
-            weights = torch.load(f"static_quant_scalars/{sam_model_type}_{batch_size}_static_quant_weights.ptk")
+            weights = torch.load(f"static_quant_scalars/{sam_model_type}_{batch_size}_static_quant_weights.ptk", weights_only=True)
             from static_quant import set_x_absmax
             set_x_absmax(predictor.model.image_encoder, weights)
     elif compress == "sparse":
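For context: weights_only=True switches torch.load to a restricted unpickler that only reconstructs tensors and plain Python containers, so a tampered checkpoint file cannot execute arbitrary code during loading. A minimal sketch of the pattern this commit adopts (the file name demo_weights.pt is illustrative, not from this repo):

import torch
import torch.nn as nn

# Save a checkpoint the ordinary way.
model = nn.Linear(4, 2)
torch.save(model.state_dict(), "demo_weights.pt")  # illustrative path

# weights_only=True refuses to unpickle anything beyond tensors and
# basic containers, closing the arbitrary-code-execution hole that a
# plain torch.load() leaves open when the file is untrusted.
state_dict = torch.load("demo_weights.pt", weights_only=True)
model.load_state_dict(state_dict)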
2 changes: 1 addition & 1 deletion segment_anything_fast/build_sam.py
@@ -142,6 +142,6 @@ def _build_sam(
     sam.eval()
     if checkpoint is not None:
         with open(checkpoint, "rb") as f:
-            state_dict = torch.load(f)
+            state_dict = torch.load(f, weights_only=True)
         sam.load_state_dict(state_dict)
     return sam
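One caveat not exercised by this commit: checkpoints that pickle custom Python objects fail under weights_only=True with an UnpicklingError naming the blocked global. On PyTorch 2.4+ you can allowlist types you trust instead of dropping the flag; a sketch, where MyConfig and legacy_checkpoint.pt are hypothetical:

import torch

# Hypothetical custom class embedded in an older checkpoint.
class MyConfig:
    def __init__(self, lr=0.1):
        self.lr = lr

torch.save({"config": MyConfig(), "step": 10}, "legacy_checkpoint.pt")

# add_safe_globals (PyTorch 2.4+) extends the weights_only allowlist
# with explicitly trusted classes while keeping the restricted unpickler.
torch.serialization.add_safe_globals([MyConfig])
ckpt = torch.load("legacy_checkpoint.pt", weights_only=True)
assert ckpt["step"] == 10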
