Skip to content

Commit e6aadeb

Browse files
authored
Use weights_only for load (#127)
1 parent b27b84d commit e6aadeb

File tree

2 files changed

+2
-2
lines changed

2 files changed

+2
-2
lines changed

experiments/eval_combo.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -365,7 +365,7 @@ def run(
365365
weights_path = Path(f"static_quant_scalars/{sam_model_type}_{batch_size}_static_quant_weights.ptk")
366366
if weights_path.exists() and weights_path.is_file():
367367
print("Loading static quantization weights")
368-
weights = torch.load(f"static_quant_scalars/{sam_model_type}_{batch_size}_static_quant_weights.ptk")
368+
weights = torch.load(f"static_quant_scalars/{sam_model_type}_{batch_size}_static_quant_weights.ptk", weights_only=True)
369369
from static_quant import set_x_absmax
370370
set_x_absmax(predictor.model.image_encoder, weights)
371371
elif compress == "sparse":

segment_anything_fast/build_sam.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -142,6 +142,6 @@ def _build_sam(
142142
sam.eval()
143143
if checkpoint is not None:
144144
with open(checkpoint, "rb") as f:
145-
state_dict = torch.load(f)
145+
state_dict = torch.load(f, weights_only=True)
146146
sam.load_state_dict(state_dict)
147147
return sam

0 commit comments

Comments (0)