Skip to content

Commit

Permalink
shorter loop local_files_only
Browse files · Browse the repository at this point in the history
  • Loading branch information
olegklimov committed Nov 7, 2023
1 parent c2e2fd3 commit 20931af
Showing 1 changed file with 2 additions and 3 deletions.
5 changes: 2 additions & 3 deletions self_hosting_machinery/inference/inference_hf.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,9 +146,8 @@ def __init__(self,
assert torch.cuda.is_available(), "model is only supported on GPU"

self._device = "cuda:0"
for attempt in [0, 1]:
for local_files_only in [True, False]:
try:
local_files_only = (attempt == 0)
logging.getLogger("MODEL").info("loading model local_files_only=%i" % local_files_only)
self._tokenizer = AutoTokenizer.from_pretrained(
self._model_dict["model_path"], cache_dir=self.cache_dir, trust_remote_code=True,
Expand Down Expand Up @@ -177,7 +176,7 @@ def __init__(self,
raise RuntimeError(f"unknown model backend {model_dict['backend']}")
break
except IOError as e:
if attempt == 1:
if local_files_only == False:
raise e
self._dump_embeddings()

Expand Down

0 comments on commit 20931af

Please sign in to comment.