py : try to fix flake stuff
ggerganov committed Jan 13, 2024
1 parent fe25223 commit c3d64a0
Showing 1 changed file with 3 additions and 3 deletions.
convert-hf-to-gguf.py (6 changes: 3 additions & 3 deletions)
@@ -25,7 +25,6 @@

 # check for any of the given keys in the dictionary and return the value of the first key found
 def get_key_opts(d, keys):
-    vals = []
     for k in keys:
         if k in d:
             return d[k]
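
For readers skimming the diff: get_key_opts simply walks the given key names and returns the value of the first one present in the dict, which lets the converter read hyperparameters from HF configs that name the same field differently. The removed vals = [] was an unused local, presumably the flake8 warning (F841) being silenced here. A minimal usage sketch, with hparams values made up purely for illustration:

    hparams = {"num_attention_heads": 32, "hidden_size": 2560}  # illustrative values only
    get_key_opts(hparams, ["n_head", "num_attention_heads"])    # -> 32 ("n_head" absent, next key used)
    get_key_opts(hparams, ["n_embd", "hidden_size"])             # -> 2560
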
@@ -267,7 +266,6 @@ def _set_vocab_gpt2(self):
                 toktypes.append(gguf.TokenType.USER_DEFINED)
             elif reverse_vocab[i] in added_vocab:
                 tokens.append(reverse_vocab[i])
-                # check if tokenizer has added_tokens_decoder
                 if hasattr(tokenizer, "added_tokens_decoder"):
                     if tokenizer.added_tokens_decoder[i].special:
                         toktypes.append(gguf.TokenType.CONTROL)
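
For context, this branch picks the GGUF token type for tokens that come from the tokenizer's added vocabulary: if the tokenizer exposes added_tokens_decoder (a mapping of token id to AddedToken in recent transformers versions) and reports the token as special, it is recorded as a CONTROL token; the remaining branches are collapsed in this view. A condensed sketch of just the shown check, with the helper name invented here for illustration:

    def is_special_added_token(tokenizer, i):
        # mirrors the shown lines: look up token id i in added_tokens_decoder and test .special
        return hasattr(tokenizer, "added_tokens_decoder") and tokenizer.added_tokens_decoder[i].special
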
@@ -1092,7 +1090,9 @@ def set_gguf_parameters(self):
         self.gguf_writer.add_head_count_kv(get_key_opts(self.hparams, ["n_head", "num_attention_heads"]))
         self.gguf_writer.add_layer_norm_eps(get_key_opts(self.hparams, ["layer_norm_epsilon", "layer_norm_eps"]))
         self.gguf_writer.add_rope_dimension_count(
-            int(get_key_opts(self.hparams, ["partial_rotary_factor"]) * get_key_opts(self.hparams, ["n_embd", "hidden_size"])) // get_key_opts(self.hparams, ["n_head", "num_attention_heads"]))
+            int(get_key_opts(self.hparams, ["partial_rotary_factor"]) *
+                get_key_opts(self.hparams, ["n_embd", "hidden_size"])) //
+            get_key_opts(self.hparams, ["n_head", "num_attention_heads"]))
         self.gguf_writer.add_file_type(self.ftype)
         self.gguf_writer.add_add_bos_token(False)

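The split expression computes the same rotary dimension count as the original one-liner: the partial rotary factor times the embedding size, integer-divided by the number of attention heads; the reformatting presumably just shortens the line for the linter. A rough worked example, assuming Phi-2-like values of partial_rotary_factor = 0.4, hidden_size = 2560 and num_attention_heads = 32 (chosen here only for illustration):

    int(0.4 * 2560) // 32   # = 1024 // 32 = 32 rotary dimensions per head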
