
Commit

fixed code review comments
l3utterfly committed Oct 4, 2023
1 parent 16f45c4 commit d9cb48f
Showing 1 changed file with 16 additions and 21 deletions.
llama.cpp
@@ -124,7 +124,7 @@ static void replace_all(std::string & s, const std::string & search, const std::string & replace) {
     s = std::move(result);
 }
 
-bool is_float_eq(float a, float b, float abs_tol) {
+static bool is_float_eq(float a, float b, float abs_tol) {
    // Check for non-negative tolerance
    if (abs_tol < 0.0) {
        throw std::invalid_argument("Tolerance must be non-negative");
@@ -969,30 +969,25 @@ struct llama_hparams {
     float rope_freq_base_train;
     float rope_freq_scale_train;
 
-    bool operator==(const llama_hparams & other) const {
-        if (this->vocab_only != other.vocab_only) return false;
-        if (this->n_vocab != other.n_vocab) return false;
-        if (this->n_ctx_train != other.n_ctx_train) return false;
-        if (this->n_embd != other.n_embd) return false;
-        if (this->n_head != other.n_head) return false;
-        if (this->n_head_kv != other.n_head_kv) return false;
-        if (this->n_layer != other.n_layer) return false;
-        if (this->n_rot != other.n_rot) return false;
-        if (this->n_ff != other.n_ff) return false;
+    bool operator!=(const llama_hparams & other) const {
+        if (this->vocab_only != other.vocab_only) return true;
+        if (this->n_vocab != other.n_vocab) return true;
+        if (this->n_ctx_train != other.n_ctx_train) return true;
+        if (this->n_embd != other.n_embd) return true;
+        if (this->n_head != other.n_head) return true;
+        if (this->n_head_kv != other.n_head_kv) return true;
+        if (this->n_layer != other.n_layer) return true;
+        if (this->n_rot != other.n_rot) return true;
+        if (this->n_ff != other.n_ff) return true;
 
         const float EPSILON = 1e-9;
 
-        if (!is_float_eq(this->f_norm_eps, other.f_norm_eps, EPSILON)) return false;
-        if (!is_float_eq(this->f_norm_rms_eps, other.f_norm_rms_eps, EPSILON)) return false;
-        if (!is_float_eq(this->rope_freq_base_train, other.rope_freq_base_train, EPSILON)) return false;
-        if (!is_float_eq(this->rope_freq_scale_train, other.rope_freq_scale_train, EPSILON)) return false;
-
-        return true;
-    }
+        if (!is_float_eq(this->f_norm_eps, other.f_norm_eps, EPSILON)) return true;
+        if (!is_float_eq(this->f_norm_rms_eps, other.f_norm_rms_eps, EPSILON)) return true;
+        if (!is_float_eq(this->rope_freq_base_train, other.rope_freq_base_train, EPSILON)) return true;
+        if (!is_float_eq(this->rope_freq_scale_train, other.rope_freq_scale_train, EPSILON)) return true;
 
-    // implement != explicitly using the "==" implementation above so we don't get a warning about it
-    bool operator!=(const llama_hparams & other) const {
-        return !(*this == other);
+        return false;
     }
 
     uint32_t n_gqa() const {
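For reference, the pattern this commit settles on: instead of defining operator== and deriving operator!= from it (which, per the removed comment, was done only to silence a warning), llama_hparams now implements operator!= directly, returning true on the first mismatching field and comparing float fields through the absolute-tolerance helper. A minimal standalone sketch of that pattern follows; hparams_demo and its two fields are illustrative stand-ins, not names from llama.cpp.

#include <cmath>
#include <cstdint>
#include <cstdio>
#include <stdexcept>

// Absolute-tolerance float comparison, same shape as the helper in the diff.
static bool is_float_eq(float a, float b, float abs_tol) {
    if (abs_tol < 0.0) {
        throw std::invalid_argument("Tolerance must be non-negative");
    }
    return std::fabs(a - b) <= abs_tol;
}

// Illustrative stand-in for llama_hparams, reduced to two fields.
struct hparams_demo {
    uint32_t n_embd;
    float    rope_freq_base_train;

    // Direct operator!=: return true on the first mismatch, false if all
    // fields agree. Integer fields compare exactly; floats use the helper.
    bool operator!=(const hparams_demo & other) const {
        if (n_embd != other.n_embd) return true;

        const float EPSILON = 1e-9;
        if (!is_float_eq(rope_freq_base_train, other.rope_freq_base_train, EPSILON)) return true;

        return false;
    }
};

int main() {
    hparams_demo a{4096, 10000.0f};
    hparams_demo b{4096, 10000.0f};
    std::printf("differ: %s\n", (a != b) ? "yes" : "no"); // prints "differ: no"
    return 0;
}

Note that with EPSILON = 1e-9 the tolerance is smaller than the spacing between adjacent floats at any appreciable magnitude, so the check is effectively exact equality for values like rope_freq_base_train; the constant here simply mirrors the committed code.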
