mirror of https://github.com/google/gemma.cpp.git
Warning fixes: unused member, cast, unused function
PiperOrigin-RevId: 611074887
This commit is contained in:
parent f4a14bfdf2
commit 272f17ddb3
@@ -25,6 +25,8 @@ namespace gcpp {
 class DistortionStats {
  public:
   void Notify(float original, float distorted) {
+    (void)padding_;  // prevent unused member warning
+
     const double l1 = hwy::ScalarAbs(original - distorted);
 
     if (l1 > max_l1_) {
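Note: the (void)padding_; statement is the conventional way to "use" a field that exists only for layout or future use, which silences Clang's -Wunused-private-field. A minimal sketch of the idiom; the class below is illustrative, not the real DistortionStats:

// Illustrative stand-in for a stats class with a deliberately unused
// member; not the actual gcpp::DistortionStats definition.
class ExampleStats {
 public:
  void Notify(float value) {
    // Naming the member in a discarded-value expression counts as a use,
    // so -Wunused-private-field no longer fires; no code is generated.
    (void)padding_;
    sum_ += value;
  }

 private:
  double sum_ = 0.0;
  char padding_[4] = {};  // reserved; never read or written
};

Since C++17, annotating the member declaration itself with [[maybe_unused]] is an alternative that avoids repeating the statement in every method.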
gemma.cc (20 lines changed)

@@ -633,30 +633,32 @@ void ForEachTensor(const Weights<TConfig>* weights,
        c_weights.c_final_norm_scale);
 
   char name[16];
-  for (size_t layer_idx = 0; layer_idx < TConfig::kLayers; ++layer_idx) {
-    Layer<TConfig>* layer = weights ? &weights->layers[layer_idx] : nullptr;
-    CompressedLayer<TConfig>* c_layer = c_weights.CLayer(layer_idx);
+  for (int layer_idx = 0; layer_idx < static_cast<int>(TConfig::kLayers);
+       ++layer_idx) {
+    const size_t idx = static_cast<size_t>(layer_idx);
+    Layer<TConfig>* layer = weights ? &weights->layers[idx] : nullptr;
+    CompressedLayer<TConfig>* c_layer = c_weights.CLayer(idx);
 
-    snprintf(name, sizeof(name), "pre_ff_ns_%lu", layer_idx);
+    snprintf(name, sizeof(name), "pre_ff_ns_%d", layer_idx);
     func(name, layer ? layer->pre_ffw_norm_scale.data() : nullptr,
          c_layer->c_pre_ffw_norm_scale);
 
-    snprintf(name, sizeof(name), "gating_ein_%lu", layer_idx);
+    snprintf(name, sizeof(name), "gating_ein_%d", layer_idx);
     func(name, layer ? layer->gating_einsum_w.data() : nullptr,
          c_layer->c_gating_einsum_w);
 
-    snprintf(name, sizeof(name), "linear_w_%lu", layer_idx);
+    snprintf(name, sizeof(name), "linear_w_%d", layer_idx);
     func(name, layer ? layer->linear_w.data() : nullptr, c_layer->c_linear_w);
 
-    snprintf(name, sizeof(name), "qkv_ein_%lu", layer_idx);
+    snprintf(name, sizeof(name), "qkv_ein_%d", layer_idx);
     func(name, layer ? layer->qkv_einsum_w.data() : nullptr,
          c_layer->c_qkv_einsum_w);
 
-    snprintf(name, sizeof(name), "att_ein_%lu", layer_idx);
+    snprintf(name, sizeof(name), "att_ein_%d", layer_idx);
     func(name, layer ? layer->attn_vec_einsum_w.data() : nullptr,
          c_layer->c_attn_vec_einsum_w);
 
-    snprintf(name, sizeof(name), "pre_att_ns_%lu", layer_idx);
+    snprintf(name, sizeof(name), "pre_att_ns_%d", layer_idx);
     func(name, layer ? layer->pre_attention_norm_scale.data() : nullptr,
          c_layer->c_pre_attention_norm_scale);
   }
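Note: this is the "cast" fix from the commit title. layer_idx was a size_t, but the "%lu" conversion matches unsigned long, and size_t is not unsigned long on every platform (it is unsigned long long on 64-bit Windows, and unsigned int on many 32-bit targets), so -Wformat fires. The commit switches the loop variable to int, prints with "%d", and casts back to size_t where an index is needed. A standalone sketch of that approach and of the "%zu" alternative; kLayers here is a hypothetical stand-in for TConfig::kLayers:

#include <cstddef>
#include <cstdio>

int main() {
  constexpr size_t kLayers = 3;  // stand-in for TConfig::kLayers
  char name[16];

  // Approach used by the commit: an int loop index matches "%d" exactly,
  // and an explicit cast recovers size_t for container indexing.
  for (int layer_idx = 0; layer_idx < static_cast<int>(kLayers);
       ++layer_idx) {
    std::snprintf(name, sizeof(name), "att_ein_%d", layer_idx);
    std::puts(name);
    const size_t idx = static_cast<size_t>(layer_idx);  // for indexing
    (void)idx;
  }

  // Alternative: keep size_t and use the dedicated "%zu" length modifier
  // (C99/C++11), which matches size_t on all platforms.
  for (size_t layer_idx = 0; layer_idx < kLayers; ++layer_idx) {
    std::snprintf(name, sizeof(name), "att_ein_%zu", layer_idx);
    std::puts(name);
  }
  return 0;
}

"%zu" would also silence the warning; the int-plus-cast route may have been preferred for compatibility with older toolchains whose printf implementations lacked %zu.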
@@ -204,7 +204,7 @@ class ArgsBase {
   }
 };
 
-static bool HasHelp(int argc, char* argv[]) {
+static inline HWY_MAYBE_UNUSED bool HasHelp(int argc, char* argv[]) {
   // TODO(austinvhuang): handle case insensitivity
   if (argc == 1) {
     // no arguments - print help
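Note: this is the "unused function" fix. HasHelp has internal linkage and is defined in a header, so every translation unit that includes the header gets its own private copy; any TU that never calls it trips -Wunused-function. HWY_MAYBE_UNUSED is Highway's portable wrapper around the compiler's unused attribute. A self-contained sketch using the standard C++17 spelling instead of the macro; the body below is a simplified reimplementation, not the actual gemma.cpp logic:

#include <cstring>

// In a header: "static" gives internal linkage, so each including TU
// receives a separate copy. TUs that never call the function would warn
// under -Wunused-function; [[maybe_unused]] suppresses that. Highway's
// HWY_MAYBE_UNUSED expands to an equivalent attribute.
// Simplified body: the real version may accept more spellings, and the
// TODO above notes it is not yet case-insensitive.
[[maybe_unused]] static inline bool HasHelp(int argc, char* argv[]) {
  if (argc == 1) {
    return true;  // no arguments - print help
  }
  for (int i = 1; i < argc; ++i) {
    if (std::strcmp(argv[i], "--help") == 0) return true;
  }
  return false;
}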