Warning fixes: unused member, cast, unused function

PiperOrigin-RevId: 611074887
Jan Wassenberg 2024-02-28 05:53:52 -08:00 committed by Copybara-Service
parent f4a14bfdf2
commit 272f17ddb3
3 changed files with 14 additions and 10 deletions

View File

@@ -25,6 +25,8 @@ namespace gcpp {
 class DistortionStats {
  public:
   void Notify(float original, float distorted) {
+    (void)padding_;  // prevent unused member warning
     const double l1 = hwy::ScalarAbs(original - distorted);
     if (l1 > max_l1_) {

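The one-line addition above uses a common C++ idiom: casting an otherwise-unreferenced member to void inside any member function marks the field as intentionally kept, which silences Clang's -Wunused-private-field. A minimal standalone sketch of the idiom (the class and members below are illustrative, not the actual DistortionStats definition):

class Stats {
 public:
  void Notify(float value) {
    (void)padding_;  // reference the member so -Wunused-private-field stays quiet
    sum_ += value;
  }

 private:
  double sum_ = 0.0;
  char padding_[8];  // kept only for layout; never read or written
};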
View File

@@ -633,30 +633,32 @@ void ForEachTensor(const Weights<TConfig>* weights,
        c_weights.c_final_norm_scale);
   char name[16];
-  for (size_t layer_idx = 0; layer_idx < TConfig::kLayers; ++layer_idx) {
-    Layer<TConfig>* layer = weights ? &weights->layers[layer_idx] : nullptr;
-    CompressedLayer<TConfig>* c_layer = c_weights.CLayer(layer_idx);
+  for (int layer_idx = 0; layer_idx < static_cast<int>(TConfig::kLayers);
+       ++layer_idx) {
+    const size_t idx = static_cast<size_t>(layer_idx);
+    Layer<TConfig>* layer = weights ? &weights->layers[idx] : nullptr;
+    CompressedLayer<TConfig>* c_layer = c_weights.CLayer(idx);
-    snprintf(name, sizeof(name), "pre_ff_ns_%lu", layer_idx);
+    snprintf(name, sizeof(name), "pre_ff_ns_%d", layer_idx);
     func(name, layer ? layer->pre_ffw_norm_scale.data() : nullptr,
          c_layer->c_pre_ffw_norm_scale);
-    snprintf(name, sizeof(name), "gating_ein_%lu", layer_idx);
+    snprintf(name, sizeof(name), "gating_ein_%d", layer_idx);
     func(name, layer ? layer->gating_einsum_w.data() : nullptr,
          c_layer->c_gating_einsum_w);
-    snprintf(name, sizeof(name), "linear_w_%lu", layer_idx);
+    snprintf(name, sizeof(name), "linear_w_%d", layer_idx);
     func(name, layer ? layer->linear_w.data() : nullptr, c_layer->c_linear_w);
-    snprintf(name, sizeof(name), "qkv_ein_%lu", layer_idx);
+    snprintf(name, sizeof(name), "qkv_ein_%d", layer_idx);
     func(name, layer ? layer->qkv_einsum_w.data() : nullptr,
          c_layer->c_qkv_einsum_w);
-    snprintf(name, sizeof(name), "att_ein_%lu", layer_idx);
+    snprintf(name, sizeof(name), "att_ein_%d", layer_idx);
     func(name, layer ? layer->attn_vec_einsum_w.data() : nullptr,
          c_layer->c_attn_vec_einsum_w);
-    snprintf(name, sizeof(name), "pre_att_ns_%lu", layer_idx);
+    snprintf(name, sizeof(name), "pre_att_ns_%d", layer_idx);
     func(name, layer ? layer->pre_attention_norm_scale.data() : nullptr,
          c_layer->c_pre_attention_norm_scale);
   }
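Two changes above work together to fix the cast warning: the loop index becomes an int (with an explicit cast back to size_t where an index is needed), and the snprintf conversion specifier changes from %lu to %d. Passing a size_t to %lu is only correct where size_t and unsigned long have the same width, so compilers warn on platforms such as 64-bit Windows. A minimal sketch of both options, using an illustrative "layer_" prefix rather than the real tensor names:

#include <cstddef>
#include <cstdio>

int main() {
  constexpr size_t kLayers = 4;
  char name[16];
  // Option taken in this commit: iterate with int and print with %d.
  for (int layer_idx = 0; layer_idx < static_cast<int>(kLayers); ++layer_idx) {
    std::snprintf(name, sizeof(name), "layer_%d", layer_idx);
    std::puts(name);
  }
  // Portable alternative: keep size_t and use the dedicated %zu specifier.
  for (size_t i = 0; i < kLayers; ++i) {
    std::snprintf(name, sizeof(name), "layer_%zu", i);
    std::puts(name);
  }
  return 0;
}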

View File

@@ -204,7 +204,7 @@ class ArgsBase {
   }
 };
-static bool HasHelp(int argc, char* argv[]) {
+static inline HWY_MAYBE_UNUSED bool HasHelp(int argc, char* argv[]) {
   // TODO(austinvhuang): handle case insensitivity
   if (argc == 1) {
     // no arguments - print help
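The HasHelp change addresses -Wunused-function: a function with internal linkage defined in a header triggers the warning in every translation unit that includes the header but never calls it. Marking the function inline and annotating it as possibly unused (HWY_MAYBE_UNUSED is Highway's portable wrapper for that attribute) tells the compiler the non-use is deliberate. A minimal sketch of the pattern using the standard C++17 attribute; the function name and the --help check are illustrative, not the repository's actual argument handling:

// In a header included by many translation units:
#include <cstring>

[[maybe_unused]] static inline bool HasHelpSketch(int argc, char* argv[]) {
  if (argc == 1) return true;  // no arguments - print help
  for (int i = 1; i < argc; ++i) {
    if (std::strcmp(argv[i], "--help") == 0) return true;
  }
  return false;
}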