moe cleanups
This commit is contained in:
parent
68cb7845e9
commit
d2f46f18ac
@@ -120,14 +120,12 @@ struct llama_hparams {
     uint32_t attn_head_dim = 0;
     bool mamba_rms_norm = false;
-    double rope_theta = 10000.0;
     uint32_t vocab_size = 0;
     uint32_t intermediate_size = 0;
     float mamba_expand = 0.0f;
     bool ssm_rms_norm = false;
     bool ssm_conv_bias = false;
     bool ssm_proj_bias = false;
-    uint32_t chunk_size = 0;
 
     // for hybrid state space models
     std::array<bool, LLAMA_MAX_LAYERS> recurrent_layer_arr;
 
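For reference, a minimal sketch of how the affected slice of llama_hparams would read after this hunk, reconstructed only from the context lines shown above: rope_theta and chunk_size are dropped, the remaining members are unchanged. The elided surrounding members and the LLAMA_MAX_LAYERS value are assumptions for illustration, not part of this diff.

// Sketch only: fields visible in the hunk, as they stand after this commit.
#include <array>
#include <cstdint>

#define LLAMA_MAX_LAYERS 512  // assumed value; not taken from this diff

struct llama_hparams {
    // ... earlier members elided ...
    uint32_t attn_head_dim     = 0;
    bool     mamba_rms_norm    = false;
    uint32_t vocab_size        = 0;
    uint32_t intermediate_size = 0;
    float    mamba_expand      = 0.0f;
    bool     ssm_rms_norm      = false;
    bool     ssm_conv_bias     = false;
    bool     ssm_proj_bias     = false;

    // for hybrid state space models
    std::array<bool, LLAMA_MAX_LAYERS> recurrent_layer_arr;

    // ... later members elided ...
};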