File tree Expand file tree Collapse file tree 2 files changed +20
-4
lines changed Expand file tree Collapse file tree 2 files changed +20
-4
lines changed Original file line number Diff line number Diff line change @@ -49,7 +49,10 @@ uint32_t llama_hparams::n_embd_v_gqa(uint32_t il) const {
4949 return n_embd_head_v * n_head_kv;
5050}
5151
52- uint32_t llama_hparams::n_embd_k_s() const {
52+ uint32_t llama_hparams::n_embd_k_s(uint32_t il) const {
53+     if (!recurrent_layer(il)) {
54+         return 0;
55+     }
5356 if (wkv_head_size != 0) {
5457 // for RWKV models
5558 return token_shift_count * n_embd;
@@ -60,7 +63,10 @@ uint32_t llama_hparams::n_embd_k_s() const {
6063 return (ssm_d_conv > 0 ? ssm_d_conv - 1 : 0) * ssm_d_inner;
6164}
6265
63- uint32_t llama_hparams::n_embd_v_s() const {
66+ uint32_t llama_hparams::n_embd_v_s(uint32_t il) const {
67+     if (!recurrent_layer(il)) {
68+         return 0;
69+     }
6470 if (wkv_head_size != 0) {
6571 // corresponds to RWKV's wkv_states size
6672 return n_embd * wkv_head_size;
@@ -70,6 +76,10 @@ uint32_t llama_hparams::n_embd_v_s() const {
7076 return ssm_d_state * ssm_d_inner;
7177}
7278
79+ bool llama_hparams::recurrent_layer(uint32_t il) const {
80+     return recurrent_layer_arr[il];
81+ }
82+
7383bool llama_hparams::is_swa(uint32_t il) const {
7484 if (il < n_layer) {
7585 return n_swa > 0 && n_swa_pattern > 0 && il % n_swa_pattern < (n_swa_pattern - 1);
Original file line number Diff line number Diff line change @@ -102,6 +102,9 @@ struct llama_hparams {
102102 uint32_t ssm_d_state = 0;
103103 uint32_t ssm_dt_rank = 0;
104104
105+ // for hybrid state space models
106+ std::array<bool, LLAMA_MAX_LAYERS> recurrent_layer_arr;
107+
105108 bool ssm_dt_b_c_rms = false;
106109
107110 float f_clamp_kqv = 0.0f;
@@ -149,10 +152,13 @@ struct llama_hparams {
149152
150153 // dimension of the rolling state embeddings
151154 // corresponds to Mamba's conv_states size or RWKV's token_shift states size
152- uint32_t n_embd_k_s() const;
155+ uint32_t n_embd_k_s(uint32_t il = 0) const;
153156
154157 // dimension of the recurrent state embeddings
155- uint32_t n_embd_v_s() const;
158+ uint32_t n_embd_v_s(uint32_t il = 0) const;
159+
160+ // whether or not the given layer is recurrent (for hybrid models)
161+ bool recurrent_layer(uint32_t il) const;
156162
157163 bool is_swa (uint32_t il) const ;
158164};
You can’t perform that action at this time.
0 commit comments