1 file changed: vllm/model_executor/models (+2, -5 lines)

@@ -1079,7 +1079,7 @@ def forward(
             self.layers[idx].self_attn.rotary_emb.prepare_cos_sin(
                 positions)
         for idx, decoder_layer in enumerate(self.layers):
-            if isinstance(decoder_layer, MllamaCrossAttentionDecoderLayer):
+            if idx in self.cross_attention_layers:
                 if not skip_cross_attention:
                     hidden_states = decoder_layer(
                         hidden_states=hidden_states,
@@ -1091,7 +1091,7 @@ def forward(
                         kv_cache=kv_caches[idx],
                         attn_metadata=attn_metadata,
                     )
-            elif isinstance(decoder_layer, LlamaDecoderLayer):
+            else:
                 hidden_states, residual = decoder_layer(
                     positions=positions,
                     hidden_states=hidden_states,
@@ -1100,9 +1100,6 @@ def forward(
                     residual=None,
                 )
                 hidden_states = hidden_states + residual
-            else:
-                raise ValueError(
-                    f"Unknown decoder layer type {type(decoder_layer)}")
         hidden_states = self.norm(hidden_states)
         return hidden_states
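The diff replaces the per-layer isinstance checks with index-based dispatch: a layer is treated as a cross-attention layer when its index appears in self.cross_attention_layers, and every other layer runs as a regular decoder layer, which makes the unreachable ValueError branch unnecessary. Below is a minimal sketch of that dispatch pattern, not the actual vLLM module; the ToyDecoder class, the 16-dim Linear stand-ins, and the constructor arguments are illustrative assumptions.

import torch
import torch.nn as nn


class ToyDecoder(nn.Module):
    """Illustrative only: dispatch decoder layers by index, not by class."""

    def __init__(self, num_layers: int, cross_attention_layers: list):
        super().__init__()
        # Indices of the cross-attention layers, e.g. [3, 8, 13, ...].
        self.cross_attention_layers = set(cross_attention_layers)
        # Stand-in layers; the real model uses Mllama/Llama decoder layers.
        self.layers = nn.ModuleList(
            [nn.Linear(16, 16) for _ in range(num_layers)])
        self.norm = nn.LayerNorm(16)

    def forward(self, hidden_states: torch.Tensor,
                skip_cross_attention: bool = False) -> torch.Tensor:
        for idx, decoder_layer in enumerate(self.layers):
            if idx in self.cross_attention_layers:
                # Cross-attention layer: skipped when there is no image input.
                if not skip_cross_attention:
                    hidden_states = decoder_layer(hidden_states)
            else:
                # Regular decoder layer: always runs.
                hidden_states = decoder_layer(hidden_states)
        return self.norm(hidden_states)


# Usage: layers 1 and 3 act as cross-attention layers.
model = ToyDecoder(num_layers=4, cross_attention_layers=[1, 3])
out = model(torch.randn(2, 16), skip_cross_attention=True)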