
Commit 6a8f898

skyworkr1v update
Signed-off-by: jiacai.liu <[email protected]>
1 parent 61c7a1b commit 6a8f898

7 files changed: +1517 −2 lines changed

vllm/entrypoints/chat_utils.py

Lines changed: 1 addition & 1 deletion
@@ -423,7 +423,7 @@ def _placeholder_str(self, modality: ModalityStr,
                 return self._cached_token_str(self._tokenizer,
                                               hf_config.image_token_index)
             if model_type in ("chameleon", "deepseek_vl_v2", "internvl_chat",
-                              "NVLM_D", "h2ovl_chat"):
+                              "skywork_chat", "NVLM_D", "h2ovl_chat"):
                 return "<image>"
             if model_type == "mllama":
                 return "<|image|>"
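With this hunk, chat requests that attach an image to a skywork_chat model get the plain <image> placeholder spliced into the prompt, following the same convention already used for internvl_chat, NVLM_D, and h2ovl_chat. Below is a minimal standalone sketch of that lookup; the helper name is hypothetical, and the real logic lives in _placeholder_str, which also handles token-index based placeholders via the tokenizer and HF config.

def image_placeholder(model_type: str) -> str:
    # Simplified, illustrative version of the branch shown in the diff above;
    # it only maps a model family to the placeholder string it expects.
    if model_type in ("chameleon", "deepseek_vl_v2", "internvl_chat",
                      "skywork_chat", "NVLM_D", "h2ovl_chat"):
        return "<image>"
    if model_type == "mllama":
        return "<|image|>"
    raise ValueError(f"no image placeholder known for {model_type!r}")

assert image_placeholder("skywork_chat") == "<image>"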

vllm/model_executor/models/registry.py

Lines changed: 1 addition & 0 deletions
@@ -167,6 +167,7 @@
     "GLM4VForCausalLM": ("glm4v", "GLM4VForCausalLM"),
     "H2OVLChatModel": ("h2ovl", "H2OVLChatModel"),
     "InternVLChatModel": ("internvl", "InternVLChatModel"),
+    "SkyworkR1VChatModel": ("skyworkr1v", "SkyworkR1VChatModel"),
     "Idefics3ForConditionalGeneration":("idefics3","Idefics3ForConditionalGeneration"),
     "LlavaForConditionalGeneration": ("llava", "LlavaForConditionalGeneration"),
     "LlavaNextForConditionalGeneration": ("llava_next", "LlavaNextForConditionalGeneration"),  # noqa: E501
