1 parent 18b7548 commit 0b95111
vllm/multimodal/processing.py
@@ -43,7 +43,7 @@ class PromptReplacementDetails:
     """
 
     @staticmethod
-    def from_seq(seq: _PromptSeq):
+    def from_seq(seq: _PromptSeq) -> "PromptReplacementDetails":
         return PromptReplacementDetails(full=seq, features=seq)
 
 
@@ -132,7 +132,10 @@ class _BoundPromptSequence:
     _token_ids: Optional[list[int]]
 
 
-    def from_seq(tokenizer: AnyTokenizer, seq: _PromptSeq):
+    def from_seq(
+        tokenizer: AnyTokenizer,
+        seq: _PromptSeq,
+    ) -> "_BoundPromptSequence":
         return _BoundPromptSequence(
             tokenizer=tokenizer,
             _text=seq if isinstance(seq, str) else None,
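
The change adds explicit return type annotations to two `from_seq` factory methods, so type checkers can infer what these constructors return instead of treating them as untyped. A minimal sketch of the idea, using a simplified stand-in for the first class (the dataclass fields here and the `_PromptSeq` alias as `str | list[int]` are assumptions based on the diff, not the actual vLLM definitions):

from dataclasses import dataclass
from typing import Union

# Assumed stand-in for vLLM's _PromptSeq alias: a prompt sequence is
# either raw text or a list of token IDs (suggested by the second hunk's
# isinstance(seq, str) check and the _token_ids: Optional[list[int]] field).
_PromptSeq = Union[str, list[int]]


@dataclass
class PromptReplacementDetails:
    # Simplified fields; the real class in vllm/multimodal/processing.py
    # may carry additional information.
    full: _PromptSeq
    features: _PromptSeq

    @staticmethod
    def from_seq(seq: _PromptSeq) -> "PromptReplacementDetails":
        # Mirrors the diff: the whole sequence doubles as its feature span.
        return PromptReplacementDetails(full=seq, features=seq)


# With the explicit return annotation, a type checker sees `details` as
# PromptReplacementDetails rather than an unannotated return value.
details = PromptReplacementDetails.from_seq([101, 102, 103])
print(details.full, details.features)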