2 files changed, +39 −2 lines changed
+"""
+Example of using the OpenAI entrypoint's rerank API which is compatible with
+the Cohere SDK: https://github.com/cohere-ai/cohere-python
+
+run: vllm serve --model BAAI/bge-reranker-base
+"""
+import cohere
+
+# cohere v1 client
+co = cohere.Client(base_url="http://localhost:8000", api_key="sk-fake-key")
+rerank_v1_result = co.rerank(
+    model="BAAI/bge-reranker-base",
+    query="What is the capital of France?",
+    documents=[
+        "The capital of France is Paris",
+        "Reranking is fun!",
+        "vLLM is an open-source framework for fast AI serving"
+    ]
+)
+
+print(rerank_v1_result)
+
+# or the v2
+co2 = cohere.ClientV2("sk-fake-key", base_url="http://localhost:8000")
+
+v2_rerank_result = co2.rerank(
+    model="BAAI/bge-reranker-base",
+    query="What is the capital of France?",
+    documents=[
+        "The capital of France is Paris",
+        "Reranking is fun!",
+        "vLLM is an open-source framework for fast AI serving"
+    ]
+)
+
+print(v2_rerank_result)
+
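Not part of the diff above, a minimal sketch of consuming the result: it assumes the Cohere Python SDK's rerank response exposes a `results` list whose items carry `index` and `relevance_score` (SDK field names, not defined in this PR), and it reuses `rerank_v1_result` and the document strings from the new example file.

# Sketch, not part of the PR: rank the documents from the v1 example above
# by the reranker's score (assumes the Cohere SDK response shape).
documents = [
    "The capital of France is Paris",
    "Reranking is fun!",
    "vLLM is an open-source framework for fast AI serving",
]
ranked = sorted(rerank_v1_result.results, key=lambda r: r.relevance_score, reverse=True)
for item in ranked:
    # `index` points back into the original `documents` list.
    print(f"{item.relevance_score:.4f}  {documents[item.index]}")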
 """
 Example of using the OpenAI entrypoint's rerank API which is compatible with
-Jina and Cohere
+Jina and Cohere https://jina.ai/reranker
+
 run: vllm serve --model BAAI/bge-reranker-base
 """
 import json
...
         "The capital of France is Paris.", "Horses and cows are both animals"
     ]
 }
-
 response = requests.post(url, headers=headers, json=data)

 # Check the response
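The diff is truncated before the response-handling code, so the following is only a hedged sketch of what checking the response could look like in this requests-based flow (`response` and the `json` import come from the file above); it is not the file's actual code.

# Sketch, not the file's actual code: inspect the rerank endpoint's reply.
if response.status_code == 200:
    # Pretty-print the JSON body returned by the rerank endpoint.
    print(json.dumps(response.json(), indent=2))
else:
    print(f"Request failed with status code {response.status_code}: {response.text}")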