Skip to content

Commit 05cd6e5

Browse files
authored
server : recognize cache_prompt parameter in OAI API (ggml-org#4347)
1 parent caa9249 commit 05cd6e5

File tree

1 file changed

+1
-0
lines changed

1 file changed

+1
-0
lines changed

Diff for: examples/server/server.cpp

+1
Original file line numberDiff line numberDiff line change
@@ -2387,6 +2387,7 @@ json oaicompat_completion_params_parse(
23872387

23882388
// Map OpenAI parameters to llama.cpp parameters
23892389
llama_params["prompt"] = format_chatml(body["messages"]); // OpenAI 'messages' to llama.cpp 'prompt'
2390+
llama_params["cache_prompt"] = json_value(body, "cache_prompt", false);
23902391
llama_params["temperature"] = json_value(body, "temperature", 0.8);
23912392
llama_params["top_k"] = json_value(body, "top_k", 40);
23922393
llama_params["top_p"] = json_value(body, "top_p", 0.95);

0 commit comments

Comments (0)