Skip to content

Commit 3969aa3

Browse files
committed
llama.vim : update infill API params [no ci]
1 parent aebe34f commit 3969aa3

File tree

1 file changed

+4
-4
lines changed

1 file changed

+4
-4
lines changed

examples/llama.vim

+4-4
Original file line numberDiff line numberDiff line change
@@ -287,16 +287,16 @@ function! s:ring_update()
287287

288288
" no samplers needed here
289289
let l:request = json_encode({
290-
\ 'prompt': "",
291290
\ 'input_prefix': "",
292291
\ 'input_suffix': "",
292+
\ 'input_extra': l:extra_context,
293+
\ 'prompt': "",
293294
\ 'n_predict': 1,
294295
\ 'penalty_last_n': 0,
295296
\ 'temperature': 0.0,
296297
\ 'stream': v:false,
297298
\ 'samplers': ["temperature"],
298299
\ 'cache_prompt': v:true,
299-
\ 'extra_context': l:extra_context,
300300
\ 't_max_prompt_ms': 1,
301301
\ 't_max_predict_ms': 1
302302
\ })
@@ -379,16 +379,16 @@ function! llama#fim(is_auto, on_hold) abort
379379

380380
let l:request = json_encode({
381381
\ 'input_prefix': l:prefix,
382-
\ 'prompt': l:prompt,
383382
\ 'input_suffix': l:suffix,
383+
\ 'input_extra': l:extra_context,
384+
\ 'prompt': l:prompt,
384385
\ 'n_predict': g:llama_config.n_predict,
385386
\ 'penalty_last_n': 0,
386387
\ 'top_k': 40,
387388
\ 'top_p': 0.99,
388389
\ 'stream': v:false,
389390
\ 'samplers': ["top_k", "top_p", "infill"],
390391
\ 'cache_prompt': v:true,
391-
\ 'extra_context': l:extra_context,
392392
\ 't_max_prompt_ms': g:llama_config.t_max_prompt_ms,
393393
\ 't_max_predict_ms': g:llama_config.t_max_predict_ms
394394
\ })

0 commit comments

Comments (0)