
Commit e1675d1

llama : avoid fprintf in favor of LLAMA_LOG (#3538)
1 parent 8402566 commit e1675d1

2 files changed: +3 −3 lines changed

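The change is mechanical: raw fprintf(stderr, ...) calls inside the library are replaced with the LLAMA_LOG_WARN / LLAMA_LOG_INFO macros, so each message carries an explicit severity and flows through llama.cpp's logging facility instead of being hard-wired to stderr. Below is a minimal sketch of that macro pattern; the log_level enum, log_internal function, and MY_LOG_* names are illustrative stand-ins, not llama.cpp's actual definitions.

#include <cstdarg>
#include <cstdio>

// Illustrative severity levels and sink (not llama.cpp's real definitions).
enum class log_level { info, warn, error };

static void log_internal(log_level level, const char * fmt, ...) {
    const char * tag = level == log_level::warn  ? "W"
                     : level == log_level::error ? "E" : "I";
    std::fprintf(stderr, "[%s] ", tag);
    va_list args;
    va_start(args, fmt);
    std::vfprintf(stderr, fmt, args);
    va_end(args);
}

// Variadic macros mirror the LLAMA_LOG_WARN / LLAMA_LOG_INFO call sites in the diff below.
#define MY_LOG_INFO(...) log_internal(log_level::info, __VA_ARGS__)
#define MY_LOG_WARN(...) log_internal(log_level::warn, __VA_ARGS__)

int main() {
    // Before: fprintf(stderr, "%s: warning: Mismatch ...", __func__, ...);
    // After:  one macro call per message, severity stated explicitly.
    MY_LOG_WARN("%s: mismatch in special tokens definition ( %u/%zu vs %u/%zu ).\n",
                __func__, 259u, (size_t) 32000, 261u, (size_t) 32000);
    MY_LOG_INFO("%s: special tokens definition check successful ( %u/%zu ).\n",
                __func__, 259u, (size_t) 32000);
    return 0;
}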

examples/main/main.cpp

Lines changed: 1 addition & 1 deletion

@@ -799,7 +799,7 @@ int main(int argc, char ** argv) {
             }

             const auto line_pfx = ::llama_tokenize(ctx, params.input_prefix, false, true);
-            const auto line_inp = ::llama_tokenize(ctx, buffer, false, false);
+            const auto line_inp = ::llama_tokenize(ctx, buffer, false, false);
             const auto line_sfx = ::llama_tokenize(ctx, params.input_suffix, false, true);
             LOG("input tokens: %s\n", LOG_TOKENS_TOSTR_PRETTY(ctx, line_inp));

llama.cpp

Lines changed: 2 additions & 2 deletions

@@ -2327,13 +2327,13 @@ static void llm_load_vocab(
    }

    if (special_tokens_definition_mismatch || special_tokens_count_from_verification != special_tokens_count_by_type) {
-        fprintf(stderr, "%s: warning: Mismatch in special tokens definition ( %u/%zu vs %u/%zu ).\n",
+        LLAMA_LOG_WARN("%s: mismatch in special tokens definition ( %u/%zu vs %u/%zu ).\n",
            __func__,
            special_tokens_count_from_verification, vocab.id_to_token.size(),
            special_tokens_count_by_type, vocab.id_to_token.size()
        );
    } else {
-        fprintf(stderr, "%s: Special tokens definition check successful ( %u/%zu ).\n",
+        LLAMA_LOG_INFO("%s: special tokens definition check successful ( %u/%zu ).\n",
            __func__,
            special_tokens_count_from_verification, vocab.id_to_token.size()
        );
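Routing these messages through the LLAMA_LOG macros matters downstream because llama.cpp lets the host application install a log callback (llama_log_set in llama.h), so library output can be captured, filtered, or silenced rather than always landing on stderr. The sketch below shows that redirection idea with a self-contained stand-in; the callback typedef and the log_set / log_internal names are assumptions for illustration and do not reproduce llama.h's exact signatures.

#include <cstdarg>
#include <cstdio>
#include <string>
#include <vector>

// Assumed callback shape: (severity, formatted text, user_data).
// llama.cpp exposes a similar hook via llama_log_set(); the exact types differ.
using log_callback = void (*)(int level, const char * text, void * user_data);

static log_callback g_callback = nullptr;
static void *       g_userdata = nullptr;

// Install (or clear) the callback that receives every formatted log message.
void log_set(log_callback cb, void * user_data) {
    g_callback = cb;
    g_userdata = user_data;
}

static void log_internal(int level, const char * fmt, ...) {
    char buf[1024];
    va_list args;
    va_start(args, fmt);
    std::vsnprintf(buf, sizeof(buf), fmt, args);
    va_end(args);
    if (g_callback) {
        g_callback(level, buf, g_userdata);  // application decides where the text goes
    } else {
        std::fputs(buf, stderr);             // default: same destination as plain fprintf
    }
}

int main() {
    std::vector<std::string> captured;

    // Capture library messages into a buffer instead of printing them.
    log_set([](int /*level*/, const char * text, void * ud) {
        static_cast<std::vector<std::string> *>(ud)->push_back(text);
    }, &captured);

    log_internal(/*level=*/1, "%s: mismatch in special tokens definition ( %u/%zu vs %u/%zu ).\n",
                 __func__, 259u, (size_t) 32000, 261u, (size_t) 32000);

    std::printf("captured %zu log line(s)\n", captured.size());
    return 0;
}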
