chore(deps): bump llama.cpp to 'd82b7a7c1d73c0674698d9601b1bbb0200933f29' (#7392)

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
Ettore Di Giacinto
2025-11-29 08:58:07 +01:00
committed by GitHub
parent 53e5b2d6be
commit 468ac608f3
2 changed files with 4 additions and 4 deletions

View File

@@ -1,5 +1,5 @@
-LLAMA_VERSION?=4abef75f2cf2eee75eb5083b30a94cf981587394
+LLAMA_VERSION?=d82b7a7c1d73c0674698d9601b1bbb0200933f29
LLAMA_REPO?=https://github.com/ggerganov/llama.cpp
CMAKE_ARGS?=

View File

@@ -1199,7 +1199,7 @@ public:
task.id_slot = json_value(data, "id_slot", -1);
// OAI-compat
-task.params.oaicompat = OAICOMPAT_TYPE_NONE;
+task.params.res_type = TASK_RESPONSE_TYPE_NONE;
task.params.oaicompat_cmpl_id = completion_id;
// oaicompat_model is already populated by params_from_json_cmpl
@@ -1930,7 +1930,7 @@ public:
task.id_slot = json_value(data, "id_slot", -1);
// OAI-compat
-task.params.oaicompat = OAICOMPAT_TYPE_NONE;
+task.params.res_type = TASK_RESPONSE_TYPE_NONE;
task.params.oaicompat_cmpl_id = completion_id;
// oaicompat_model is already populated by params_from_json_cmpl
@@ -2056,7 +2056,7 @@ public:
task.index = i;
task.tokens = std::move(tokenized_prompts[i]);
-task.params.oaicompat = OAICOMPAT_TYPE_NONE;
+task.params.res_type = TASK_RESPONSE_TYPE_NONE;
task.params.embd_normalize = embd_normalize;
tasks.push_back(std::move(task));
}