Mirror of https://github.com/mudler/LocalAI.git
fix(reranker): llama-cpp sort score desc, crop top_n (#7211)
Signed-off-by: Mikhail Khludnev <mkhl@apache.org>
@@ -1394,7 +1394,16 @@ public:
 		if (error) {
 			return grpc::Status(grpc::StatusCode::INTERNAL, "Error in receiving results");
 		}
+		// Sort responses by score in descending order
+		std::sort(responses.begin(), responses.end(), [](const json& a, const json& b) {
+			return a.value("score", 0.0f) > b.value("score", 0.0f);
+		});
+
+		// Crop results by request.top_n if specified
+		int top_n = request->top_n();
+		if (top_n > 0 && top_n < static_cast<int>(responses.size())) {
+			responses = json(responses.begin(), responses.begin() + top_n);
+		}
 		// Set usage information
 		backend::Usage* usage = rerankResult->mutable_usage();
 		int total_tokens = 0;
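
The hunk above sorts the reranker responses in place by score and then truncates the JSON array to the requested top_n. Below is a minimal, standalone sketch of that same sort-then-crop step, assuming the `json` type is nlohmann::json as in the llama.cpp backend; the `crop_top_n` helper name and the sample scores are made up for illustration and are not part of the LocalAI code.

// Illustrative sketch only: sorts a JSON array of {index, score} objects by
// score, highest first, then keeps the first top_n entries. Assumes
// nlohmann::json; crop_top_n and the sample data are hypothetical.
#include <algorithm>
#include <iostream>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

json crop_top_n(json responses, int top_n) {
    // Descending sort; entries without a "score" field fall back to 0.0f.
    std::sort(responses.begin(), responses.end(), [](const json& a, const json& b) {
        return a.value("score", 0.0f) > b.value("score", 0.0f);
    });
    // Keep only the first top_n elements when top_n is positive and smaller
    // than the array, mirroring the range-constructor crop in the diff.
    if (top_n > 0 && top_n < static_cast<int>(responses.size())) {
        responses = json(responses.begin(), responses.begin() + top_n);
    }
    return responses;
}

int main() {
    json responses = json::array({
        {{"index", 0}, {"score", 0.12}},
        {{"index", 1}, {"score", 0.87}},
        {{"index", 2}, {"score", 0.45}},
    });
    // Prints the two highest-scoring entries: index 1, then index 2.
    std::cout << crop_top_n(responses, 2).dump(2) << std::endl;
    return 0;
}

Because the comparator falls back to 0.0f when an entry carries no "score" field, unscored entries tend to sink toward the end of the array before the crop is applied.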