Mirror of https://github.com/mudler/LocalAI.git (synced 2026-01-06 10:39:55 -06:00)
* WIP - add endpoint
* Rename
* Wire the Completion API
* Try to make it functional
* Almost functional
* Bump golang versions used in tests
* Add description of the tool
* Make it working
* Small optimizations
* Cleanup/refactor
* Update docs

---------

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
136 lines
3.8 KiB
Go
package openai

import (
	"encoding/json"
	"errors"
	"strings"
	"sync"
	"time"

	"github.com/mudler/LocalAI/core/config"
	"github.com/mudler/LocalAI/core/http/endpoints/mcp"
	"github.com/mudler/LocalAI/core/http/middleware"

	"github.com/gofiber/fiber/v2"
	"github.com/google/uuid"
	"github.com/mudler/LocalAI/core/schema"
	"github.com/mudler/LocalAI/core/templates"
	"github.com/mudler/LocalAI/pkg/model"
	"github.com/mudler/cogito"
	"github.com/rs/zerolog/log"
)

// MCPCompletionEndpoint is the OpenAI Completion API endpoint https://platform.openai.com/docs/api-reference/completions
// @Summary Generate completions for a given prompt and model.
// @Param request body schema.OpenAIRequest true "query params"
// @Success 200 {object} schema.OpenAIResponse "Response"
// @Router /mcp/v1/completions [post]
func MCPCompletionEndpoint(cl *config.ModelConfigLoader, ml *model.ModelLoader, evaluator *templates.Evaluator, appConfig *config.ApplicationConfig) func(c *fiber.Ctx) error {
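
	// toolsCache memoizes the MCP tools discovered for each model name. Entries
	// live as long as this handler closure and are never evicted; mu serializes
	// access from concurrent requests.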
	toolsCache := map[string][]*mcp.MCPTool{}
	var mu sync.Mutex

	// We do not support streaming mode (Yet?)
	return func(c *fiber.Ctx) error {
		created := int(time.Now().Unix())

		ctx := c.Context()

		// Handle Correlation
		id := c.Get("X-Correlation-ID", uuid.New().String())

		input, ok := c.Locals(middleware.CONTEXT_LOCALS_KEY_LOCALAI_REQUEST).(*schema.OpenAIRequest)
		if !ok || input.Model == "" {
			return fiber.ErrBadRequest
		}

		config, ok := c.Locals(middleware.CONTEXT_LOCALS_KEY_MODEL_CONFIG).(*config.ModelConfig)
		if !ok || config == nil {
			return fiber.ErrBadRequest
		}
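
		// Both the request and the model config are injected by the request
		// middleware; the checks above only guard against a missing model or a
		// broken pipeline.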

		allTools := []*mcp.MCPTool{}

		// Get the MCP config from the model config
		remote, stdio := config.MCP.MCPConfigFromYAML()

		// Check if we already have tools in the cache, or whether we still need
		// to make an initial connection to the configured MCP servers
		mu.Lock()
		tools, exists := toolsCache[config.Name]
		if exists {
			allTools = append(allTools, tools...)
		} else {
			tools, err := mcp.ToolsFromMCPConfig(ctx, remote, stdio)
			if err != nil {
				mu.Unlock()
				return err
			}

			toolsCache[config.Name] = tools
			allTools = append(allTools, tools...)
		}
		mu.Unlock()

		cogitoTools := []cogito.Tool{}
		for _, tool := range allTools {
			cogitoTools = append(cogitoTools, tool)
			// defer tool.Close()
		}

		fragment := cogito.NewEmptyFragment()

		for _, message := range input.Messages {
			fragment = fragment.AddMessage(message.Role, message.StringContent)
		}
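
		// The fragment now carries the full conversation history that the agent
		// loop below will work from.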

		port := appConfig.APIAddress[strings.LastIndex(appConfig.APIAddress, ":")+1:]
		apiKey := ""
		if len(appConfig.ApiKeys) > 0 {
			apiKey = appConfig.ApiKeys[0]
		}
		// TODO: instead of connecting to the API, we should just wire this internally
		// and act like completion.go.
		// We can do this because cogito expects an interface, so we can provide an
		// implementation that simply calls ComputeChoices internally.
		defaultLLM := cogito.NewOpenAILLM(config.Name, apiKey, "http://127.0.0.1:"+port)
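
		// A rough sketch of that TODO (hypothetical, not part of this change):
		// ExecuteTools and Ask only need something shaped like defaultLLM, so an
		// in-process adapter could satisfy the same contract and skip HTTP, e.g.:
		//
		//	type localLLM struct{ /* model loader, config, evaluator */ }
		//
		//	func (l *localLLM) Ask(ctx context.Context, f cogito.Fragment) (cogito.Fragment, error) {
		//		// translate f into an OpenAIRequest, run ComputeChoices, and
		//		// append the result to the fragment
		//	}
		//
		// The exact interface cogito requires is an assumption here and would
		// need to be checked against the cogito sources.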

		f, err := cogito.ExecuteTools(
			defaultLLM, fragment,
			cogito.WithStatusCallback(func(s string) {
				log.Debug().Msgf("[model agent] [model: %s] Status: %s", config.Name, s)
			}),
			cogito.WithContext(ctx),
			// TODO: move these to configs
			cogito.EnableToolReEvaluator,
			cogito.WithIterations(3),
			cogito.WithMaxAttempts(3),
			cogito.WithTools(
				cogitoTools...,
			),
		)
		if err != nil && !errors.Is(err, cogito.ErrNoToolSelected) {
			return err
		}
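
		// With any tool results folded into the fragment, ask the model for the
		// final completion text. ErrNoToolSelected above is deliberately tolerated:
		// a prompt that needs no tools should still produce an answer.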
		f, err = defaultLLM.Ask(ctx, f)
		if err != nil {
			return err
		}

		resp := &schema.OpenAIResponse{
			ID:      id,
			Created: created,
			Model:   input.Model, // we have to return what the user sent here, due to OpenAI spec.
			Choices: []schema.Choice{{Text: f.LastMessage().Content}},
			Object:  "text_completion",
		}

		jsonResult, _ := json.Marshal(resp)
		log.Debug().Msgf("Response: %s", jsonResult)

		// Return the prediction in the response body
		return c.JSON(resp)
	}
}
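
// Example usage (a minimal sketch, assuming a LocalAI instance listening on
// :8080 and a model named "my-model" whose config carries an `mcp:` section;
// both names are illustrative):
//
//	body := `{"model": "my-model", "messages": [{"role": "user", "content": "What is the weather in Rome?"}]}`
//	resp, err := http.Post("http://127.0.0.1:8080/mcp/v1/completions", "application/json", strings.NewReader(body))
//
// The reply is a schema.OpenAIResponse with Object "text_completion" and the
// agent's final answer in Choices[0].Text.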