fix: security scanner warning noise: error handlers part 2 (#2145)

Check off a few more ignored error returns flagged by the security scanner: the json.Unmarshal and os.Setenv call sites now log failures, and the ModelLoader stop/delete helpers now propagate them.

Signed-off-by: Dave Lee <dave@gray101.com>
Dave
2024-04-29 09:11:42 -04:00
committed by GitHub
parent b7ea9602f5
commit 11c48a0004
11 changed files with 82 additions and 24 deletions
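Every hunk below applies the same remediation: capture the error return that was previously discarded and either log it through zerolog or hand it back to the caller. A minimal standalone sketch of the logging flavor (the payload and message are illustrative, not lifted from the repo):

package main

import (
	"encoding/json"

	"github.com/rs/zerolog/log"
)

func main() {
	var out map[string]interface{}
	payload := []byte(`{"name": "add", "arguments":`) // deliberately truncated JSON
	// Before: the error from json.Unmarshal was silently dropped.
	// After: it is checked and logged, so the scanner (and operators) can see the failure.
	if err := json.Unmarshal(payload, &out); err != nil {
		log.Error().Err(err).Msg("error unmarshalling payload")
	}
}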
+10 -2
@@ -2,6 +2,8 @@ package functions
 import (
     "encoding/json"
+
+    "github.com/rs/zerolog/log"
 )

 type Function struct {
@@ -30,8 +32,14 @@ func (f Functions) ToJSONStructure() JSONFunctionStructure {
         prop := map[string]interface{}{}
         defsD := map[string]interface{}{}
-        json.Unmarshal(dat, &prop)
-        json.Unmarshal(dat2, &defsD)
+        err := json.Unmarshal(dat, &prop)
+        if err != nil {
+            log.Error().Err(err).Msg("error unmarshalling dat")
+        }
+        err = json.Unmarshal(dat2, &defsD)
+        if err != nil {
+            log.Error().Err(err).Msg("error unmarshalling dat2")
+        }
         if js.Defs == nil {
             js.Defs = defsD
         }
+8 -2
@@ -59,7 +59,10 @@ func ParseFunctionCall(llmresult string, functionConfig FunctionsConfig) []FuncC
     if multipleResults {
         ss := []map[string]interface{}{}
         s := utils.EscapeNewLines(llmresult)
-        json.Unmarshal([]byte(s), &ss)
+        err := json.Unmarshal([]byte(s), &ss)
+        if err != nil {
+            log.Error().Err(err).Str("escapedLLMResult", s).Msg("multiple results: unable to unmarshal llm result")
+        }
         log.Debug().Msgf("Function return: %s %+v", s, ss)
         for _, s := range ss {
@@ -83,7 +86,10 @@ func ParseFunctionCall(llmresult string, functionConfig FunctionsConfig) []FuncC
         ss := map[string]interface{}{}
         // This prevents newlines from breaking JSON parsing for clients
         s := utils.EscapeNewLines(llmresult)
-        json.Unmarshal([]byte(s), &ss)
+        err := json.Unmarshal([]byte(s), &ss)
+        if err != nil {
+            log.Error().Err(err).Str("escapedLLMResult", s).Msg("unable to unmarshal llm result")
+        }
         log.Debug().Msgf("Function return: %s %+v", s, ss)
         // The grammar defines the function name as "function", while OpenAI returns "name"
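Both ParseFunctionCall hunks run the raw LLM output through utils.EscapeNewLines before unmarshalling. That helper's body is not part of this diff; a plausible sketch, assuming it only rewrites newlines found inside quoted JSON strings, might look like:

package utils

import (
	"regexp"
	"strings"
)

// quotedSegment matches double-quoted spans that contain no embedded quotes.
var quotedSegment = regexp.MustCompile(`"[^"]*"`)

// EscapeNewLines is a hypothetical reconstruction, not the repo's actual code:
// it rewrites raw newlines inside quoted strings as the two-character escape
// sequence \n, so multi-line model output stays valid JSON.
func EscapeNewLines(s string) string {
	return quotedSegment.ReplaceAllStringFunc(s, func(q string) string {
		return strings.ReplaceAll(q, "\n", `\n`)
	})
}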
+14 -3
@@ -70,7 +70,10 @@ func (ml *ModelLoader) grpcModel(backend string, o *Options) func(string, string
     // If no specific model path is set for transformers/HF, set it to the model path
     for _, env := range []string{"HF_HOME", "TRANSFORMERS_CACHE", "HUGGINGFACE_HUB_CACHE"} {
         if os.Getenv(env) == "" {
-            os.Setenv(env, ml.ModelPath)
+            err := os.Setenv(env, ml.ModelPath)
+            if err != nil {
+                log.Error().Err(err).Str("name", env).Str("modelPath", ml.ModelPath).Msg("unable to set environment variable to modelPath")
+            }
         }
     }
@@ -184,8 +187,13 @@ func (ml *ModelLoader) BackendLoader(opts ...Option) (client grpc.Backend, err e
     if o.singleActiveBackend {
         ml.mu.Lock()
         log.Debug().Msgf("Stopping all backends except '%s'", o.model)
-        ml.StopAllExcept(o.model)
+        err := ml.StopAllExcept(o.model)
         ml.mu.Unlock()
+        if err != nil {
+            log.Error().Err(err).Str("keptModel", o.model).Msg("error while shutting down all backends except for the keptModel")
+            return nil, err
+        }
     }

     var backendToConsume string
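One detail in the BackendLoader hunk deserves a note: the error is captured while the mutex is held, the mutex is released unconditionally, and only then does the early return fire, so the new error path cannot leak the lock. A stripped-down, self-contained sketch of that ordering (types and names are hypothetical):

package main

import (
	"errors"
	"fmt"
	"sync"
)

type loader struct {
	mu sync.Mutex
}

// stopAllExcept stands in for the real shutdown work; it always fails here
// so the error path below is exercised.
func (l *loader) stopAllExcept(keep string) error {
	return errors.New("simulated shutdown failure")
}

// stopOthers mirrors the ordering in the hunk above: do the work under the
// lock, release the lock unconditionally, then branch on the error, so the
// early return can never leak the mutex.
func (l *loader) stopOthers(keep string) error {
	l.mu.Lock()
	err := l.stopAllExcept(keep)
	l.mu.Unlock()
	if err != nil {
		return err
	}
	return nil
}

func main() {
	l := &loader{}
	fmt.Println(l.stopOthers("llama-3"))
}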
@@ -224,7 +232,10 @@ func (ml *ModelLoader) GreedyLoader(opts ...Option) (grpc.Backend, error) {
     // If we can have only one backend active, kill all the others (except external backends)
     if o.singleActiveBackend {
         log.Debug().Msgf("Stopping all backends except '%s'", o.model)
-        ml.StopAllExcept(o.model)
+        err := ml.StopAllExcept(o.model)
+        if err != nil {
+            log.Error().Err(err).Str("keptModel", o.model).Msg("error while shutting down all backends except for the keptModel - greedyloader continuing")
+        }
     }
     ml.mu.Unlock()
+4 -1
@@ -174,7 +174,10 @@ func (ml *ModelLoader) CheckIsLoaded(s string) ModelAddress {
         if !ml.grpcProcesses[s].IsAlive() {
             log.Debug().Msgf("GRPC Process is not responding: %s", s)
             // stop and delete the process; this forces the model to be reloaded and the service re-created
-            ml.deleteProcess(s)
+            err := ml.deleteProcess(s)
+            if err != nil {
+                log.Error().Err(err).Str("process", s).Msg("error stopping process")
+            }
             return ""
         }
     }
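Two error policies recur across these hunks: log-and-continue where the caller has no useful recovery path (the unmarshal sites, GreedyLoader, and CheckIsLoaded keep their original control flow), and log-and-propagate where the caller can abort (BackendLoader returns the error to its caller).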
+10 -6
@@ -1,6 +1,7 @@
 package model

 import (
+    "errors"
     "fmt"
     "os"
     "os/signal"
@@ -14,8 +15,8 @@ import (
     "github.com/rs/zerolog/log"
 )

-func (ml *ModelLoader) StopAllExcept(s string) {
-    ml.StopGRPC(func(id string, p *process.Process) bool {
+func (ml *ModelLoader) StopAllExcept(s string) error {
+    return ml.StopGRPC(func(id string, p *process.Process) bool {
         if id != s {
             for ml.models[id].GRPC(false, ml.wd).IsBusy() {
                 log.Debug().Msgf("%s busy. Waiting.", id)
@@ -43,16 +44,19 @@ func includeAllProcesses(_ string, _ *process.Process) bool {
     return true
 }

-func (ml *ModelLoader) StopGRPC(filter GRPCProcessFilter) {
+func (ml *ModelLoader) StopGRPC(filter GRPCProcessFilter) error {
+    var err error = nil
     for k, p := range ml.grpcProcesses {
         if filter(k, p) {
-            ml.deleteProcess(k)
+            e := ml.deleteProcess(k)
+            err = errors.Join(err, e)
         }
     }
+    return err
 }

-func (ml *ModelLoader) StopAllGRPC() {
-    ml.StopGRPC(includeAllProcesses)
+func (ml *ModelLoader) StopAllGRPC() error {
+    return ml.StopGRPC(includeAllProcesses)
 }

 func (ml *ModelLoader) GetGRPCPID(id string) (int, error) {
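The StopGRPC rewrite keeps iterating after a failure and aggregates everything with errors.Join (Go 1.20+), which discards nil arguments, so the explicit `var err error = nil` accumulator never pollutes the result. A self-contained sketch of the same accumulation pattern (names hypothetical):

package main

import (
	"errors"
	"fmt"
)

// stopAll mirrors the new StopGRPC: try every entry, join whatever fails,
// and hand the caller one error wrapping all of them.
func stopAll(procs map[string]func() error) error {
	var err error
	for name, stop := range procs {
		if e := stop(); e != nil {
			err = errors.Join(err, fmt.Errorf("%s: %w", name, e))
		}
	}
	return err // nil only if every stop succeeded
}

func main() {
	procs := map[string]func() error{
		"backend-a": func() error { return nil },
		"backend-b": func() error { return errors.New("kill failed") },
	}
	if err := stopAll(procs); err != nil {
		fmt.Println(err) // backend-b: kill failed
	}
}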