fix: security scanner warning noise: error handlers part 2 (#2145)

check off a few more error handlers

Signed-off-by: Dave Lee <dave@gray101.com>
Dave 2024-04-29 09:11:42 -04:00 committed by GitHub
parent b7ea9602f5
commit 11c48a0004
11 changed files with 82 additions and 24 deletions


@@ -70,7 +70,10 @@ func (ml *ModelLoader) grpcModel(backend string, o *Options) func(string, string
 		// If no specific model path is set for transformers/HF, set it to the model path
 		for _, env := range []string{"HF_HOME", "TRANSFORMERS_CACHE", "HUGGINGFACE_HUB_CACHE"} {
 			if os.Getenv(env) == "" {
-				os.Setenv(env, ml.ModelPath)
+				err := os.Setenv(env, ml.ModelPath)
+				if err != nil {
+					log.Error().Err(err).Str("name", env).Str("modelPath", ml.ModelPath).Msg("unable to set environment variable to modelPath")
+				}
 			}
 		}
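
For reference, the env-var handling above can be exercised on its own. A minimal sketch, assuming zerolog is available and using an illustrative model path rather than the loader's real configuration:

package main

import (
	"os"

	"github.com/rs/zerolog/log"
)

func main() {
	modelPath := "/tmp/models" // illustrative value, not the project's default
	// Only populate the HF cache variables when they are unset, and log
	// (rather than silently discard) any failure from os.Setenv.
	for _, env := range []string{"HF_HOME", "TRANSFORMERS_CACHE", "HUGGINGFACE_HUB_CACHE"} {
		if os.Getenv(env) == "" {
			if err := os.Setenv(env, modelPath); err != nil {
				log.Error().Err(err).Str("name", env).Str("modelPath", modelPath).Msg("unable to set environment variable to modelPath")
			}
		}
	}
}
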
@@ -184,8 +187,13 @@ func (ml *ModelLoader) BackendLoader(opts ...Option) (client grpc.Backend, err e
 	if o.singleActiveBackend {
 		ml.mu.Lock()
 		log.Debug().Msgf("Stopping all backends except '%s'", o.model)
-		ml.StopAllExcept(o.model)
+		err := ml.StopAllExcept(o.model)
 		ml.mu.Unlock()
+		if err != nil {
+			log.Error().Err(err).Str("keptModel", o.model).Msg("error while shutting down all backends except for the keptModel")
+			return nil, err
+		}
 	}
 
 	var backendToConsume string
@@ -224,7 +232,10 @@ func (ml *ModelLoader) GreedyLoader(opts ...Option) (grpc.Backend, error) {
 	// If we can have only one backend active, kill all the others (except external backends)
 	if o.singleActiveBackend {
 		log.Debug().Msgf("Stopping all backends except '%s'", o.model)
-		ml.StopAllExcept(o.model)
+		err := ml.StopAllExcept(o.model)
+		if err != nil {
+			log.Error().Err(err).Str("keptModel", o.model).Msg("error while shutting down all backends except for the keptModel - greedyloader continuing")
+		}
 	}
 	ml.mu.Unlock()


@@ -174,7 +174,10 @@ func (ml *ModelLoader) CheckIsLoaded(s string) ModelAddress {
 		if !ml.grpcProcesses[s].IsAlive() {
 			log.Debug().Msgf("GRPC Process is not responding: %s", s)
 			// stop and delete the process, this forces to re-load the model and re-create again the service
-			ml.deleteProcess(s)
+			err := ml.deleteProcess(s)
+			if err != nil {
+				log.Error().Err(err).Str("process", s).Msg("error stopping process")
+			}
 			return ""
 		}
 	}


@@ -1,6 +1,7 @@
 package model
 
 import (
+	"errors"
 	"fmt"
 	"os"
 	"os/signal"
@@ -14,8 +15,8 @@ import (
 	"github.com/rs/zerolog/log"
 )
 
-func (ml *ModelLoader) StopAllExcept(s string) {
-	ml.StopGRPC(func(id string, p *process.Process) bool {
+func (ml *ModelLoader) StopAllExcept(s string) error {
+	return ml.StopGRPC(func(id string, p *process.Process) bool {
 		if id != s {
 			for ml.models[id].GRPC(false, ml.wd).IsBusy() {
 				log.Debug().Msgf("%s busy. Waiting.", id)
@@ -43,16 +44,19 @@ func includeAllProcesses(_ string, _ *process.Process) bool {
 	return true
 }
 
-func (ml *ModelLoader) StopGRPC(filter GRPCProcessFilter) {
+func (ml *ModelLoader) StopGRPC(filter GRPCProcessFilter) error {
+	var err error = nil
 	for k, p := range ml.grpcProcesses {
 		if filter(k, p) {
-			ml.deleteProcess(k)
+			e := ml.deleteProcess(k)
+			err = errors.Join(err, e)
 		}
 	}
+	return err
 }
 
-func (ml *ModelLoader) StopAllGRPC() {
-	ml.StopGRPC(includeAllProcesses)
+func (ml *ModelLoader) StopAllGRPC() error {
+	return ml.StopGRPC(includeAllProcesses)
 }
 
 func (ml *ModelLoader) GetGRPCPID(id string) (int, error) {
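
The StopGRPC change leans on errors.Join (Go 1.20+), which folds any number of per-iteration failures into a single error and ignores nil operands. A minimal self-contained sketch of that aggregation pattern, with illustrative names in place of the loader's real types:

package main

import (
	"errors"
	"fmt"
)

// stopOne is a stand-in for the real per-process shutdown call.
func stopOne(id string) error {
	if id == "bad" {
		return fmt.Errorf("process %s refused to stop", id)
	}
	return nil
}

func main() {
	var err error
	for _, id := range []string{"ok-1", "bad", "ok-2"} {
		// errors.Join skips nil operands, so successful stops add nothing.
		err = errors.Join(err, stopOne(id))
	}
	if err != nil {
		fmt.Println("at least one process failed to stop:", err)
	}
}

StopAllExcept and StopAllGRPC simply return the joined value, so a single non-nil check at the call site covers every process that failed to stop.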