diff --git a/core/cli/models.go b/core/cli/models.go
index 6615e21d..5bbb60e6 100644
--- a/core/cli/models.go
+++ b/core/cli/models.go
@@ -64,7 +64,11 @@ func (mi *ModelsInstall) Run(ctx *Context) error {
 		progressbar.OptionClearOnFinish(),
 	)
 	progressCallback := func(fileName string, current string, total string, percentage float64) {
-		progressBar.Set(int(percentage * 10))
+		v := int(percentage * 10)
+		err := progressBar.Set(v)
+		if err != nil {
+			log.Error().Err(err).Str("filename", fileName).Int("value", v).Msg("error while updating progress bar")
+		}
 	}
 	err := gallery.InstallModelFromGallery(galleries, modelName, mi.ModelsPath, gallery.GalleryModel{}, progressCallback)
 	if err != nil {
diff --git a/core/cli/transcript.go b/core/cli/transcript.go
index 9f36a77c..1f2f779a 100644
--- a/core/cli/transcript.go
+++ b/core/cli/transcript.go
@@ -8,6 +8,7 @@ import (
 	"github.com/go-skynet/LocalAI/core/backend"
 	"github.com/go-skynet/LocalAI/core/config"
 	"github.com/go-skynet/LocalAI/pkg/model"
+	"github.com/rs/zerolog/log"
 )
 
 type TranscriptCMD struct {
@@ -41,7 +42,12 @@ func (t *TranscriptCMD) Run(ctx *Context) error {
 
 	c.Threads = &t.Threads
 
-	defer ml.StopAllGRPC()
+	defer func() {
+		err := ml.StopAllGRPC()
+		if err != nil {
+			log.Error().Err(err).Msg("unable to stop all grpc processes")
+		}
+	}()
 
 	tr, err := backend.ModelTranscription(t.Filename, t.Language, ml, c, opts)
 	if err != nil {
diff --git a/core/cli/tts.go b/core/cli/tts.go
index 1d8fd3a3..d4bd2553 100644
--- a/core/cli/tts.go
+++ b/core/cli/tts.go
@@ -10,6 +10,7 @@ import (
 	"github.com/go-skynet/LocalAI/core/backend"
 	"github.com/go-skynet/LocalAI/core/config"
 	"github.com/go-skynet/LocalAI/pkg/model"
+	"github.com/rs/zerolog/log"
 )
 
 type TTSCMD struct {
@@ -40,7 +41,12 @@ func (t *TTSCMD) Run(ctx *Context) error {
 	}
 	ml := model.NewModelLoader(opts.ModelPath)
 
-	defer ml.StopAllGRPC()
+	defer func() {
+		err := ml.StopAllGRPC()
+		if err != nil {
+			log.Error().Err(err).Msg("unable to stop all grpc processes")
+		}
+	}()
 
 	options := config.BackendConfig{}
 	options.SetDefaults()
diff --git a/core/startup/config_file_watcher.go b/core/startup/config_file_watcher.go
index 6bbb367f..259446f1 100644
--- a/core/startup/config_file_watcher.go
+++ b/core/startup/config_file_watcher.go
@@ -31,8 +31,14 @@ func newConfigFileHandler(appConfig *config.ApplicationConfig) configFileHandler
 		handlers:  make(map[string]fileHandler),
 		appConfig: appConfig,
 	}
-	c.Register("api_keys.json", readApiKeysJson(*appConfig), true)
-	c.Register("external_backends.json", readExternalBackendsJson(*appConfig), true)
+	err := c.Register("api_keys.json", readApiKeysJson(*appConfig), true)
+	if err != nil {
+		log.Error().Err(err).Str("file", "api_keys.json").Msg("unable to register config file handler")
+	}
+	err = c.Register("external_backends.json", readExternalBackendsJson(*appConfig), true)
+	if err != nil {
+		log.Error().Err(err).Str("file", "external_backends.json").Msg("unable to register config file handler")
+	}
 	return c
 }
 
@@ -118,8 +124,8 @@ func (c *configFileHandler) Watch() error {
 }
 
 // TODO: When we institute graceful shutdown, this should be called
-func (c *configFileHandler) Stop() {
-	c.watcher.Close()
+func (c *configFileHandler) Stop() error {
+	return c.watcher.Close()
 }
 
 func readApiKeysJson(startupAppConfig config.ApplicationConfig) fileHandler {
diff --git a/core/startup/startup.go b/core/startup/startup.go
index 17bbf9f5..e5660f4c 100644
--- a/core/startup/startup.go
+++ b/core/startup/startup.go
@@ -100,7 +100,10 @@ func Startup(opts ...config.AppOption) (*config.BackendConfigLoader, *model.Mode
 	go func() {
 		<-options.Context.Done()
 		log.Debug().Msgf("Context canceled, shutting down")
-		ml.StopAllGRPC()
+		err := ml.StopAllGRPC()
+		if err != nil {
+			log.Error().Err(err).Msg("error while stopping all grpc backends")
+		}
 	}()
 
 	if options.WatchDog {
diff --git a/pkg/functions/functions.go b/pkg/functions/functions.go
index d75a2ee3..f5e37d75 100644
--- a/pkg/functions/functions.go
+++ b/pkg/functions/functions.go
@@ -2,6 +2,8 @@ package functions
 
 import (
 	"encoding/json"
+
+	"github.com/rs/zerolog/log"
 )
 
 type Function struct {
@@ -30,8 +32,14 @@ func (f Functions) ToJSONStructure() JSONFunctionStructure {
 		prop := map[string]interface{}{}
 		defsD := map[string]interface{}{}
 
-		json.Unmarshal(dat, &prop)
-		json.Unmarshal(dat2, &defsD)
+		err := json.Unmarshal(dat, &prop)
+		if err != nil {
+			log.Error().Err(err).Msg("error unmarshalling dat")
+		}
+		err = json.Unmarshal(dat2, &defsD)
+		if err != nil {
+			log.Error().Err(err).Msg("error unmarshalling dat2")
+		}
 		if js.Defs == nil {
 			js.Defs = defsD
 		}
diff --git a/pkg/functions/parse.go b/pkg/functions/parse.go
index 5324e8c6..26312560 100644
--- a/pkg/functions/parse.go
+++ b/pkg/functions/parse.go
@@ -59,7 +59,10 @@ func ParseFunctionCall(llmresult string, functionConfig FunctionsConfig) []FuncC
 	if multipleResults {
 		ss := []map[string]interface{}{}
 		s := utils.EscapeNewLines(llmresult)
-		json.Unmarshal([]byte(s), &ss)
+		err := json.Unmarshal([]byte(s), &ss)
+		if err != nil {
+			log.Error().Err(err).Str("escapedLLMResult", s).Msg("multiple results: unable to unmarshal llm result")
+		}
 		log.Debug().Msgf("Function return: %s %+v", s, ss)
 
 		for _, s := range ss {
@@ -83,7 +86,10 @@ func ParseFunctionCall(llmresult string, functionConfig FunctionsConfig) []FuncC
 		ss := map[string]interface{}{}
 		// This prevent newlines to break JSON parsing for clients
 		s := utils.EscapeNewLines(llmresult)
-		json.Unmarshal([]byte(s), &ss)
+		err := json.Unmarshal([]byte(s), &ss)
+		if err != nil {
+			log.Error().Err(err).Str("escapedLLMResult", s).Msg("unable to unmarshal llm result")
+		}
 		log.Debug().Msgf("Function return: %s %+v", s, ss)
 
 		// The grammar defines the function name as "function", while OpenAI returns "name"
diff --git a/pkg/model/initializers.go b/pkg/model/initializers.go
index 5d9808a4..5a65d01f 100644
--- a/pkg/model/initializers.go
+++ b/pkg/model/initializers.go
@@ -70,7 +70,10 @@ func (ml *ModelLoader) grpcModel(backend string, o *Options) func(string, string
 		// If no specific model path is set for transformers/HF, set it to the model path
 		for _, env := range []string{"HF_HOME", "TRANSFORMERS_CACHE", "HUGGINGFACE_HUB_CACHE"} {
 			if os.Getenv(env) == "" {
-				os.Setenv(env, ml.ModelPath)
+				err := os.Setenv(env, ml.ModelPath)
+				if err != nil {
+					log.Error().Err(err).Str("name", env).Str("modelPath", ml.ModelPath).Msg("unable to set environment variable to modelPath")
+				}
 			}
 		}
 
@@ -184,8 +187,13 @@ func (ml *ModelLoader) BackendLoader(opts ...Option) (client grpc.Backend, err e
 	if o.singleActiveBackend {
 		ml.mu.Lock()
 		log.Debug().Msgf("Stopping all backends except '%s'", o.model)
-		ml.StopAllExcept(o.model)
+		err := ml.StopAllExcept(o.model)
 		ml.mu.Unlock()
+		if err != nil {
+			log.Error().Err(err).Str("keptModel", o.model).Msg("error while shutting down all backends except for the keptModel")
+			return nil, err
+		}
+
 	}
 
 	var backendToConsume string
@@ -224,7 +232,10 @@ func (ml *ModelLoader) GreedyLoader(opts ...Option) (grpc.Backend, error) {
 	// If we can have only one backend active, kill all the others (except external backends)
 	if o.singleActiveBackend {
 		log.Debug().Msgf("Stopping all backends except '%s'", o.model)
-		ml.StopAllExcept(o.model)
+		err := ml.StopAllExcept(o.model)
+		if err != nil {
+			log.Error().Err(err).Str("keptModel", o.model).Msg("error while shutting down all backends except for the keptModel - greedyloader continuing")
+		}
 	}
 	ml.mu.Unlock()
 
diff --git a/pkg/model/loader.go b/pkg/model/loader.go
index 2d6b3acb..8bf9da5a 100644
--- a/pkg/model/loader.go
+++ b/pkg/model/loader.go
@@ -174,7 +174,10 @@ func (ml *ModelLoader) CheckIsLoaded(s string) ModelAddress {
 		if !ml.grpcProcesses[s].IsAlive() {
 			log.Debug().Msgf("GRPC Process is not responding: %s", s)
 			// stop and delete the process, this forces to re-load the model and re-create again the service
-			ml.deleteProcess(s)
+			err := ml.deleteProcess(s)
+			if err != nil {
+				log.Error().Err(err).Str("process", s).Msg("error stopping process")
+			}
 			return ""
 		}
 	}
diff --git a/pkg/model/process.go b/pkg/model/process.go
index 08822fd9..ff3b12cc 100644
--- a/pkg/model/process.go
+++ b/pkg/model/process.go
@@ -1,6 +1,7 @@
 package model
 
 import (
+	"errors"
 	"fmt"
 	"os"
 	"os/signal"
@@ -14,8 +15,8 @@ import (
 	"github.com/rs/zerolog/log"
 )
 
-func (ml *ModelLoader) StopAllExcept(s string) {
-	ml.StopGRPC(func(id string, p *process.Process) bool {
+func (ml *ModelLoader) StopAllExcept(s string) error {
+	return ml.StopGRPC(func(id string, p *process.Process) bool {
 		if id != s {
 			for ml.models[id].GRPC(false, ml.wd).IsBusy() {
 				log.Debug().Msgf("%s busy. Waiting.", id)
@@ -43,16 +44,19 @@ func includeAllProcesses(_ string, _ *process.Process) bool {
 	return true
 }
 
-func (ml *ModelLoader) StopGRPC(filter GRPCProcessFilter) {
+func (ml *ModelLoader) StopGRPC(filter GRPCProcessFilter) error {
+	var err error = nil
 	for k, p := range ml.grpcProcesses {
 		if filter(k, p) {
-			ml.deleteProcess(k)
+			e := ml.deleteProcess(k)
+			err = errors.Join(err, e)
 		}
 	}
+	return err
 }
 
-func (ml *ModelLoader) StopAllGRPC() {
-	ml.StopGRPC(includeAllProcesses)
+func (ml *ModelLoader) StopAllGRPC() error {
+	return ml.StopGRPC(includeAllProcesses)
 }
 
 func (ml *ModelLoader) GetGRPCPID(id string) (int, error) {
diff --git a/tests/integration/stores_test.go b/tests/integration/stores_test.go
index 54d0844c..ec67af78 100644
--- a/tests/integration/stores_test.go
+++ b/tests/integration/stores_test.go
@@ -63,8 +63,9 @@ var _ = Describe("Integration tests for the stores backend(s) and internal APIs"
 	})
 
 	AfterEach(func() {
-		sl.StopAllGRPC()
-		err := os.RemoveAll(tmpdir)
+		err := sl.StopAllGRPC()
+		Expect(err).ToNot(HaveOccurred())
+		err = os.RemoveAll(tmpdir)
 		Expect(err).ToNot(HaveOccurred())
 	})
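
Note on the StopGRPC change above: it now attempts to stop every matching process, folds the individual failures together with errors.Join, and returns the combined error instead of silently dropping them, which is what lets StopAllGRPC and StopAllExcept propagate errors to their callers. Below is a minimal standalone sketch of that aggregation pattern; stopAll and the backend names are illustrative only and are not part of the patch.

package main

import (
	"errors"
	"fmt"
)

// stopAll mirrors the aggregation pattern StopGRPC uses after this change:
// try to stop every process, join any failures into one error, and return
// the combined error rather than stopping at the first failure.
func stopAll(stoppers map[string]func() error) error {
	var err error
	for id, stop := range stoppers {
		if e := stop(); e != nil {
			err = errors.Join(err, fmt.Errorf("stop %s: %w", id, e))
		}
	}
	return err
}

func main() {
	// Hypothetical backends: one stops cleanly, one fails; both are attempted.
	err := stopAll(map[string]func() error{
		"backend-a": func() error { return nil },
		"backend-b": func() error { return errors.New("process already gone") },
	})
	if err != nil {
		fmt.Println("shutdown completed with errors:", err)
	}
}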