Mirror of https://github.com/mudler/LocalAI.git (synced 2025-05-30 15:35:01 +00:00)
feat: cancel stream generation if client disappears (#792)

parent 72e3e236de
commit 12fe0932c4

12 changed files with 37 additions and 21 deletions
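The commit title summarizes the behavioral change: when the HTTP client drops the connection mid-stream, token generation for that request is cancelled instead of running to completion. Below is a minimal sketch of that pattern using Go's standard net/http request context; the handler, token channel, and generation loop are illustrative stand-ins, not LocalAI's actual streaming code.

package main

import (
	"fmt"
	"net/http"
	"time"
)

func streamHandler(w http.ResponseWriter, r *http.Request) {
	flusher, ok := w.(http.Flusher)
	if !ok {
		http.Error(w, "streaming unsupported", http.StatusInternalServerError)
		return
	}

	// Hypothetical token source standing in for the real backend stream.
	tokens := make(chan string)
	go func() {
		defer close(tokens)
		for i := 0; i < 100; i++ {
			select {
			case <-r.Context().Done():
				// Client disappeared: stop generating instead of wasting compute.
				return
			case tokens <- fmt.Sprintf("token-%d ", i):
			}
			time.Sleep(50 * time.Millisecond) // simulate generation latency
		}
	}()

	for tok := range tokens {
		if _, err := fmt.Fprint(w, tok); err != nil {
			return // write failed, the client is gone
		}
		flusher.Flush()
	}
}

func main() {
	http.HandleFunc("/stream", streamHandler)
	http.ListenAndServe(":8080", nil)
}

The key point is that the producer selects on r.Context().Done(), so a disconnect observed by the HTTP server propagates into the generation loop rather than leaving it running in the background.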
@@ -78,7 +78,7 @@ func (ml *ModelLoader) startProcess(grpcProcess, id string, serverAddress string
 		return err
 	}

-	log.Debug().Msgf("Loading GRPC Process", grpcProcess)
+	log.Debug().Msgf("Loading GRPC Process: %s", grpcProcess)

	log.Debug().Msgf("GRPC Service for %s will be running at: '%s'", id, serverAddress)
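For context on this hunk: Msgf formats its arguments with fmt rules, so the old call, which passed grpcProcess without a matching %s verb, would render the value as a %!(EXTRA ...) suffix instead of interpolating it. A standalone illustration using only the standard library (the path value is hypothetical):

package main

import "fmt"

func main() {
	grpcProcess := "/build/backend-assets/grpc/llama" // hypothetical path, for illustration only

	// Old call from the diff: the argument has no matching format verb.
	fmt.Println(fmt.Sprintf("Loading GRPC Process", grpcProcess))
	// -> Loading GRPC Process%!(EXTRA string=/build/backend-assets/grpc/llama)

	// Fixed call: explicit %s verb, so the value is interpolated.
	fmt.Println(fmt.Sprintf("Loading GRPC Process: %s", grpcProcess))
	// -> Loading GRPC Process: /build/backend-assets/grpc/llama
}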
@@ -102,7 +102,6 @@ func (ml *ModelLoader) LoadModel(modelName string, loader func(string) (*grpc.Cl

	// Check if we already have a loaded model
	if model := ml.checkIsLoaded(modelName); model != nil {
		log.Debug().Msgf("Model already loaded in memory: %s", modelName)
		return model, nil
	}
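The second hunk only shows the context around LoadModel's cache-first check: return the model if it is already loaded, otherwise fall through to the loader callback. A rough sketch of how such a check can be structured; only the names visible in the diff context (LoadModel, checkIsLoaded, the log message's intent) come from the source, while the map, mutex, and Client stand-in are assumptions for illustration.

package model

import "sync"

// Client is a stand-in for the gRPC client type returned by the loader.
type Client struct{ Address string }

type ModelLoader struct {
	mu     sync.Mutex
	models map[string]*Client
}

func NewModelLoader() *ModelLoader {
	return &ModelLoader{models: map[string]*Client{}}
}

// checkIsLoaded returns the cached client for modelName, or nil if absent.
func (ml *ModelLoader) checkIsLoaded(modelName string) *Client {
	ml.mu.Lock()
	defer ml.mu.Unlock()
	return ml.models[modelName]
}

func (ml *ModelLoader) LoadModel(modelName string, loader func(string) (*Client, error)) (*Client, error) {
	// Check if we already have a loaded model
	if model := ml.checkIsLoaded(modelName); model != nil {
		return model, nil
	}

	model, err := loader(modelName)
	if err != nil {
		return nil, err
	}

	ml.mu.Lock()
	ml.models[modelName] = model
	ml.mu.Unlock()
	return model, nil
}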