mirror of
https://github.com/mudler/LocalAI.git
synced 2025-05-20 10:35:01 +00:00
fix(base-grpc): close channel in base grpc server (#3734)
If the LLM does not implement any logic for PredictStream, we close the channel immediately to not leave the process hanging. Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
parent
e28e80857b
commit
092bb0bd6b
1 changed file with 1 addition and 0 deletions
|
@ -41,6 +41,7 @@ func (llm *Base) Predict(opts *pb.PredictOptions) (string, error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (llm *Base) PredictStream(opts *pb.PredictOptions, results chan string) error {
|
func (llm *Base) PredictStream(opts *pb.PredictOptions, results chan string) error {
|
||||||
|
close(results)
|
||||||
return fmt.Errorf("unimplemented")
|
return fmt.Errorf("unimplemented")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue