Mirror of https://github.com/mudler/LocalAI.git
feat: move other backends to grpc

This finally makes everything more consistent.

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Parent: 5dcfdbe51d
Commit: 1d0ed95a54
54 changed files with 3171 additions and 1712 deletions
```diff
@@ -7,34 +7,8 @@ import (
 	pb "github.com/go-skynet/LocalAI/pkg/grpc/proto"
 
 	config "github.com/go-skynet/LocalAI/api/config"
-	"github.com/go-skynet/LocalAI/pkg/langchain"
-	"github.com/go-skynet/bloomz.cpp"
 )
 
-func langchainOptions(c config.Config) []langchain.PredictOption {
-	return []langchain.PredictOption{
-		langchain.SetModel(c.Model),
-		langchain.SetMaxTokens(c.Maxtokens),
-		langchain.SetTemperature(c.Temperature),
-		langchain.SetStopWords(c.StopWords),
-	}
-}
-
-func bloomzOptions(c config.Config) []bloomz.PredictOption {
-	// Generate the prediction using the language model
-	predictOptions := []bloomz.PredictOption{
-		bloomz.SetTemperature(c.Temperature),
-		bloomz.SetTopP(c.TopP),
-		bloomz.SetTopK(c.TopK),
-		bloomz.SetTokens(c.Maxtokens),
-		bloomz.SetThreads(c.Threads),
-	}
-
-	if c.Seed != 0 {
-		predictOptions = append(predictOptions, bloomz.SetSeed(c.Seed))
-	}
-	return predictOptions
-}
 func gRPCModelOpts(c config.Config) *pb.ModelOptions {
 	b := 512
 	if c.Batch != 0 {
```
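For readers skimming the hunk above: the removed helpers each translated the API-level config.Config into a backend-specific option slice (one helper per backend, e.g. langchainOptions and bloomzOptions), while the surviving gRPCModelOpts builds a single protobuf message that every gRPC backend consumes. Below is a minimal sketch of that consolidation pattern; the Config and PredictOptions structs are stand-ins with assumed field names, not the actual config.Config or generated pb types from the repository.

```go
package main

import "fmt"

// PredictOptions is a stand-in for a generated protobuf options message;
// the real field set lives in pkg/grpc/proto and may differ.
type PredictOptions struct {
	Temperature float64
	TopP        float64
	TopK        int
	Tokens      int
	Threads     int
	StopPrompts []string
	Seed        int
}

// Config mirrors only the handful of fields the removed helpers read
// from the API config (illustrative, not the full config.Config).
type Config struct {
	Temperature float64
	TopP        float64
	TopK        int
	Maxtokens   int
	Threads     int
	StopWords   []string
	Seed        int
}

// gRPCPredictOpts shows the consolidation pattern: one mapping from the
// API config to a single options message, instead of one helper per backend.
func gRPCPredictOpts(c Config) *PredictOptions {
	return &PredictOptions{
		Temperature: c.Temperature,
		TopP:        c.TopP,
		TopK:        c.TopK,
		Tokens:      c.Maxtokens,
		Threads:     c.Threads,
		StopPrompts: c.StopWords,
		Seed:        c.Seed,
	}
}

func main() {
	opts := gRPCPredictOpts(Config{Temperature: 0.7, Maxtokens: 512, Threads: 4})
	fmt.Printf("%+v\n", *opts)
}
```

The point of the consolidation is that adding a new backend no longer requires another xxxOptions helper in the API layer; a backend behind gRPC only needs to honor the shared option fields it cares about, which is the consistency the commit message refers to.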