LocalAI/cmd/grpc/llama-master/main.go
Ettore Di Giacinto · fab26ac6fe · feat: add llama-master backend
So we can keep one stable and one master to point to the latest upstream changes.

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
2023-07-15 23:32:42 +02:00

package main

// GRPC llama-master server
// Note: this is started internally by LocalAI and a server is allocated for each model
import (
	"flag"

	grpc "github.com/go-skynet/LocalAI/pkg/grpc"
	llama "github.com/go-skynet/LocalAI/pkg/grpc/llm/llama-master"
)

var (
	addr = flag.String("addr", "localhost:50051", "the address the gRPC server listens on")
)

func main() {
	flag.Parse()

	// Serve the llama-master backend over gRPC until the process exits.
	if err := grpc.StartServer(*addr, &llama.LLM{}); err != nil {
		panic(err)
	}
}
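
The note at the top of the file says LocalAI starts this binary internally and allocates one server per model. A minimal sketch of what such a launcher could look like, assuming a built backend binary named ./llama-master; the binary name, port scheme, and model list below are illustrative assumptions, not LocalAI's actual launcher code:

package main

// Hypothetical launcher sketch: spawns one backend process per model,
// each listening on its own port, mirroring the per-model process
// model described in the file comment above. Not LocalAI's actual
// implementation.
import (
	"fmt"
	"os/exec"
)

func main() {
	// Illustrative model list and base port (assumptions for this sketch).
	models := []string{"model-a.bin", "model-b.bin"}
	basePort := 50051

	for i, model := range models {
		addr := fmt.Sprintf("localhost:%d", basePort+i)
		// Each model gets its own dedicated gRPC server process.
		cmd := exec.Command("./llama-master", "--addr", addr)
		if err := cmd.Start(); err != nil {
			panic(err)
		}
		fmt.Printf("backend for %s: pid %d listening on %s\n", model, cmd.Process.Pid, addr)
	}
}

The parent process would then load a model into each backend and route inference requests to the matching address; one likely benefit of this design is isolation, since a crash in one backend only takes down that model's server.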