Mirror of https://github.com/mudler/LocalAI.git, synced 2025-05-31 07:54:59 +00:00
feat(p2p): allow to run multiple clusters in the same p2p network (#3128)
Allow a network ID to be specified via the CLI, so that multiple clusters can run logically separated within the same network (while still using the same shared token).

Note: this segregation is not "secure" by any means; anyone holding the network token can still see the services available across the whole network. It does, however, provide a way to keep inference endpoints separate. For instance, a node can be federated and at the same time have a set of llama.cpp workers attached to it.

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Parent: 2c8623dbb4
Commit: 36e185ba63
8 changed files with 50 additions and 26 deletions
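The diff below threads the user-chosen network ID into the service name announced on the p2p network. As a minimal sketch of the idea only (assuming p2p.NetworkID simply namespaces the service ID with the network ID; the actual helper in the p2p package may differ), it can be pictured as:

// Hypothetical sketch of a NetworkID-style helper: prefix the service ID with
// the user-chosen network ID so that clusters with different IDs advertise
// under different names. Not the actual LocalAI implementation.
func NetworkID(networkID, serviceID string) string {
	if networkID != "" {
		return networkID + "_" + serviceID
	}
	return serviceID
}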
@@ -19,12 +19,13 @@ import (
 )
 
 type P2P struct {
 	WorkerFlags `embed:""`
 	Token string `env:"LOCALAI_TOKEN,LOCALAI_P2P_TOKEN,TOKEN" help:"P2P token to use"`
 	NoRunner bool `env:"LOCALAI_NO_RUNNER,NO_RUNNER" help:"Do not start the llama-cpp-rpc-server"`
 	RunnerAddress string `env:"LOCALAI_RUNNER_ADDRESS,RUNNER_ADDRESS" help:"Address of the llama-cpp-rpc-server"`
 	RunnerPort string `env:"LOCALAI_RUNNER_PORT,RUNNER_PORT" help:"Port of the llama-cpp-rpc-server"`
 	ExtraLLamaCPPArgs []string `env:"LOCALAI_EXTRA_LLAMA_CPP_ARGS,EXTRA_LLAMA_CPP_ARGS" help:"Extra arguments to pass to llama-cpp-rpc-server"`
+	Peer2PeerNetworkID string `env:"LOCALAI_P2P_NETWORK_ID,P2P_NETWORK_ID" help:"Network ID for P2P mode, can be set arbitrarily by the user for grouping a set of instances" group:"p2p"`
 }
 
 func (r *P2P) Run(ctx *cliContext.Context) error {
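The struct tags above follow the kong conventions the LocalAI CLI already uses, so the new field can be fed from the environment as well as the command line. A small, self-contained sketch (assuming github.com/alecthomas/kong; the program and the field subset are illustrative, not LocalAI's actual wiring):

// Sketch only: parse a trimmed-down P2P options struct with kong and read the
// network ID from LOCALAI_P2P_NETWORK_ID / P2P_NETWORK_ID (or the generated flag).
package main

import (
	"fmt"

	"github.com/alecthomas/kong"
)

type P2P struct {
	Token              string `env:"LOCALAI_TOKEN,LOCALAI_P2P_TOKEN,TOKEN" help:"P2P token to use"`
	Peer2PeerNetworkID string `env:"LOCALAI_P2P_NETWORK_ID,P2P_NETWORK_ID" help:"Network ID for P2P mode, can be set arbitrarily by the user for grouping a set of instances" group:"p2p"`
}

func main() {
	var cli P2P
	kong.Parse(&cli)
	fmt.Println("network ID:", cli.Peer2PeerNetworkID)
}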
@@ -59,7 +60,7 @@ func (r *P2P) Run(ctx *cliContext.Context) error {
 			p = r.RunnerPort
 		}
 
-		err = p2p.ExposeService(context.Background(), address, p, r.Token, "")
+		err = p2p.ExposeService(context.Background(), address, p, r.Token, p2p.NetworkID(r.Peer2PeerNetworkID, ""))
 		if err != nil {
 			return err
 		}
@@ -99,7 +100,7 @@ func (r *P2P) Run(ctx *cliContext.Context) error {
 			}
 		}()
 
-		err = p2p.ExposeService(context.Background(), address, fmt.Sprint(port), r.Token, "")
+		err = p2p.ExposeService(context.Background(), address, fmt.Sprint(port), r.Token, p2p.NetworkID(r.Peer2PeerNetworkID, ""))
 		if err != nil {
 			return err
 		}
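Putting the changes together: both a federated server and a set of llama.cpp workers can share one network token while being grouped under different network IDs. The following is an illustrative sketch only (the import path, addresses, ports and the long-running select are assumptions; ExposeService and NetworkID are the calls shown in the diff):

// Illustrative: expose two runners on the same p2p network (same token) but
// under different logical clusters, so peers looking up "cluster-a" do not
// see "cluster-b" services. Values below are placeholders.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/mudler/LocalAI/core/p2p" // assumed import path
)

func expose(token, networkID, address string, port int) error {
	// Same call shape as in the diff: "" is the default service ID, which
	// NetworkID prefixes with the chosen network ID.
	return p2p.ExposeService(context.Background(), address, fmt.Sprint(port), token, p2p.NetworkID(networkID, ""))
}

func main() {
	token := "SHARED_NETWORK_TOKEN" // placeholder for a real shared token
	if err := expose(token, "cluster-a", "127.0.0.1", 50051); err != nil {
		log.Fatal(err)
	}
	if err := expose(token, "cluster-b", "127.0.0.1", 50052); err != nil {
		log.Fatal(err)
	}
	select {} // keep the advertised services alive
}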