feat(install.sh): support federated install (#2752)

* feat(install.sh): support federated install

This adds support for federation by exposing two new installer variables
(usage is sketched below):

- FEDERATED: true/false to share the instance with the federation
- FEDERATED_SERVER: true/false to start the federated load balancer (it
  forwards incoming requests to the federation)
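
For example, a minimal sketch of how these variables can be passed to the installer, assuming the documented `curl https://localai.io/install.sh | sh` pattern (treat the exact invocation as illustrative):

    # Join the federation and share this instance (illustrative)
    curl -s https://localai.io/install.sh | FEDERATED=true sh

    # Additionally start the federated load balancer on this node
    curl -s https://localai.io/install.sh | FEDERATED=true FEDERATED_SERVER=true sh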

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* docs: update installer parameters

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

---------

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Co-authored-by: Dave <dave@gray101.com>
Commit 95e31fd279 (parent fb04347d3b), authored by Ettore Di Giacinto on 2024-07-12 08:42:21 +02:00 and committed via GitHub.
2 changed files with 36 additions and 16 deletions

@@ -78,6 +78,9 @@ API_KEY=${API_KEY:-}
CORE_IMAGES=${CORE_IMAGES:-false}
P2P_TOKEN=${P2P_TOKEN:-}
WORKER=${WORKER:-false}
FEDERATED=${FEDERATED:-false}
FEDERATED_SERVER=${FEDERATED_SERVER:-false}
# nprocs -1
if available nproc; then
    procs=$(nproc)
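
For context, the `${VAR:-false}` expansions above make the new flags default to false while still honoring values exported by the caller; a minimal sketch of that behavior:

    unset FEDERATED
    echo "${FEDERATED:-false}"    # prints "false" because the variable is unset
    FEDERATED=true
    echo "${FEDERATED:-false}"    # prints "true" because the caller's value wins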
@@ -134,14 +137,6 @@ configure_systemd() {
info "Adding current user to local-ai group..."
$SUDO usermod -a -G local-ai $(whoami)
STARTCOMMAND="run"
if [ "$WORKER" = true ]; then
if [ -n "$P2P_TOKEN" ]; then
STARTCOMMAND="worker p2p-llama-cpp-rpc"
else
STARTCOMMAND="worker llama-cpp-rpc"
fi
fi
info "Creating local-ai systemd service..."
cat <<EOF | $SUDO tee /etc/systemd/system/local-ai.service >/dev/null
[Unit]
@@ -173,6 +168,10 @@ EOF
$SUDO echo "LOCALAI_P2P=true" | $SUDO tee -a /etc/localai.env >/dev/null
fi
if [ "$LOCALAI_P2P_DISABLE_DHT" = true ]; then
$SUDO echo "LOCALAI_P2P_DISABLE_DHT=true" | $SUDO tee -a /etc/localai.env >/dev/null
fi
SYSTEMCTL_RUNNING="$(systemctl is-system-running || true)"
case $SYSTEMCTL_RUNNING in
running|degraded)
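
After a systemd install with these options enabled, the settings appended above end up in /etc/localai.env; a hypothetical example of the resulting file, limited to the keys visible in this hunk (the actual contents depend on which options were set):

    # /etc/localai.env (illustrative)
    LOCALAI_P2P=true
    LOCALAI_P2P_DISABLE_DHT=true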
@@ -421,18 +420,13 @@ install_docker() {
        # exit 0
    fi
    STARTCOMMAND="run"
    if [ "$WORKER" = true ]; then
        if [ -n "$P2P_TOKEN" ]; then
            STARTCOMMAND="worker p2p-llama-cpp-rpc"
        else
            STARTCOMMAND="worker llama-cpp-rpc"
        fi
    fi
    envs=""
    if [ -n "$P2P_TOKEN" ]; then
        envs="-e LOCALAI_P2P_TOKEN=$P2P_TOKEN -e LOCALAI_P2P=true"
    fi
    if [ "$LOCALAI_P2P_DISABLE_DHT" = true ]; then
        envs="$envs -e LOCALAI_P2P_DISABLE_DHT=true"
    fi
    IMAGE_TAG=
    if [ "$HAS_CUDA" ]; then
@@ -604,6 +598,28 @@ install_binary() {
    exit 0
}

detect_start_command() {
    STARTCOMMAND="run"
    if [ "$WORKER" = true ]; then
        if [ -n "$P2P_TOKEN" ]; then
            STARTCOMMAND="worker p2p-llama-cpp-rpc"
        else
            STARTCOMMAND="worker llama-cpp-rpc"
        fi
    elif [ "$FEDERATED" = true ]; then
        if [ "$FEDERATED_SERVER" = true ]; then
            STARTCOMMAND="federated"
        else
            STARTCOMMAND="$STARTCOMMAND --p2p --federated"
        fi
    elif [ -n "$P2P_TOKEN" ]; then
        STARTCOMMAND="$STARTCOMMAND --p2p"
    fi
}
detect_start_command
OS="$(uname -s)"
ARCH=$(uname -m)
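
To make the new dispatch logic concrete, a short sketch of the start command detect_start_command selects for a few variable combinations (values like mytoken are illustrative; assumes the function above is available in the current shell):

    # Worker node with a P2P token
    WORKER=true P2P_TOKEN=mytoken FEDERATED=false
    detect_start_command; echo "$STARTCOMMAND"    # -> worker p2p-llama-cpp-rpc

    # Node sharing its instance with the federation
    WORKER=false P2P_TOKEN="" FEDERATED=true FEDERATED_SERVER=false
    detect_start_command; echo "$STARTCOMMAND"    # -> run --p2p --federated

    # Federated load balancer
    FEDERATED=true FEDERATED_SERVER=true
    detect_start_command; echo "$STARTCOMMAND"    # -> federated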