Use .env variables in docker-compose

mudler 2023-04-16 00:54:20 +02:00
parent 3f8d5aaeb3
commit dbd087988f
3 changed files with 14 additions and 15 deletions

.env

@@ -1,4 +1,3 @@
 THREADS=14
 CONTEXT_SIZE=700
-MODEL_PATH=/models
-DEFAULT_MODEL=/models/7B/ggml-vicuna-7b-4bit.bin
+MODELS_PATH=/models
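For reference, the .env file after this change holds only the three values the compose file consumes; Docker Compose automatically loads a .env file from the project directory and uses it for variable substitution:

THREADS=14
CONTEXT_SIZE=700
MODELS_PATH=/models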

README.md

@@ -19,8 +19,8 @@ cd llama-cli
 # copy your models to models/
 cp your-model.bin models/
-# (optional) Edit the .env file to set the number of concurrent threads used for inference
-# echo "THREADS=14" > .env
+# (optional) Edit the .env file to set things like context size and threads
+# vim .env
 # start with docker-compose
 docker compose up -d --build
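Because the values now come from .env, the substitution can be checked before starting the stack (assuming a Docker Compose v2 CLI, which provides the config subcommand):

# print the compose file with the .env values already substituted in
docker compose config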

docker-compose.yaml

@@ -2,14 +2,14 @@ version: '3.6'
 services:
-  chatgpt:
-    image: ghcr.io/mckaywrigley/chatbot-ui:main
-    # platform: linux/amd64
-    ports:
-      - 3000:3000
-    environment:
-      - 'OPENAI_API_KEY=sk-000000000000000'
-      - 'OPENAI_API_HOST=http://api:8080'
+  # chatgpt:
+  #   image: ghcr.io/mckaywrigley/chatbot-ui:main
+  #   # platform: linux/amd64
+  #   ports:
+  #     - 3000:3000
+  #   environment:
+  #     - 'OPENAI_API_KEY=sk-000000000000000'
+  #     - 'OPENAI_API_HOST=http://api:8080'
   api:
     # image: quay.io/go-skynet/llama-cli:latest
@@ -19,9 +19,9 @@ services:
     ports:
       - 8080:8080
     environment:
-      - MODELS_PATH=/models
-      - CONTEXT_SIZE=700
-      - THREADS=10
+      - MODELS_PATH=$MODELS_PATH
+      - CONTEXT_SIZE=$CONTEXT_SIZE
+      - THREADS=$THREADS
     volumes:
       - ./models:/models:cached
     command: api
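A possible follow-up, not part of this commit: Compose's variable substitution also accepts inline fallbacks, so the api service could keep sensible defaults when no .env file is present. A minimal sketch with illustrative fallback values:

    environment:
      - MODELS_PATH=${MODELS_PATH:-/models}
      - CONTEXT_SIZE=${CONTEXT_SIZE:-700}
      - THREADS=${THREADS:-4}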