Mirror of https://github.com/mudler/LocalAI.git (synced 2025-06-29 14:14:59 +00:00)
parent daea8e158e
commit 1d877bd70b
4 changed files with 15 additions and 10 deletions
.devcontainer/devcontainer.json
@@ -15,8 +15,13 @@
   "service": "api",
 
   // The optional 'workspaceFolder' property is the path VS Code should open by default when
+  "workspaceFolder": "/workspace",
+  "mounts": [
+    "source=${localWorkspaceFolder},target=/workspace,type=bind,consistency=cached"
+  ],
+
   // connected. This is typically a file mount in .devcontainer/docker-compose.yml
-  "workspaceFolder": "/workspaces",
+  // "workspaceFolder": "/workspace",
   "features": {
     "ghcr.io/devcontainers/features/go:1": {
       "version": "latest"
.devcontainer/docker-compose.yml
@@ -11,10 +11,9 @@ services:
     # context: .
     # dockerfile: .devcontainer/Dockerfile
 
-    volumes:
+    # volumes:
       # Update this to wherever you want VS Code to mount the folder of your project
-      - .:/workspaces:cached
-
+      # - .:/workspaces:cached
     # Uncomment the next four lines if you will use a ptrace-based debugger like C++, Go, and Rust.
     # cap_add:
     #   - SYS_PTRACE
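Taken together, the two dev-container hunks above move the workspace bind mount out of .devcontainer/docker-compose.yml and into the "mounts" entry of devcontainer.json. For readers more at home in Compose syntax, the sketch below shows roughly the same mount written as a Compose long-syntax volume; it is illustrative only and not part of this commit, and the relative source path is an assumption about where such a file would sit in the checked-out repository.

# Illustrative sketch, not committed code: the devcontainer.json mount
#   "source=${localWorkspaceFolder},target=/workspace,type=bind,consistency=cached"
# written as a Compose long-syntax volume. The source path "." is an assumption.
services:
  api:
    volumes:
      - type: bind
        source: .            # what ${localWorkspaceFolder} resolves to: the local checkout
        target: /workspace
        consistency: cached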
.env
@@ -1,3 +1,4 @@
 THREADS=14
 CONTEXT_SIZE=700
 MODEL_PATH=/models
+DEFAULT_MODEL=/models/7B/ggml-vicuna-7b-4bit.bin
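The .env hunk only appends DEFAULT_MODEL, but it is a natural place to note how these values can reach the container without being repeated in the compose file. The sketch below uses Compose's standard env_file option; whether the api image actually reads DEFAULT_MODEL (or MODEL_PATH) from its environment is an assumption, not something this commit shows.

# Sketch only: forward every variable in .env into the container environment.
# env_file is standard Compose; which of these variables the image honours
# is an assumption.
services:
  api:
    env_file:
      - .env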
docker-compose.yaml
@@ -12,17 +12,17 @@ services:
       - 'OPENAI_API_HOST=http://api:8080'
 
   api:
-    image: quay.io/go-skynet/llama-cli:latest
+    # image: quay.io/go-skynet/llama-cli:latest
     build:
       context: .
       dockerfile: Dockerfile
     ports:
-      - 3001:8080
+      - 8080:8080
     environment:
-      - MODELS_PATH=$MODELS_PATH
-      - CONTEXT_SIZE=$CONTEXT_SIZE
-      - THREADS=$THREADS
+      - MODELS_PATH=/models
+      - CONTEXT_SIZE=700
+      - THREADS=10
     volumes:
-      - ./models/:/models:cached
+      - ./models:/models:cached
     command: api
 
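The hunk above drops the $MODELS_PATH, $CONTEXT_SIZE and $THREADS interpolations in favour of literal values. Note that the committed .env defines MODEL_PATH rather than MODELS_PATH, so the old $MODELS_PATH reference would have interpolated to an empty string unless that variable was exported in the shell. An alternative that keeps .env authoritative while still providing the fallbacks this commit hardcodes is Compose's ${VAR:-default} interpolation; the snippet below is a sketch of that approach, not what the commit ships.

# Sketch of an alternative: interpolate from .env when set, otherwise fall back
# to the literals hardcoded by this commit. Compose reads .env automatically
# from the project directory when interpolating the compose file.
services:
  api:
    ports:
      - 8080:8080
    environment:
      - MODELS_PATH=${MODEL_PATH:-/models}
      - CONTEXT_SIZE=${CONTEXT_SIZE:-700}
      - THREADS=${THREADS:-10}
    volumes:
      - ./models:/models:cached
    command: api

Running docker compose config prints the fully interpolated file, which makes it easy to verify which value actually reaches the container.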