Repository: https://github.com/mudler/LocalAI.git
Commit 1d877bd70b (parent daea8e158e)
4 changed files with 15 additions and 10 deletions
.devcontainer/devcontainer.json
@@ -15,8 +15,13 @@
     "service": "api",
 
     // The optional 'workspaceFolder' property is the path VS Code should open by default when
+    "workspaceFolder": "/workspace",
+    "mounts": [
+        "source=${localWorkspaceFolder},target=/workspace,type=bind,consistency=cached"
+    ],
+
     // connected. This is typically a file mount in .devcontainer/docker-compose.yml
-    "workspaceFolder": "/workspaces",
+    // "workspaceFolder": "/workspace",
     "features": {
         "ghcr.io/devcontainers/features/go:1": {
             "version": "latest"
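With this change the dev container declares its own workspace bind mount instead of relying on the Compose file: the added "mounts" entry binds ${localWorkspaceFolder} to /workspace inside the container, and "workspaceFolder" is switched to /workspace to match. As a rough illustration only (not part of this commit), the same mount expressed at the Compose level would look roughly like the sketch below, assuming the Compose project directory is the repository root; the service name api comes from the "service" field above.

services:
  api:
    volumes:
      # bind the checked-out repository into the container at /workspace
      - .:/workspace:cached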
.devcontainer/docker-compose.yml
@@ -11,10 +11,9 @@ services:
     #   context: .
     #   dockerfile: .devcontainer/Dockerfile
 
-    volumes:
+    # volumes:
       # Update this to wherever you want VS Code to mount the folder of your project
-      - .:/workspaces:cached
-
+      # - .:/workspaces:cached
     # Uncomment the next four lines if you will use a ptrace-based debugger like C++, Go, and Rust.
     # cap_add:
     #   - SYS_PTRACE
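The companion change in .devcontainer/docker-compose.yml comments out the Compose-level source mount, since the bind mount now comes from devcontainer.json. If that mount were ever needed again at the Compose level, re-enabling the commented lines would give roughly the following (a sketch; indentation is assumed, the api service name is taken from devcontainer.json):

services:
  api:
    volumes:
      # Update this to wherever you want VS Code to mount the folder of your project
      - .:/workspaces:cached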
.env
@@ -1,3 +1,4 @@
 THREADS=14
 CONTEXT_SIZE=700
 MODEL_PATH=/models
+DEFAULT_MODEL=/models/7B/ggml-vicuna-7b-4bit.bin
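.env gains a DEFAULT_MODEL entry pointing at a ggml Vicuna 7B file under /models/7B. Docker Compose reads .env from the project directory and substitutes ${VAR} references at config time, so a variable defined here can be forwarded into a service. The sketch below is illustrative only; this commit does not show DEFAULT_MODEL being consumed anywhere, and in fact hardcodes the other values in the top-level Compose file.

services:
  api:
    environment:
      # value comes from .env via Compose variable substitution
      - DEFAULT_MODEL=${DEFAULT_MODEL}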
docker-compose.yaml
@@ -12,17 +12,17 @@ services:
       - 'OPENAI_API_HOST=http://api:8080'
 
   api:
-    image: quay.io/go-skynet/llama-cli:latest
+    # image: quay.io/go-skynet/llama-cli:latest
     build:
       context: .
       dockerfile: Dockerfile
     ports:
-      - 3001:8080
+      - 8080:8080
     environment:
-      - MODELS_PATH=$MODELS_PATH
-      - CONTEXT_SIZE=$CONTEXT_SIZE
-      - THREADS=$THREADS
+      - MODELS_PATH=/models
+      - CONTEXT_SIZE=700
+      - THREADS=10
     volumes:
-      - ./models:/models:cached
+      - ./models:/models:cached
     command: api
 
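In the top-level Compose file the api service now publishes port 8080 directly (8080:8080 instead of 3001:8080), builds the image locally with the quay.io image line commented out, and hardcodes MODELS_PATH, CONTEXT_SIZE and THREADS rather than substituting them from .env; the models volume also drops the trailing slash. Re-assembled from the new side of the hunk (indentation assumed), the service reads:

  api:
    # image: quay.io/go-skynet/llama-cli:latest
    build:
      context: .
      dockerfile: Dockerfile
    ports:
      - 8080:8080
    environment:
      - MODELS_PATH=/models
      - CONTEXT_SIZE=700
      - THREADS=10
    volumes:
      - ./models:/models:cached
    command: api

Because the values are hardcoded here, edits to .env no longer affect these three settings for this service.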