mirror of
https://github.com/mudler/LocalAI.git
synced 2025-05-21 02:55:01 +00:00
Bump vLLM version + more options when loading models in vLLM (#1782)
* Bump vLLM version to 0.3.2 * Add vLLM model loading options * Remove transformers-exllama * Fix install exllama
This commit is contained in:
parent
1c312685aa
commit
939411300a
28 changed files with 736 additions and 641 deletions
|
@ -1,7 +1,8 @@
|
|||
# Conda environment spec for the exllama backend, consumed by install.sh.
# Exported so the recursive make / child shells below can read it; the
# surrounding quotes are stripped by the shell when the recipe expands it.
export CONDA_ENV_PATH = "exllama.yml"

# `exllama` builds no file of that name — declare it phony so a stray
# file called "exllama" can never mask the target.
.PHONY: exllama
exllama:
	# Shared transformers conda env must exist before this backend installs.
	# $(MAKE) (not bare `make`) propagates -j/-n and the jobserver.
	$(MAKE) -C ../common-env/transformers
	# Pass the env spec explicitly (changed in #1782 from a bare
	# `bash install.sh`) so install.sh creates the exllama environment.
	bash install.sh ${CONDA_ENV_PATH}
|
||||
|
||||
.PHONY: run
|
||||
run:
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue