Bump vLLM version + more options when loading models in vLLM (#1782)

* Bump vLLM version to 0.3.2

* Add vLLM model loading options

* Remove transformers-exllama

* Fix install exllama
This commit is contained in:
Ludovic Leroux 2024-03-01 16:48:53 -05:00 committed by GitHub
parent 1c312685aa
commit 939411300a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
28 changed files with 736 additions and 641 deletions

View file

@@ -1,7 +1,8 @@
export CONDA_ENV_PATH = "exllama.yml"
.PHONY: exllama
exllama:
$(MAKE) -C ../common-env/transformers
bash install.sh
bash install.sh ${CONDA_ENV_PATH}
.PHONY: run
run:

File diff suppressed because one or more lines are too long

View file

@@ -1,14 +1,22 @@
#!/bin/bash
set -ex
##
## A bash script that installs the required dependencies of exllama and prepares the environment
export PATH=$PATH:/opt/conda/bin
# Activate conda environment
source activate transformers
# Check if the environment exists
conda_env_exists(){
! conda list --name "${@}" >/dev/null 2>/dev/null
}
echo $CONDA_PREFIX
if conda_env_exists "exllama" ; then
echo "Creating virtual environment..."
conda env create --name exllama --file $1
echo "Virtual environment created."
else
echo "Virtual environment already exists."
fi
source activate exllama
git clone https://github.com/turboderp/exllama $CONDA_PREFIX/exllama && pushd $CONDA_PREFIX/exllama && pip install -r requirements.txt && popd

View file

@@ -2,11 +2,10 @@
##
## A bash script wrapper that runs the exllama server with conda
export PATH=$PATH:/opt/conda/bin
# Activate conda environment
source activate transformers
source activate exllama
# get the directory where the bash script is located
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"