mirror of
https://github.com/mudler/LocalAI.git
synced 2025-05-20 18:45:00 +00:00
Bump vLLM version + more options when loading models in vLLM (#1782)
* Bump vLLM version to 0.3.2 * Add vLLM model loading options * Remove transformers-exllama * Fix install exllama
This commit is contained in:
parent
1c312685aa
commit
939411300a
28 changed files with 736 additions and 641 deletions
|
@ -1,14 +1,22 @@
|
|||
#!/bin/bash
set -ex

##
## Installs the exllama backend: ensures a dedicated conda environment
## exists, activates it, and clones + installs exllama from source.
## Usage: <script> <conda-environment-file>
##

export PATH=$PATH:/opt/conda/bin

# Activate the base transformers conda environment so the `conda` tooling
# below runs in a known context.
source activate transformers

# Returns success (0) when the named conda environment does NOT exist —
# note the leading '!' inverts `conda list`'s status. The caller therefore
# creates the environment on success.
conda_env_exists(){
    ! conda list --name "${@}" >/dev/null 2>/dev/null
}

echo "$CONDA_PREFIX"

if conda_env_exists "exllama" ; then
    echo "Creating virtual environment..."
    # Quote "$1" so an environment-file path containing spaces still works.
    conda env create --name exllama --file "$1"
    echo "Virtual environment created."
else
    echo "Virtual environment already exists."
fi

source activate exllama

# Fetch exllama and install its Python requirements inside the freshly
# activated environment. Quote "$CONDA_PREFIX" in case the prefix path
# contains spaces.
git clone https://github.com/turboderp/exllama "$CONDA_PREFIX/exllama" && pushd "$CONDA_PREFIX/exllama" && pip install -r requirements.txt && popd
|
Loading…
Add table
Add a link
Reference in a new issue